From d950c8b865aaf3085766c80a62abc1ac60d69b8f Mon Sep 17 00:00:00 2001
From: Anish Asthana
Date: Mon, 11 Sep 2023 10:30:09 -0400
Subject: [PATCH 001/496] Add missing 'a' for image-build action

Signed-off-by: Anish Asthana
---
 .github/workflows/release.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index a13d92f3..20dce4e7 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -86,7 +86,7 @@ jobs:
       - name: Notebook Image Build and Push
         run: |
-          gh workflow run image-build-and-push.yml --repo ${{ github.event.inputs.codeflare-repository-organization }}/codeflare-sdk --ref ${{ github.ref }} --field is-stable=${{ github.event.inputs.is-stable }} --field release-version=${{ github.event.inputs.release-version }} --field quay-organization=${{ github.event.inputs.quay-organization }}
+          gh workflow run image-build-and-push.yaml --repo ${{ github.event.inputs.codeflare-repository-organization }}/codeflare-sdk --ref ${{ github.ref }} --field is-stable=${{ github.event.inputs.is-stable }} --field release-version=${{ github.event.inputs.release-version }} --field quay-organization=${{ github.event.inputs.quay-organization }}
         env:
           GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }}
         shell: bash

From c2013baf4dc8a28320b471ccf8b6eccfa2985f5d Mon Sep 17 00:00:00 2001
From: Anish Asthana
Date: Mon, 11 Sep 2023 10:49:13 -0400
Subject: [PATCH 002/496] Add cleanup step

Signed-off-by: Anish Asthana
---
 .github/workflows/image-build-and-push.yaml | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/.github/workflows/image-build-and-push.yaml b/.github/workflows/image-build-and-push.yaml
index 8aaf7703..451a9bdf 100644
--- a/.github/workflows/image-build-and-push.yaml
+++ b/.github/workflows/image-build-and-push.yaml
@@ -28,6 +28,22 @@ jobs:
   release:
     runs-on: ubuntu-latest
     steps:
+      - name: Cleanup
+        run: |
+          ls -lart
+          echo "Initial status:"
+          df -h
+          echo "Cleaning up resources:"
+          sudo swapoff -a
+          sudo rm -f /swapfile
+          sudo apt clean
+          sudo rm -rf /usr/share/dotnet
+          sudo rm -rf /opt/ghc
+          sudo rm -rf "/usr/local/share/boost"
+          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+          docker rmi $(docker image ls -aq)
+          echo "Final status:"
+          df -h
       - name: Checkout the repository
         uses: actions/checkout@v3
      - name: Install Python

From 808db3d73f3036ad5b91f3aaf248fb0fe401beff Mon Sep 17 00:00:00 2001
From: Anish Asthana
Date: Tue, 12 Sep 2023 11:37:32 -0400
Subject: [PATCH 003/496] Increase OWNERS

Signed-off-by: Anish Asthana
---
 OWNERS | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/OWNERS b/OWNERS
index d6c91274..07e6825e 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,8 +1,11 @@
 approvers:
+  - kpostoffice
   - maxusmusti
   - MichaelClifford
 reviewers:
   - anishasthana
+  - Bobbins228
+  - dimakis
   - kpostoffice
   - maxusmusti
   - MichaelClifford

From bcfe678c278c768ca6663212726253a4d3e19295 Mon Sep 17 00:00:00 2001
From: Anish Asthana
Date: Tue, 12 Sep 2023 11:49:55 -0400
Subject: [PATCH 004/496] Add Fiona to reviewers

Signed-off-by: Anish Asthana
---
 OWNERS | 1 +
 1 file changed, 1 insertion(+)

diff --git a/OWNERS b/OWNERS
index 07e6825e..1b962d5f 100644
--- a/OWNERS
+++ b/OWNERS
@@ -6,6 +6,7 @@ reviewers:
   - anishasthana
   - Bobbins228
   - dimakis
+  - Fiona-Waters
   - kpostoffice
   - maxusmusti
   - MichaelClifford

From c7dc58a8918365116e3570ab1cd33184fe247ada Mon Sep 17 00:00:00 2001
From: Alexey Roytman
Date: Sat, 2 Sep 2023 17:22:49 +0300
Subject: [PATCH 005/496] RayCluster status safeguards

Signed-off-by: Alexey Roytman
---
 src/codeflare_sdk/cluster/cluster.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py
index afad28ba..b496ab6e 100644
--- a/src/codeflare_sdk/cluster/cluster.py
+++ b/src/codeflare_sdk/cluster/cluster.py
@@ -572,7 +572,7 @@ def _get_app_wrappers(
 
 
 def _map_to_ray_cluster(rc) -> Optional[RayCluster]:
-    if "state" in rc["status"]:
+    if "status" in rc and "state" in rc["status"]:
         status = RayClusterStatus(rc["status"]["state"].lower())
     else:
         status = RayClusterStatus.UNKNOWN

From 7d49e82ed00167489aef5f90ebc8862c3ff44295 Mon Sep 17 00:00:00 2001
From: Anish Asthana
Date: Mon, 10 Apr 2023 16:50:57 -0400
Subject: [PATCH 006/496] Update python tests job

Signed-off-by: Anish Asthana
---
 .github/build/Containerfile             | 12 ++++--
 .github/build/requirements-dev.txt      |  6 ---
 .github/build/requirements.txt          | 11 ++++++
 .../{precommit.yaml => pre-commit.yaml}  |  0
 .github/workflows/python-app.yml         | 38 -------------------
 .github/workflows/unit-tests.yml         | 21 ++++++++++
 README.md                                |  5 +--
 requirements-dev.txt                     |  6 ---
 8 files changed, 42 insertions(+), 57 deletions(-)
 delete mode 100644 .github/build/requirements-dev.txt
 create mode 100644 .github/build/requirements.txt
 rename .github/workflows/{precommit.yaml => pre-commit.yaml} (100%)
 delete mode 100644 .github/workflows/python-app.yml
 create mode 100644 .github/workflows/unit-tests.yml
 delete mode 100644 requirements-dev.txt

diff --git a/.github/build/Containerfile b/.github/build/Containerfile
index 3c1393cc..6d578856 100644
--- a/.github/build/Containerfile
+++ b/.github/build/Containerfile
@@ -5,10 +5,16 @@ LABEL summary="Toolchain for running pre-commit hooks." \
       io.k8s.display-name="Pre-Commit Toolchain"
 
 USER root
-RUN dnf install nodejs
+RUN dnf install nodejs -y && \
+    dnf clean all && \
+    rm -rf /var/cache/dnf
+ADD https://mirror.openshift.com/pub/openshift-v4/clients/oc/latest/linux/oc.tar.gz $TMPDIR/
+RUN tar -C /usr/local/bin -xvf $TMPDIR/oc.tar.gz && \
+    chmod +x /usr/local/bin/oc && \
+    rm $TMPDIR/oc.tar.gz
 
 USER $USERID
-COPY requirements-dev.txt /tmp/requirements-dev.txt
-RUN python -m pip install -r /tmp/requirements-dev.txt
+COPY requirements.txt /tmp/requirements.txt
+RUN python -m pip install -r /tmp/requirements.txt
 
 CMD bash
diff --git a/.github/build/requirements-dev.txt b/.github/build/requirements-dev.txt
deleted file mode 100644
index 986bc512..00000000
--- a/.github/build/requirements-dev.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-pre-commit
-poetry
-pytest
-pytest-mock
-coverage
-black==22.3.0
diff --git a/.github/build/requirements.txt b/.github/build/requirements.txt
new file mode 100644
index 00000000..3a7351ac
--- /dev/null
+++ b/.github/build/requirements.txt
@@ -0,0 +1,11 @@
+openshift-client==1.0.18
+rich==12.5.1
+ray[default]==2.1.0
+kubernetes==26.1.0
+codeflare-torchx==0.6.0.dev0
+pre-commit
+poetry
+pytest
+pytest-mock
+coverage
+black==22.3.0
diff --git a/.github/workflows/precommit.yaml b/.github/workflows/pre-commit.yaml
similarity index 100%
rename from .github/workflows/precommit.yaml
rename to .github/workflows/pre-commit.yaml
diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml
deleted file mode 100644
index f22e8f22..00000000
--- a/.github/workflows/python-app.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-# This workflow will install Python dependencies, run tests and lint with a single version of Python
-# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
-
-name: Python application
-
-on:
-  push:
-    branches: [ main ]
-  pull_request:
-    branches: [ main ]
-
-jobs:
-  build:
-
-    runs-on: ubuntu-latest
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Set up Python 3.9
-      uses: actions/setup-python@v2
-      with:
-        python-version: 3.9
-    - name: Install dependencies
-      run: |
-        python -m pip install --upgrade pip
-        pip install pytest==6.2.4
-        pip install pytest-mock==3.6.1
-        pip install coverage
-        pip install black==22.3.0
-        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
-    - name: Check formatting with black
-      run: |
-        black --check .
-    - name: Test with pytest and check coverage
-      run: |
-        coverage run -m --source=src pytest -v tests/unit_test.py
-        coverage=$(coverage report -m | tail -1 | tail -c 4 | head -c 2)
-        if (( $coverage < 90 )); then exit 1; else echo "Coverage passed, ${coverage}%"; fi
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
new file mode 100644
index 00000000..daf6071d
--- /dev/null
+++ b/.github/workflows/unit-tests.yml
@@ -0,0 +1,21 @@
+name: Python Tests
+
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+
+jobs:
+  unit-tests:
+
+    runs-on: ubuntu-latest
+    container:
+      image: quay.io/project-codeflare/codeflare-sdk-precommit:v0.0.3
+    steps:
+    - uses: actions/checkout@v3
+    - name: Test with pytest and check coverage
+      run: |
+        coverage run -m --source=src pytest -v tests/unit_test.py
+        coverage=$(coverage report -m | tail -1 | tail -c 4 | head -c 2)
+        if (( $coverage < 90 )); then exit 1; else echo "Coverage passed, ${coverage}%"; fi
diff --git a/README.md b/README.md
index 6f5e6881..3fa1f808 100644
--- a/README.md
+++ b/README.md
@@ -22,10 +22,7 @@ Can be installed via `pip`: `pip install codeflare-sdk`
 
 ### Prerequisites
 
 We recommend using Python 3.9 for development.
-Install development specific dependencies:
-  `$ pip install -r requirements-dev.txt`
-
-Additional dependencies can be found in `requirements.txt`: `$ pip install -r requirements.txt`
+Install dependencies: `$ pip install -r requirements.txt`
 
 ### Pre-commit
diff --git a/requirements-dev.txt b/requirements-dev.txt
deleted file mode 100644
index 986bc512..00000000
--- a/requirements-dev.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-pre-commit
-poetry
-pytest
-pytest-mock
-coverage
-black==22.3.0

From 6c786b3e8da530fcb211cce791f53144dcde23cf Mon Sep 17 00:00:00 2001
From: Fiona Waters
Date: Mon, 14 Aug 2023 11:17:56 +0100
Subject: [PATCH 007/496] refactor to use poetry

Co-authored-by: Anish Asthana aasthana@redhat.com
Co-authored-by: Fiona Waters fiwaters6@gmail.com
---
 .github/build/Containerfile  | 10 +-
 .github/build/README.md      |  2 +-
 .github/build/pyproject.toml | 44 +
 .gitignore                   |  1 -
 README.md                    |  7 +-
 poetry.lock                  | 2451 ++++++++++++++++++++++++++++++++++
 requirements.txt             |  8 -
 7 files changed, 2509 insertions(+), 14 deletions(-)
 create mode 100644 .github/build/pyproject.toml
 create mode 100644 poetry.lock
 delete mode 100644 requirements.txt

diff --git a/.github/build/Containerfile b/.github/build/Containerfile
index 6d578856..9532d328 100644
--- a/.github/build/Containerfile
+++ b/.github/build/Containerfile
@@ -14,7 +14,11 @@ RUN tar -C /usr/local/bin -xvf $TMPDIR/oc.tar.gz && \
     rm $TMPDIR/oc.tar.gz
 
 USER $USERID
-COPY requirements.txt /tmp/requirements.txt
-RUN python -m pip install -r /tmp/requirements.txt
+RUN pip3 install poetry
+COPY pyproject.toml ./
+RUN poetry install
 
-CMD bash
+#COPY requirements.txt /tmp/requirements.txt
+#RUN python -m pip install -r /tmp/requirements.txt
+
+CMD bash
\ No newline at end of file
diff --git a/.github/build/README.md b/.github/build/README.md
index 9273edb8..48af1512 100644
--- a/.github/build/README.md
+++ b/.github/build/README.md
@@ -1,3 +1,3 @@
 # Pre-Commit Build Artifacts
 
-This directory contains the artifacts required to build the codeflare-sdk pre-commit image. As of right now, we will need to manually update `requirements-dev.txt` in this directory as well.
+This directory contains the artifacts required to build the codeflare-sdk pre-commit image.
\ No newline at end of file
diff --git a/.github/build/pyproject.toml b/.github/build/pyproject.toml
new file mode 100644
index 00000000..b2167e99
--- /dev/null
+++ b/.github/build/pyproject.toml
@@ -0,0 +1,44 @@
+[tool.poetry]
+name = "codeflare-sdk"
+version = "0.0.0-dev"
+description = "Python SDK for codeflare client"
+
+license = "Apache-2.0"
+
+authors = [
+    "Michael Clifford ",
+    "Mustafa Eyceoz ",
+    "Abhishek Malvankar ",
+    "Atin Sood ",
+]
+
+readme = 'README.md'
+
+repository = "https://github.com/project-codeflare/codeflare-sdk"
+homepage = "https://github.com/project-codeflare/codeflare-sdk"
+
+keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale']
+
+[tool.poetry.dependencies]
+python = ">=3.8,<4.0"
+openshift-client = "1.0.18"
+rich = "^12.5"
+ray = {version = "2.5.0", extras = ["default"]}
+kubernetes = ">= 25.3.0, < 27"
+codeflare-torchx = "0.6.0.dev0"
+cryptography = "40.0.2"
+executing = "1.2.0"
+pydantic = "< 2"
+pre-commit = "3.3.3"
+
+[tool.poetry.group.docs]
+optional = true
+
+[tool.poetry.group.docs.dependencies]
+pdoc3 = "0.10.0"
+
+[tool.poetry.group.test.dependencies]
+pytest = "7.4.0"
+coverage = "7.2.7"
+pytest-mock = "3.11.1"
+black = "22.3.0"
diff --git a/.gitignore b/.gitignore
index fbb31b2b..9b0018c2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,7 +4,6 @@ __pycache__/
 .coverage
 Pipfile
 Pipfile.lock
-poetry.lock
 .venv*
 build/
 tls-cluster-namespace
diff --git a/README.md b/README.md
index 3fa1f808..b20bd5e6 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,12 @@ Can be installed via `pip`: `pip install codeflare-sdk`
 
 ### Prerequisites
 
 We recommend using Python 3.9 for development.
-Install dependencies: `$ pip install -r requirements.txt`
+Install dependencies: `poetry install`
+This will install all requirements as specified in the poetry.lock file.
+
+If you require a requirements.txt file you can run:
+
+`poetry export -f requirements.txt --output requirements.txt --without-hashes`
 
 ### Pre-commit
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 00000000..9767a715
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,2451 @@
+# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+ +[[package]] +name = "aiohttp" +version = "3.8.5" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, + {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, + {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, + {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, + {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, + {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, + {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, + {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, + {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, + {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, + {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, + {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, + {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, + {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, + {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, + {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, + {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, + {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, + {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, + {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, + {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, + {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, + {file = 
"aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, + {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, + {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, + {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, + {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, + {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, + {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, + {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, + {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, + {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, + {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, + {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aiohttp-cors" +version = "0.7.0" +description = "CORS support for aiohttp" +optional = false +python-versions = "*" +files = [ + {file = 
"aiohttp-cors-0.7.0.tar.gz", hash = "sha256:4d39c6d7100fd9764ed1caf8cebf0eb01bf5e3f24e2e073fda6234bc48b19f5d"}, + {file = "aiohttp_cors-0.7.0-py3-none-any.whl", hash = "sha256:0451ba59fdf6909d0e2cd21e4c0a43752bc0703d33fc78ae94d9d9321710193e"}, +] + +[package.dependencies] +aiohttp = ">=1.1" + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "ansicon" +version = "1.89.0" +description = "Python wrapper for loading Jason Hood's ANSICON" +optional = false +python-versions = "*" +files = [ + {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, + {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, +] + +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.6" +files = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} + +[[package]] +name = "asynctest" +version = "0.13.0" +description = "Enhance the standard unittest package with features for testing asyncio libraries" +optional = false +python-versions = ">=3.5" +files = [ + {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, + {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, +] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "bcrypt" +version = "4.0.1" +description = "Modern password hashing for your software and your servers" +optional = false +python-versions = ">=3.6" +files = [ + {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, + {file = 
"bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, + {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, + {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, + {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, + {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, + {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, + {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, + {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, + {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "blessed" +version = "1.20.0" +description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." 
+optional = false +python-versions = ">=2.7" +files = [ + {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, + {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, +] + +[package.dependencies] +jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} +six = ">=1.9.0" +wcwidth = ">=0.1.4" + +[[package]] +name = "cachetools" +version = "5.3.1" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, + {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, +] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = 
"cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = 
"cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.2.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + 
{file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, +] + +[[package]] +name = "click" +version = "8.1.6" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, + {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "codeflare-torchx" +version = "0.6.0.dev0" +description = "TorchX SDK and Components" +optional = false +python-versions = ">=3.7" +files = [ + {file = "codeflare_torchx-0.6.0.dev0-py3-none-any.whl", hash = 
"sha256:fd6b2ee084097bc0e84a4b4b44772a7c44fc90e3490a93ec804ed088534cbd0a"}, +] + +[package.dependencies] +docker = "*" +docstring-parser = "0.8.1" +filelock = "*" +fsspec = "*" +importlib-metadata = "*" +pyre-extensions = "*" +pyyaml = "*" +tabulate = "*" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +dev = ["aiobotocore (==2.4.2)", "ax-platform[mysql] (==0.2.3)", "black (==23.3.0)", "boto3 (==1.24.59)", "captum (>=0.4.0)", "flake8 (==3.9.0)", "fsspec[s3] (==2023.1.0)", "google-api-core", "google-cloud-batch (>=0.5.0)", "google-cloud-logging (>=3.0.0)", "google-cloud-runtimeconfig (>=0.33.2)", "hydra-core", "ipython", "kfp (==1.8.9)", "mlflow-skinny", "moto (==4.1.6)", "pyre-check", "pyre-extensions", "pytest", "pytorch-lightning (==1.5.10)", "ray[default]", "torch (>=1.10.0)", "torch-model-archiver (>=0.4.2)", "torchmetrics (<0.11.0)", "torchserve (>=0.4.2)", "torchtext (>=0.11.0)", "torchvision (>=0.11.1)", "ts (==0.5.1)", "usort (==1.0.2)"] +gcp-batch = ["google-cloud-batch (>=0.5.0)", "google-cloud-logging (>=3.0.0)", "google-cloud-runtimeconfig (>=0.33.2)"] +kfp = ["kfp (==1.6.2)"] +kubernetes = ["kubernetes (>=11)"] +ray = ["ray (>=1.12.1)"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "colorful" +version = "0.5.5" +description = "Terminal string styling done right, in Python." +optional = false +python-versions = "*" +files = [ + {file = "colorful-0.5.5-py2.py3-none-any.whl", hash = "sha256:62c187e27c1433db9463ff93b1451898d1e7e23a7e553583fd9daeb6325182e4"}, + {file = "colorful-0.5.5.tar.gz", hash = "sha256:66f8c1264b2a26f7293b96a03bb7a76c4bc8b9634369a0bffdcd12d618056a1d"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "commonmark" +version = "0.9.1" +description = "Python parser for the CommonMark Markdown spec" +optional = false +python-versions = "*" +files = [ + {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, + {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, +] + +[package.extras] +test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] + +[[package]] +name = "coverage" +version = "7.2.7" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = 
"coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = 
"coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + 
{file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "40.0.2" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, + {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, + {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, + {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, + {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, + {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, + {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, + {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, + {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +pep8test = ["black", "check-manifest", "mypy", "ruff"] +sdist = ["setuptools-rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] +tox = ["tox"] + +[[package]] +name = "distlib" +version = 
"0.3.7" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, +] + +[[package]] +name = "docker" +version = "6.1.3" +description = "A Python library for the Docker Engine API." +optional = false +python-versions = ">=3.7" +files = [ + {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, + {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" +websocket-client = ">=0.32.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] + +[[package]] +name = "docstring-parser" +version = "0.8.1" +description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" +optional = false +python-versions = ">=3.5" +files = [ + {file = "docstring_parser-0.8.1.tar.gz", hash = "sha256:dd68bbe33446e9dbdec087fd18bad8f6dc8eedda4c5c2321208951f3acfc5e04"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.1.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, + {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "1.2.0" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = "*" +files = [ + {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, + {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, +] + +[package.extras] +tests = ["asttokens", "littleutils", "pytest", "rich"] + +[[package]] +name = "filelock" +version = "3.12.2" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, +] + +[package.extras] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "frozenlist" +version = "1.3.3" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.7" +files = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = 
"frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, +] + +[[package]] +name = "fsspec" +version = "2023.1.0" +description = "File-system specification" +optional = false +python-versions = ">=3.7" +files = [ + {file = "fsspec-2023.1.0-py3-none-any.whl", hash = "sha256:b833e2e541e9e8cde0ab549414187871243177feb3d344f9d27b25a93f5d8139"}, + {file = "fsspec-2023.1.0.tar.gz", hash = "sha256:fbae7f20ff801eb5f7d0bedf81f25c787c0dfac5e982d98fa3884a9cde2b5411"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +entrypoints = ["importlib-metadata"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "google-api-core" +version = "2.11.1" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, + {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || 
>4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-auth" +version = "2.22.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, + {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" +six = ">=1.9.0" +urllib3 = "<2.0" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.60.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"}, + {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "gpustat" +version = "1.1" +description = "An utility to monitor NVIDIA GPU status and usage" +optional = false +python-versions = ">=3.6" +files = [ + {file = "gpustat-1.1.tar.gz", hash = "sha256:c8fc105404aac11884f70ed2fa06cfd210d3cd389ccae4a9bc38579c72460cee"}, +] + +[package.dependencies] +blessed = ">=1.17.1" +nvidia-ml-py = ">=11.450.129" +psutil = ">=5.6.0" + +[package.extras] +completion = ["shtab"] +test = ["mockito (>=1.2.1)", "pytest (>=5.4.1)", "pytest-runner"] + +[[package]] +name = "grpcio" +version = "1.49.1" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.49.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:fd86040232e805b8e6378b2348c928490ee595b058ce9aaa27ed8e4b0f172b20"}, + {file = "grpcio-1.49.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6fd0c9cede9552bf00f8c5791d257d5bf3790d7057b26c59df08be5e7a1e021d"}, + {file = "grpcio-1.49.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d0d402e158d4e84e49c158cb5204119d55e1baf363ee98d6cb5dce321c3a065d"}, + {file = "grpcio-1.49.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ceec743d42a627e64ea266059a62d214c5a3cdfcd0d7fe2b7a8e4e82527c7"}, + {file = "grpcio-1.49.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2106d9c16527f0a85e2eea6e6b91a74fc99579c60dd810d8690843ea02bc0f5f"}, + {file = "grpcio-1.49.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:52dd02b7e7868233c571b49bc38ebd347c3bb1ff8907bb0cb74cb5f00c790afc"}, + {file = 
"grpcio-1.49.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:120fecba2ec5d14b5a15d11063b39783fda8dc8d24addd83196acb6582cabd9b"}, + {file = "grpcio-1.49.1-cp310-cp310-win32.whl", hash = "sha256:f1a3b88e3c53c1a6e6bed635ec1bbb92201bb6a1f2db186179f7f3f244829788"}, + {file = "grpcio-1.49.1-cp310-cp310-win_amd64.whl", hash = "sha256:a7d0017b92d3850abea87c1bdec6ea41104e71c77bca44c3e17f175c6700af62"}, + {file = "grpcio-1.49.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:9fb17ff8c0d56099ac6ebfa84f670c5a62228d6b5c695cf21c02160c2ac1446b"}, + {file = "grpcio-1.49.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:075f2d06e3db6b48a2157a1bcd52d6cbdca980dd18988fe6afdb41795d51625f"}, + {file = "grpcio-1.49.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46d93a1b4572b461a227f1db6b8d35a88952db1c47e5fadcf8b8a2f0e1dd9201"}, + {file = "grpcio-1.49.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc79b2b37d779ac42341ddef40ad5bf0966a64af412c89fc2b062e3ddabb093f"}, + {file = "grpcio-1.49.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5f8b3a971c7820ea9878f3fd70086240a36aeee15d1b7e9ecbc2743b0e785568"}, + {file = "grpcio-1.49.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49b301740cf5bc8fed4fee4c877570189ae3951432d79fa8e524b09353659811"}, + {file = "grpcio-1.49.1-cp311-cp311-win32.whl", hash = "sha256:1c66a25afc6c71d357867b341da594a5587db5849b48f4b7d5908d236bb62ede"}, + {file = "grpcio-1.49.1-cp311-cp311-win_amd64.whl", hash = "sha256:6b6c3a95d27846f4145d6967899b3ab25fffc6ae99544415e1adcacef84842d2"}, + {file = "grpcio-1.49.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:1cc400c8a2173d1c042997d98a9563e12d9bb3fb6ad36b7f355bc77c7663b8af"}, + {file = "grpcio-1.49.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:34f736bd4d0deae90015c0e383885b431444fe6b6c591dea288173df20603146"}, + {file = "grpcio-1.49.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:196082b9c89ebf0961dcd77cb114bed8171964c8e3063b9da2fb33536a6938ed"}, + {file = "grpcio-1.49.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c9f89c42749890618cd3c2464e1fbf88446e3d2f67f1e334c8e5db2f3272bbd"}, + {file = "grpcio-1.49.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64419cb8a5b612cdb1550c2fd4acbb7d4fb263556cf4625f25522337e461509e"}, + {file = "grpcio-1.49.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8a5272061826e6164f96e3255405ef6f73b88fd3e8bef464c7d061af8585ac62"}, + {file = "grpcio-1.49.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ea9d0172445241ad7cb49577314e39d0af2c5267395b3561d7ced5d70458a9f3"}, + {file = "grpcio-1.49.1-cp37-cp37m-win32.whl", hash = "sha256:2070e87d95991473244c72d96d13596c751cb35558e11f5df5414981e7ed2492"}, + {file = "grpcio-1.49.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fcedcab49baaa9db4a2d240ac81f2d57eb0052b1c6a9501b46b8ae912720fbf"}, + {file = "grpcio-1.49.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:afbb3475cf7f4f7d380c2ca37ee826e51974f3e2665613996a91d6a58583a534"}, + {file = "grpcio-1.49.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:a4f9ba141380abde6c3adc1727f21529137a2552002243fa87c41a07e528245c"}, + {file = "grpcio-1.49.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:cf0a1fb18a7204b9c44623dfbd1465b363236ce70c7a4ed30402f9f60d8b743b"}, + {file = "grpcio-1.49.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17bb6fe72784b630728c6cff9c9d10ccc3b6d04e85da6e0a7b27fb1d135fac62"}, + {file = 
"grpcio-1.49.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18305d5a082d1593b005a895c10041f833b16788e88b02bb81061f5ebcc465df"}, + {file = "grpcio-1.49.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b6a1b39e59ac5a3067794a0e498911cf2e37e4b19ee9e9977dc5e7051714f13f"}, + {file = "grpcio-1.49.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0e20d59aafc086b1cc68400463bddda6e41d3e5ed30851d1e2e0f6a2e7e342d3"}, + {file = "grpcio-1.49.1-cp38-cp38-win32.whl", hash = "sha256:e1e83233d4680863a421f3ee4a7a9b80d33cd27ee9ed7593bc93f6128302d3f2"}, + {file = "grpcio-1.49.1-cp38-cp38-win_amd64.whl", hash = "sha256:221d42c654d2a41fa31323216279c73ed17d92f533bc140a3390cc1bd78bf63c"}, + {file = "grpcio-1.49.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:fa9e6e61391e99708ac87fc3436f6b7b9c6b845dc4639b406e5e61901e1aacde"}, + {file = "grpcio-1.49.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9b449e966ef518ce9c860d21f8afe0b0f055220d95bc710301752ac1db96dd6a"}, + {file = "grpcio-1.49.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:aa34d2ad9f24e47fa9a3172801c676e4037d862247e39030165fe83821a7aafd"}, + {file = "grpcio-1.49.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5207f4eed1b775d264fcfe379d8541e1c43b878f2b63c0698f8f5c56c40f3d68"}, + {file = "grpcio-1.49.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b24a74651438d45619ac67004638856f76cc13d78b7478f2457754cbcb1c8ad"}, + {file = "grpcio-1.49.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fe763781669790dc8b9618e7e677c839c87eae6cf28b655ee1fa69ae04eea03f"}, + {file = "grpcio-1.49.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2f2ff7ba0f8f431f32d4b4bc3a3713426949d3533b08466c4ff1b2b475932ca8"}, + {file = "grpcio-1.49.1-cp39-cp39-win32.whl", hash = "sha256:08ff74aec8ff457a89b97152d36cb811dcc1d17cd5a92a65933524e363327394"}, + {file = "grpcio-1.49.1-cp39-cp39-win_amd64.whl", hash = "sha256:274ffbb39717918c514b35176510ae9be06e1d93121e84d50b350861dcb9a705"}, + {file = "grpcio-1.49.1.tar.gz", hash = "sha256:d4725fc9ec8e8822906ae26bb26f5546891aa7fbc3443de970cc556d43a5c99f"}, +] + +[package.dependencies] +six = ">=1.5.2" + +[package.extras] +protobuf = ["grpcio-tools (>=1.49.1)"] + +[[package]] +name = "grpcio" +version = "1.51.3" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.51.3-cp310-cp310-linux_armv7l.whl", hash = "sha256:f601aaeae18dab81930fb8d4f916b0da21e89bb4b5f7367ef793f46b4a76b7b0"}, + {file = "grpcio-1.51.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:eef0450a4b5ed11feab639bf3eb1b6e23d0efa9b911bf7b06fb60e14f5f8a585"}, + {file = "grpcio-1.51.3-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:82b0ad8ac825d4bb31bff9f638557c045f4a6d824d84b21e893968286f88246b"}, + {file = "grpcio-1.51.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3667c06e37d6cd461afdd51cefe6537702f3d1dc5ff4cac07e88d8b4795dc16f"}, + {file = "grpcio-1.51.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3709048fe0aa23dda09b3e69849a12055790171dab9e399a72ea8f9dfbf9ac80"}, + {file = "grpcio-1.51.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:200d69857f9910f7458b39b9bcf83ee4a180591b40146ba9e49314e3a7419313"}, + {file = "grpcio-1.51.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cd9a5e68e79c5f031500e67793048a90209711e0854a9ddee8a3ce51728de4e5"}, + {file = "grpcio-1.51.3-cp310-cp310-win32.whl", hash = 
"sha256:6604f614016127ae10969176bbf12eb0e03d2fb3d643f050b3b69e160d144fb4"}, + {file = "grpcio-1.51.3-cp310-cp310-win_amd64.whl", hash = "sha256:e95c7ccd4c5807adef1602005513bf7c7d14e5a41daebcf9d8d30d8bf51b8f81"}, + {file = "grpcio-1.51.3-cp311-cp311-linux_armv7l.whl", hash = "sha256:5e77ee138100f0bb55cbd147840f87ee6241dbd25f09ea7cd8afe7efff323449"}, + {file = "grpcio-1.51.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:68a7514b754e38e8de9075f7bb4dee919919515ec68628c43a894027e40ddec4"}, + {file = "grpcio-1.51.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c1b9f8afa62ff265d86a4747a2990ec5a96e4efce5d5888f245a682d66eca47"}, + {file = "grpcio-1.51.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8de30f0b417744288cec65ec8cf84b8a57995cf7f1e84ccad2704d93f05d0aae"}, + {file = "grpcio-1.51.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b69c7adc7ed60da1cb1b502853db61f453fc745f940cbcc25eb97c99965d8f41"}, + {file = "grpcio-1.51.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d81528ffe0e973dc840ec73a4132fd18b8203ad129d7410155d951a0a7e4f5d0"}, + {file = "grpcio-1.51.3-cp311-cp311-win32.whl", hash = "sha256:040eb421613b57c696063abde405916dd830203c184c9000fc8c3b3b3c950325"}, + {file = "grpcio-1.51.3-cp311-cp311-win_amd64.whl", hash = "sha256:2a8e17286c4240137d933b8ca506465472248b4ce0fe46f3404459e708b65b68"}, + {file = "grpcio-1.51.3-cp37-cp37m-linux_armv7l.whl", hash = "sha256:d5cd1389669a847555df54177b911d9ff6f17345b2a6f19388707b7a9f724c88"}, + {file = "grpcio-1.51.3-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:be1bf35ce82cdbcac14e39d5102d8de4079a1c1a6a06b68e41fcd9ef64f9dd28"}, + {file = "grpcio-1.51.3-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:5eed34994c095e2bf7194ffac7381c6068b057ef1e69f8f08db77771350a7566"}, + {file = "grpcio-1.51.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f9a7d88082b2a17ae7bd3c2354d13bab0453899e0851733f6afa6918373f476"}, + {file = "grpcio-1.51.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c8abbc5f837111e7bd619612eedc223c290b0903b952ce0c7b00840ea70f14"}, + {file = "grpcio-1.51.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:165b05af77e6aecb4210ae7663e25acf234ba78a7c1c157fa5f2efeb0d6ec53c"}, + {file = "grpcio-1.51.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54e36c2ee304ff15f2bfbdc43d2b56c63331c52d818c364e5b5214e5bc2ad9f6"}, + {file = "grpcio-1.51.3-cp37-cp37m-win32.whl", hash = "sha256:cd0daac21d9ef5e033a5100c1d3aa055bbed28bfcf070b12d8058045c4e821b1"}, + {file = "grpcio-1.51.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2fdd6333ce96435408565a9dbbd446212cd5d62e4d26f6a3c0feb1e3c35f1cc8"}, + {file = "grpcio-1.51.3-cp38-cp38-linux_armv7l.whl", hash = "sha256:54b0c29bdd9a3b1e1b61443ab152f060fc719f1c083127ab08d03fac5efd51be"}, + {file = "grpcio-1.51.3-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:ffaaf7e93fcb437356b5a4b23bf36e8a3d0221399ff77fd057e4bc77776a24be"}, + {file = "grpcio-1.51.3-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:eafbe7501a3268d05f2e450e1ddaffb950d842a8620c13ec328b501d25d2e2c3"}, + {file = "grpcio-1.51.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:881ecb34feabf31c6b3b9bbbddd1a5b57e69f805041e5a2c6c562a28574f71c4"}, + {file = "grpcio-1.51.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e860a3222139b41d430939bbec2ec9c3f6c740938bf7a04471a9a8caaa965a2e"}, + {file = "grpcio-1.51.3-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:49ede0528e9dac7e8a9fe30b16c73b630ddd9a576bf4b675eb6b0c53ee5ca00f"}, + {file = "grpcio-1.51.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6972b009638b40a448d10e1bc18e2223143b8a7aa20d7def0d78dd4af4126d12"}, + {file = "grpcio-1.51.3-cp38-cp38-win32.whl", hash = "sha256:5694448256e3cdfe5bd358f1574a3f2f51afa20cc834713c4b9788d60b7cc646"}, + {file = "grpcio-1.51.3-cp38-cp38-win_amd64.whl", hash = "sha256:3ea4341efe603b049e8c9a5f13c696ca37fcdf8a23ca35f650428ad3606381d9"}, + {file = "grpcio-1.51.3-cp39-cp39-linux_armv7l.whl", hash = "sha256:6c677581ce129f5fa228b8f418cee10bd28dd449f3a544ea73c8ba590ee49d0b"}, + {file = "grpcio-1.51.3-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:30e09b5e0531685e176f49679b6a3b190762cc225f4565e55a899f5e14b3aa62"}, + {file = "grpcio-1.51.3-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c831f31336e81243f85b6daff3e5e8a123302ce0ea1f2726ad752fd7a59f3aee"}, + {file = "grpcio-1.51.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2cd2e4cefb724cab1ba2df4b7535a9980531b9ec51b4dbb5f137a1f3a3754ef0"}, + {file = "grpcio-1.51.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7a0d0bf44438869d307f85a54f25a896ad6b4b0ca12370f76892ad732928d87"}, + {file = "grpcio-1.51.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c02abd55409bfb293371554adf6a4401197ec2133dd97727c01180889014ba4d"}, + {file = "grpcio-1.51.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2f8ff75e61e1227ba7a3f16b2eadbcc11d0a54096d52ab75a6b88cfbe56f55d1"}, + {file = "grpcio-1.51.3-cp39-cp39-win32.whl", hash = "sha256:6c99a73a6260bdf844b2e5ddad02dcd530310f80e1fa72c300fa19c1c7496962"}, + {file = "grpcio-1.51.3-cp39-cp39-win_amd64.whl", hash = "sha256:22bdfac4f7f27acdd4da359b5e7e1973dc74bf1ed406729b07d0759fde2f064b"}, + {file = "grpcio-1.51.3.tar.gz", hash = "sha256:be7b2265b7527bb12109a7727581e274170766d5b3c9258d4e466f4872522d7a"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.51.3)"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, + {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "importlib-resources" +version = "5.12.0" +description = "Read resources from Python packages" +optional = false +python-versions = ">=3.7" +files = [ + 
{file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, + {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinxed" +version = "1.2.0" +description = "Jinxed Terminal Library" +optional = false +python-versions = "*" +files = [ + {file = "jinxed-1.2.0-py2.py3-none-any.whl", hash = "sha256:cfc2b2e4e3b4326954d546ba6d6b9a7a796ddcb0aef8d03161d005177eb0d48b"}, + {file = "jinxed-1.2.0.tar.gz", hash = "sha256:032acda92d5c57cd216033cbbd53de731e6ed50deb63eb4781336ca55f72cda5"}, +] + +[package.dependencies] +ansicon = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "jsonschema" +version = "4.17.3" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "kubernetes" +version = "26.1.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +files = [ + {file = "kubernetes-26.1.0-py2.py3-none-any.whl", hash = "sha256:e3db6800abf7e36c38d2629b5cb6b74d10988ee0cba6fba45595a7cbe60c0042"}, + {file = "kubernetes-26.1.0.tar.gz", hash = "sha256:5854b0c508e8d217ca205591384ab58389abdae608576f9c9afc35a3c76a366c"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +google-auth = ">=1.0.1" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +setuptools = ">=21.0.0" +six = ">=1.9.0" +urllib3 = ">=1.24.2" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal 
(>=1.0.2)"] + +[[package]] +name = "mako" +version = "1.2.4" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, + {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, +] + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markdown" +version = "3.4.4" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, + {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "msgpack" +version = "1.0.5" +description = "MessagePack serializer" +optional = false +python-versions = "*" +files = [ + {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:525228efd79bb831cf6830a732e2e80bc1b05436b086d4264814b4b2955b2fa9"}, + {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f8d8b3bf1ff2672567d6b5c725a1b347fe838b912772aa8ae2bf70338d5a198"}, + {file = "msgpack-1.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdc793c50be3f01106245a61b739328f7dccc2c648b501e237f0699fe1395b81"}, + {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cb47c21a8a65b165ce29f2bec852790cbc04936f502966768e4aae9fa763cb7"}, + {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42b9594cc3bf4d838d67d6ed62b9e59e201862a25e9a157019e171fbe672dd3"}, + {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b56a24893105dc52c1253649b60f475f36b3aa0fc66115bffafb624d7cb30b"}, + {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:1967f6129fc50a43bfe0951c35acbb729be89a55d849fab7686004da85103f1c"}, + {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a97bf595a232c3ee6d57ddaadd5453d174a52594bf9c21d10407e2a2d9b3bd"}, + {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d25dd59bbbbb996eacf7be6b4ad082ed7eacc4e8f3d2df1ba43822da9bfa122a"}, + {file = "msgpack-1.0.5-cp310-cp310-win32.whl", hash = "sha256:382b2c77589331f2cb80b67cc058c00f225e19827dbc818d700f61513ab47bea"}, + {file = "msgpack-1.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:4867aa2df9e2a5fa5f76d7d5565d25ec76e84c106b55509e78c1ede0f152659a"}, + {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9f5ae84c5c8a857ec44dc180a8b0cc08238e021f57abdf51a8182e915e6299f0"}, + {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e6ca5d5699bcd89ae605c150aee83b5321f2115695e741b99618f4856c50898"}, + {file = "msgpack-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5494ea30d517a3576749cad32fa27f7585c65f5f38309c88c6d137877fa28a5a"}, + {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab2f3331cb1b54165976a9d976cb251a83183631c88076613c6c780f0d6e45a"}, + {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28592e20bbb1620848256ebc105fc420436af59515793ed27d5c77a217477705"}, + {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe5c63197c55bce6385d9aee16c4d0641684628f63ace85f73571e65ad1c1e8d"}, + {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed40e926fa2f297e8a653c954b732f125ef97bdd4c889f243182299de27e2aa9"}, + {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b2de4c1c0538dcb7010902a2b97f4e00fc4ddf2c8cda9749af0e594d3b7fa3d7"}, + {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf22a83f973b50f9d38e55c6aade04c41ddda19b00c4ebc558930d78eecc64ed"}, + {file = "msgpack-1.0.5-cp311-cp311-win32.whl", hash = "sha256:c396e2cc213d12ce017b686e0f53497f94f8ba2b24799c25d913d46c08ec422c"}, + {file = "msgpack-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c4c68d87497f66f96d50142a2b73b97972130d93677ce930718f68828b382e2"}, + {file = "msgpack-1.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a2b031c2e9b9af485d5e3c4520f4220d74f4d222a5b8dc8c1a3ab9448ca79c57"}, + {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f837b93669ce4336e24d08286c38761132bc7ab29782727f8557e1eb21b2080"}, + {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1d46dfe3832660f53b13b925d4e0fa1432b00f5f7210eb3ad3bb9a13c6204a6"}, + {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:366c9a7b9057e1547f4ad51d8facad8b406bab69c7d72c0eb6f529cf76d4b85f"}, + {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4c075728a1095efd0634a7dccb06204919a2f67d1893b6aa8e00497258bf926c"}, + {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:f933bbda5a3ee63b8834179096923b094b76f0c7a73c1cfe8f07ad608c58844b"}, + {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:36961b0568c36027c76e2ae3ca1132e35123dcec0706c4b7992683cc26c1320c"}, + {file = "msgpack-1.0.5-cp36-cp36m-win32.whl", hash = 
"sha256:b5ef2f015b95f912c2fcab19c36814963b5463f1fb9049846994b007962743e9"}, + {file = "msgpack-1.0.5-cp36-cp36m-win_amd64.whl", hash = "sha256:288e32b47e67f7b171f86b030e527e302c91bd3f40fd9033483f2cacc37f327a"}, + {file = "msgpack-1.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:137850656634abddfb88236008339fdaba3178f4751b28f270d2ebe77a563b6c"}, + {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c05a4a96585525916b109bb85f8cb6511db1c6f5b9d9cbcbc940dc6b4be944b"}, + {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a62ec00b636583e5cb6ad313bbed36bb7ead5fa3a3e38938503142c72cba4f"}, + {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef8108f8dedf204bb7b42994abf93882da1159728a2d4c5e82012edd92c9da9f"}, + {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1835c84d65f46900920b3708f5ba829fb19b1096c1800ad60bae8418652a951d"}, + {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e57916ef1bd0fee4f21c4600e9d1da352d8816b52a599c46460e93a6e9f17086"}, + {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:17358523b85973e5f242ad74aa4712b7ee560715562554aa2134d96e7aa4cbbf"}, + {file = "msgpack-1.0.5-cp37-cp37m-win32.whl", hash = "sha256:cb5aaa8c17760909ec6cb15e744c3ebc2ca8918e727216e79607b7bbce9c8f77"}, + {file = "msgpack-1.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:ab31e908d8424d55601ad7075e471b7d0140d4d3dd3272daf39c5c19d936bd82"}, + {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b72d0698f86e8d9ddf9442bdedec15b71df3598199ba33322d9711a19f08145c"}, + {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:379026812e49258016dd84ad79ac8446922234d498058ae1d415f04b522d5b2d"}, + {file = "msgpack-1.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:332360ff25469c346a1c5e47cbe2a725517919892eda5cfaffe6046656f0b7bb"}, + {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:476a8fe8fae289fdf273d6d2a6cb6e35b5a58541693e8f9f019bfe990a51e4ba"}, + {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9985b214f33311df47e274eb788a5893a761d025e2b92c723ba4c63936b69b1"}, + {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48296af57cdb1d885843afd73c4656be5c76c0c6328db3440c9601a98f303d87"}, + {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:addab7e2e1fcc04bd08e4eb631c2a90960c340e40dfc4a5e24d2ff0d5a3b3edb"}, + {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:916723458c25dfb77ff07f4c66aed34e47503b2eb3188b3adbec8d8aa6e00f48"}, + {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:821c7e677cc6acf0fd3f7ac664c98803827ae6de594a9f99563e48c5a2f27eb0"}, + {file = "msgpack-1.0.5-cp38-cp38-win32.whl", hash = "sha256:1c0f7c47f0087ffda62961d425e4407961a7ffd2aa004c81b9c07d9269512f6e"}, + {file = "msgpack-1.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:bae7de2026cbfe3782c8b78b0db9cbfc5455e079f1937cb0ab8d133496ac55e1"}, + {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:20c784e66b613c7f16f632e7b5e8a1651aa5702463d61394671ba07b2fc9e025"}, + {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:266fa4202c0eb94d26822d9bfd7af25d1e2c088927fe8de9033d929dd5ba24c5"}, + 
{file = "msgpack-1.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18334484eafc2b1aa47a6d42427da7fa8f2ab3d60b674120bce7a895a0a85bdd"}, + {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57e1f3528bd95cc44684beda696f74d3aaa8a5e58c816214b9046512240ef437"}, + {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586d0d636f9a628ddc6a17bfd45aa5b5efaf1606d2b60fa5d87b8986326e933f"}, + {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a740fa0e4087a734455f0fc3abf5e746004c9da72fbd541e9b113013c8dc3282"}, + {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3055b0455e45810820db1f29d900bf39466df96ddca11dfa6d074fa47054376d"}, + {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a61215eac016f391129a013c9e46f3ab308db5f5ec9f25811e811f96962599a8"}, + {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:362d9655cd369b08fda06b6657a303eb7172d5279997abe094512e919cf74b11"}, + {file = "msgpack-1.0.5-cp39-cp39-win32.whl", hash = "sha256:ac9dd47af78cae935901a9a500104e2dea2e253207c924cc95de149606dc43cc"}, + {file = "msgpack-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:06f5174b5f8ed0ed919da0e62cbd4ffde676a374aba4020034da05fab67b9164"}, + {file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"}, +] + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = 
"multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + 
{file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.21.6" +description = "NumPy is the fundamental package for array computing with Python." 
+optional = false +python-versions = ">=3.7,<3.11" +files = [ + {file = "numpy-1.21.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8737609c3bbdd48e380d463134a35ffad3b22dc56295eff6f79fd85bd0eeeb25"}, + {file = "numpy-1.21.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fdffbfb6832cd0b300995a2b08b8f6fa9f6e856d562800fea9182316d99c4e8e"}, + {file = "numpy-1.21.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3820724272f9913b597ccd13a467cc492a0da6b05df26ea09e78b171a0bb9da6"}, + {file = "numpy-1.21.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f17e562de9edf691a42ddb1eb4a5541c20dd3f9e65b09ded2beb0799c0cf29bb"}, + {file = "numpy-1.21.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f30427731561ce75d7048ac254dbe47a2ba576229250fb60f0fb74db96501a1"}, + {file = "numpy-1.21.6-cp310-cp310-win32.whl", hash = "sha256:d4bf4d43077db55589ffc9009c0ba0a94fa4908b9586d6ccce2e0b164c86303c"}, + {file = "numpy-1.21.6-cp310-cp310-win_amd64.whl", hash = "sha256:d136337ae3cc69aa5e447e78d8e1514be8c3ec9b54264e680cf0b4bd9011574f"}, + {file = "numpy-1.21.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6aaf96c7f8cebc220cdfc03f1d5a31952f027dda050e5a703a0d1c396075e3e7"}, + {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:67c261d6c0a9981820c3a149d255a76918278a6b03b6a036800359aba1256d46"}, + {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a6be4cb0ef3b8c9250c19cc122267263093eee7edd4e3fa75395dfda8c17a8e2"}, + {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c4068a8c44014b2d55f3c3f574c376b2494ca9cc73d2f1bd692382b6dffe3db"}, + {file = "numpy-1.21.6-cp37-cp37m-win32.whl", hash = "sha256:7c7e5fa88d9ff656e067876e4736379cc962d185d5cd808014a8a928d529ef4e"}, + {file = "numpy-1.21.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bcb238c9c96c00d3085b264e5c1a1207672577b93fa666c3b14a45240b14123a"}, + {file = "numpy-1.21.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:82691fda7c3f77c90e62da69ae60b5ac08e87e775b09813559f8901a88266552"}, + {file = "numpy-1.21.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:643843bcc1c50526b3a71cd2ee561cf0d8773f062c8cbaf9ffac9fdf573f83ab"}, + {file = "numpy-1.21.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:357768c2e4451ac241465157a3e929b265dfac85d9214074985b1786244f2ef3"}, + {file = "numpy-1.21.6-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9f411b2c3f3d76bba0865b35a425157c5dcf54937f82bbeb3d3c180789dd66a6"}, + {file = "numpy-1.21.6-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4aa48afdce4660b0076a00d80afa54e8a97cd49f457d68a4342d188a09451c1a"}, + {file = "numpy-1.21.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a96eef20f639e6a97d23e57dd0c1b1069a7b4fd7027482a4c5c451cd7732f4"}, + {file = "numpy-1.21.6-cp38-cp38-win32.whl", hash = "sha256:5c3c8def4230e1b959671eb959083661b4a0d2e9af93ee339c7dada6759a9470"}, + {file = "numpy-1.21.6-cp38-cp38-win_amd64.whl", hash = "sha256:bf2ec4b75d0e9356edea834d1de42b31fe11f726a81dfb2c2112bc1eaa508fcf"}, + {file = "numpy-1.21.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4391bd07606be175aafd267ef9bea87cf1b8210c787666ce82073b05f202add1"}, + {file = "numpy-1.21.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:67f21981ba2f9d7ba9ade60c9e8cbaa8cf8e9ae51673934480e45cf55e953673"}, + {file = "numpy-1.21.6-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:ee5ec40fdd06d62fe5d4084bef4fd50fd4bb6bfd2bf519365f569dc470163ab0"}, + {file = "numpy-1.21.6-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1dbe1c91269f880e364526649a52eff93ac30035507ae980d2fed33aaee633ac"}, + {file = "numpy-1.21.6-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d9caa9d5e682102453d96a0ee10c7241b72859b01a941a397fd965f23b3e016b"}, + {file = "numpy-1.21.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58459d3bad03343ac4b1b42ed14d571b8743dc80ccbf27444f266729df1d6f5b"}, + {file = "numpy-1.21.6-cp39-cp39-win32.whl", hash = "sha256:7f5ae4f304257569ef3b948810816bc87c9146e8c446053539947eedeaa32786"}, + {file = "numpy-1.21.6-cp39-cp39-win_amd64.whl", hash = "sha256:e31f0bb5928b793169b87e3d1e070f2342b22d5245c755e2b81caa29756246c3"}, + {file = "numpy-1.21.6-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd1c8f6bd65d07d3810b90d02eba7997e32abbdf1277a481d698969e921a3be0"}, + {file = "numpy-1.21.6.zip", hash = "sha256:ecb55251139706669fdec2ff073c98ef8e9a84473e51e716211b41aa0f18e656"}, +] + +[[package]] +name = "numpy" +version = "1.25.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db3ccc4e37a6873045580d413fe79b68e47a681af8db2e046f1dacfa11f86eb3"}, + {file = "numpy-1.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90319e4f002795ccfc9050110bbbaa16c944b1c37c0baeea43c5fb881693ae1f"}, + {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4a913e29b418d096e696ddd422d8a5d13ffba4ea91f9f60440a3b759b0187"}, + {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08f2e037bba04e707eebf4bc934f1972a315c883a9e0ebfa8a7756eabf9e357"}, + {file = "numpy-1.25.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bec1e7213c7cb00d67093247f8c4db156fd03075f49876957dca4711306d39c9"}, + {file = "numpy-1.25.2-cp310-cp310-win32.whl", hash = "sha256:7dc869c0c75988e1c693d0e2d5b26034644399dd929bc049db55395b1379e044"}, + {file = "numpy-1.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:834b386f2b8210dca38c71a6e0f4fd6922f7d3fcff935dbe3a570945acb1b545"}, + {file = "numpy-1.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5462d19336db4560041517dbb7759c21d181a67cb01b36ca109b2ae37d32418"}, + {file = "numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5652ea24d33585ea39eb6a6a15dac87a1206a692719ff45d53c5282e66d4a8f"}, + {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2"}, + {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e7f0f7f6d0eee8364b9a6304c2845b9c491ac706048c7e8cf47b83123b8dbf"}, + {file = "numpy-1.25.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb33d5a1cf360304754913a350edda36d5b8c5331a8237268c48f91253c3a364"}, + {file = "numpy-1.25.2-cp311-cp311-win32.whl", hash = "sha256:5883c06bb92f2e6c8181df7b39971a5fb436288db58b5a1c3967702d4278691d"}, + {file = "numpy-1.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:5c97325a0ba6f9d041feb9390924614b60b99209a71a69c876f71052521d42a4"}, + {file = "numpy-1.25.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b79e513d7aac42ae918db3ad1341a015488530d0bb2a6abcbdd10a3a829ccfd3"}, + {file = 
"numpy-1.25.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb942bfb6f84df5ce05dbf4b46673ffed0d3da59f13635ea9b926af3deb76926"}, + {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0746410e73384e70d286f93abf2520035250aad8c5714240b0492a7302fdca"}, + {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7806500e4f5bdd04095e849265e55de20d8cc4b661b038957354327f6d9b295"}, + {file = "numpy-1.25.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b77775f4b7df768967a7c8b3567e309f617dd5e99aeb886fa14dc1a0791141f"}, + {file = "numpy-1.25.2-cp39-cp39-win32.whl", hash = "sha256:2792d23d62ec51e50ce4d4b7d73de8f67a2fd3ea710dcbc8563a51a03fb07b01"}, + {file = "numpy-1.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:76b4115d42a7dfc5d485d358728cdd8719be33cc5ec6ec08632a5d6fca2ed380"}, + {file = "numpy-1.25.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a1329e26f46230bf77b02cc19e900db9b52f398d6722ca853349a782d4cff55"}, + {file = "numpy-1.25.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3abc71e8b6edba80a01a52e66d83c5d14433cbcd26a40c329ec7ed09f37901"}, + {file = "numpy-1.25.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b9735c27cea5d995496f46a8b1cd7b408b3f34b6d50459d9ac8fe3a20cc17bf"}, + {file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"}, +] + +[[package]] +name = "nvidia-ml-py" +version = "12.535.77" +description = "Python Bindings for the NVIDIA Management Library" +optional = false +python-versions = "*" +files = [ + {file = "nvidia-ml-py-12.535.77.tar.gz", hash = "sha256:8d81e5ed993c84006454102af84c4bffdf72ba5c51212b6c0121c65688983e14"}, + {file = "nvidia_ml_py-12.535.77-py3-none-any.whl", hash = "sha256:ae246ec810a05438375ce345e35171bc3f4a906487e9ea2632473d7e4f4bd375"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "opencensus" +version = "0.11.2" +description = "A stats collection and distributed tracing framework" +optional = false +python-versions = "*" +files = [ + {file = "opencensus-0.11.2-py2.py3-none-any.whl", hash = "sha256:7a1a34b87c8db3d9984e97ff05739058342f24de1d700766d59044eee8fb3b3f"}, + {file = "opencensus-0.11.2.tar.gz", hash = "sha256:6154042a236b9ecdd55a23dfbb2743bb3deacd0687e3e0391ec2e0c74950d66f"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.0.0,<3.0.0", markers = "python_version >= \"3.6\""} +opencensus-context = ">=0.1.3" + +[[package]] +name = "opencensus-context" +version = "0.1.3" +description = "OpenCensus Runtime Context" +optional = false +python-versions = "*" +files = [ + {file = "opencensus-context-0.1.3.tar.gz", hash = "sha256:a03108c3c10d8c80bb5ddf5c8a1f033161fa61972a9917f9b9b3a18517f0088c"}, + {file = "opencensus_context-0.1.3-py2.py3-none-any.whl", hash = "sha256:073bb0590007af276853009fac7e4bab1d523c3f03baf4cb4511ca38967c6039"}, +] + +[[package]] +name = 
"openshift-client" +version = "1.0.18" +description = "OpenShift python client" +optional = false +python-versions = "*" +files = [ + {file = "openshift-client-1.0.18.tar.gz", hash = "sha256:be3979440cfd96788146a3a1650dabe939d4d516eea0b39f87e66d2ab39495b1"}, + {file = "openshift_client-1.0.18-py2.py3-none-any.whl", hash = "sha256:d8a84080307ccd9556f6c62a3707a3e6507baedee36fa425754f67db9ded528b"}, +] + +[package.dependencies] +paramiko = "*" +pyyaml = "*" +six = "*" + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "paramiko" +version = "3.3.1" +description = "SSH2 protocol library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "paramiko-3.3.1-py3-none-any.whl", hash = "sha256:b7bc5340a43de4287bbe22fe6de728aa2c22468b2a849615498dd944c2f275eb"}, + {file = "paramiko-3.3.1.tar.gz", hash = "sha256:6a3777a961ac86dbef375c5f5b8d50014a1a96d0fd7f054a43bc880134b0ff77"}, +] + +[package.dependencies] +bcrypt = ">=3.2" +cryptography = ">=3.3" +pynacl = ">=1.5" + +[package.extras] +all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +invoke = ["invoke (>=2.0)"] + +[[package]] +name = "pdoc3" +version = "0.10.0" +description = "Auto-generate API documentation for Python projects." +optional = false +python-versions = ">= 3.6" +files = [ + {file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"}, + {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, +] + +[package.dependencies] +mako = "*" +markdown = ">=3.0" + +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." +optional = false +python-versions = ">=3.6" +files = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] + +[[package]] +name = "platformdirs" +version = "3.10.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.2.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prometheus-client" +version = "0.17.1" +description = "Python client for the Prometheus monitoring system." +optional = false +python-versions = ">=3.6" +files = [ + {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, + {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, +] + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "protobuf" +version = "4.24.0" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "protobuf-4.24.0-cp310-abi3-win32.whl", hash = "sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52"}, + {file = "protobuf-4.24.0-cp310-abi3-win_amd64.whl", hash = "sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3"}, + {file = "protobuf-4.24.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5"}, + {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d"}, + {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7"}, + {file = "protobuf-4.24.0-cp37-cp37m-win32.whl", hash = "sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04"}, + {file = "protobuf-4.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61"}, + {file = "protobuf-4.24.0-cp38-cp38-win32.whl", hash = "sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653"}, + {file = "protobuf-4.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109"}, + {file = "protobuf-4.24.0-cp39-cp39-win32.whl", hash = "sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e"}, + {file = "protobuf-4.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf"}, + {file = "protobuf-4.24.0-py3-none-any.whl", hash = 
"sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201"}, + {file = "protobuf-4.24.0.tar.gz", hash = "sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85"}, +] + +[[package]] +name = "psutil" +version = "5.9.5" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, + {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, + {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, + {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, + {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, + {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, + {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, + {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "py-spy" +version = "0.3.14" +description = "Sampling profiler for Python programs" +optional = false +python-versions = "*" +files = [ + {file = "py_spy-0.3.14-py2.py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5b342cc5feb8d160d57a7ff308de153f6be68dcf506ad02b4d67065f2bae7f45"}, + {file = "py_spy-0.3.14-py2.py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fe7efe6c91f723442259d428bf1f9ddb9c1679828866b353d539345ca40d9dd2"}, + {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590905447241d789d9de36cff9f52067b6f18d8b5e9fb399242041568d414461"}, + {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6211fe7f587b3532ba9d300784326d9a6f2b890af7bf6fff21a029ebbc812b"}, + {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3e8e48032e71c94c3dd51694c39e762e4bbfec250df5bf514adcdd64e79371e0"}, + 
{file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f59b0b52e56ba9566305236375e6fc68888261d0d36b5addbe3cf85affbefc0e"}, + {file = "py_spy-0.3.14-py2.py3-none-win_amd64.whl", hash = "sha256:8f5b311d09f3a8e33dbd0d44fc6e37b715e8e0c7efefafcda8bfd63b31ab5a31"}, +] + +[[package]] +name = "pyasn1" +version = "0.5.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, + {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.6.0" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pydantic" +version = "1.10.12" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, + {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, + {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, + {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, + {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, + {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, + {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, + {file = 
"pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, + {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, + {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, + {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, + {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, + {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, + {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, + {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, + {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, + {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, + {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, + {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, + {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, + {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash 
= "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, + {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, + {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, + {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pygments" +version = "2.16.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + +[[package]] +name = "pyre-extensions" +version = "0.0.30" +description = "Type system extensions for use with the pyre type checker" +optional = false +python-versions = "*" +files = [ + {file = "pyre-extensions-0.0.30.tar.gz", hash = "sha256:ba7923c486e089afb37a10623a8f4ae82d73cff42426d711c48af070e5bc31b2"}, + {file = "pyre_extensions-0.0.30-py3-none-any.whl", hash = "sha256:32b37ede4eed0ea879fdd6d84e0c7811e129f19b76614f1be3a6b47f9a4b1fa0"}, +] + +[package.dependencies] +typing-extensions = "*" +typing-inspect = "*" + +[[package]] 
+name = "pyrsistent" +version = "0.19.3" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = 
"pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, +] + +[[package]] +name = "pytest" +version = "7.4.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, + {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.11.1" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, + {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = 
"sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = 
"PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "ray" +version = "2.5.0" +description = "Ray provides a simple, universal API for building distributed applications." +optional = false +python-versions = "*" +files = [ + {file = "ray-2.5.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:d1bebc874e896880c1215f4c1a11697ada49fa1595d6d99d7c5b4dc03030df36"}, + {file = "ray-2.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0285df2d24cacc36ca64b7852178a9bf37e3fc88545752fc2b46c27396965c1"}, + {file = "ray-2.5.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:38935d46c2597c1d1f113e1c8f88e2716c67052c480de5b2a0265e0a1a5ce88f"}, + {file = "ray-2.5.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:d53a07c9a9dbc134945a26980f557e9ff0f591bf8cabed1a6ebf921768d1c8bd"}, + {file = "ray-2.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:ef26ba24461dad98365b48ef01e27e70bc9737f4cf4734115804153d7d9195dc"}, + {file = "ray-2.5.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d714175a5000ca91f82646a9b72521118bb6d2db5568e1b7ae9ceb64769716b6"}, + {file = "ray-2.5.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:0cde929e63497ed5f1c8626e5ccf7595ef6acaf1e7e270ad7c12f8e1c7695244"}, + {file = "ray-2.5.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:7e5512abf62c05c9ff90b1c89a4e0f2e45ee00e73f816eb8265e3ebd92fe4064"}, + {file = "ray-2.5.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3bf36beb213f89c0eb1ec5ac6ffddc8f53e616be745167f00ca017abd8672a2d"}, + {file = "ray-2.5.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:59c2448b07f45d9a9d8e594bb5337bd35a5fea04e42cb4211a3346c2c0d066b0"}, + {file = "ray-2.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:63008dd659d9ef25b0e20f0e1a285e8266e0af68b1178bca1b6ae43e49a68104"}, + {file = "ray-2.5.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:e9464e93d6b72e0da69b9c5ab0501cc40f2db14801e22c6b97fa4e8039647892"}, + {file = "ray-2.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7dc00fac119bfa1c2f8ac456d50a728346d6f2722fb7a21bf70841fc7476c285"}, + {file = "ray-2.5.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d76051519bd4ae39fda4a87536978cafdebf2843c1c29a9f734c503d8ea676cd"}, + {file = "ray-2.5.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:9a8e06dc5d4201129c28b6768a971c474b82a23935b2e40461ffc7f1c2f4942a"}, + {file = "ray-2.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:849014b62ca50ff106b7a5d41430346e2762b1c4c803673af076209925b8f912"}, + {file = "ray-2.5.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:a1b52c12a3349d8e37357df31438b6f1b12c7719ef41bdf5089fc7e78e8ab212"}, + {file = "ray-2.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25f3d50c27c4c4756259d093d152381c6604bb96684a0cf43c55ddcc2eb73f79"}, + {file = "ray-2.5.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1cb4f6ef9cfdb69d2ae582f357e977527944390e2f5cbbf51efd8252ed4c9a11"}, + {file = "ray-2.5.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:662cff303c086369a29283badcd7445b7f911874d8407b2c589b1ccbf6028d2e"}, + {file = "ray-2.5.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:a2cea10981dad7cfd187edf5e225a667eb114269afc5f2321b52113ef2d86123"}, +] + +[package.dependencies] +aiohttp = {version = ">=3.7", optional = true, markers = "extra == \"default\""} +aiohttp-cors = {version = "*", optional = true, markers = "extra == \"default\""} +aiosignal = "*" +attrs = "*" +click = ">=7.0" +colorful = {version = "*", optional = true, markers = "extra == \"default\""} +filelock = "*" +frozenlist = "*" +gpustat = {version = ">=1.0.0", optional = true, markers = "extra == \"default\""} +grpcio = [ + {version = ">=1.32.0,<=1.51.3", markers = "python_version < \"3.10\" and sys_platform != \"darwin\""}, + {version = ">=1.32.0,<=1.49.1", markers = "python_version < \"3.10\" and sys_platform == \"darwin\""}, + {version = ">=1.42.0,<=1.51.3", markers = "python_version >= \"3.10\" and sys_platform != \"darwin\""}, + {version = ">=1.42.0,<=1.49.1", markers = "python_version >= \"3.10\" and sys_platform == \"darwin\""}, +] +jsonschema = "*" +msgpack = ">=1.0.0,<2.0.0" +numpy = [ + {version = ">=1.16", markers = "python_version < \"3.9\""}, + {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, +] +opencensus = {version = "*", optional = true, markers = "extra == \"default\""} +packaging = "*" +prometheus-client = {version = ">=0.7.1", optional = true, markers = "extra == \"default\""} +protobuf = ">=3.15.3,<3.19.5 || >3.19.5" +py-spy = {version = ">=0.2.0", optional = true, markers = "extra == \"default\""} +pydantic = {version = "*", optional = true, markers = "extra == \"default\""} +pyyaml = "*" +requests = "*" +smart-open = {version = "*", optional = true, markers = "extra == \"default\""} +typing-extensions = {version = "*", markers = "python_version < \"3.8\""} +virtualenv = {version = ">=20.0.24,<20.21.1", optional = true, markers = "extra == \"default\""} + +[package.extras] +air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "fsspec", "gpustat (>=1.0.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi", "fsspec", "gpustat (>=1.0.0)", "gymnasium (==0.26.3)", "kubernetes", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic", "pyyaml", "ray-cpp (==2.5.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "urllib3", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)"] +cpp = ["ray-cpp (==2.5.0)"] +data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] +default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "gpustat (>=1.0.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic", "requests", "smart-open", "virtualenv (>=20.0.24,<20.21.1)"] +k8s = ["kubernetes", "urllib3"] +observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] +rllib = ["dm-tree", "gymnasium (==0.26.3)", "lz4", "pandas", "pyarrow (>=6.0.1)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "tensorboardX (>=1.9)", "typer"] +serve = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "gpustat (>=1.0.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy 
(>=0.2.0)", "pydantic", "requests", "smart-open", "starlette", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)"] +train = ["pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] +tune = ["pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "1.3.1" +description = "OAuthlib authentication support for Requests." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rich" +version = "12.6.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.6.3,<4.0.0" +files = [ + {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, + {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, +] + +[package.dependencies] +commonmark = ">=0.9.0,<0.10.0" +pygments = ">=2.6.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "setuptools" +version = "68.0.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs 
(>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "smart-open" +version = "6.3.0" +description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "smart_open-6.3.0-py3-none-any.whl", hash = "sha256:b4c9ae193ad6d3e7add50944b86afa0d150bd821ab8ec21edb26d9a06b66f6a8"}, + {file = "smart_open-6.3.0.tar.gz", hash = "sha256:d5238825fe9a9340645fac3d75b287c08fbb99fb2b422477de781c9f5f09e019"}, +] + +[package.extras] +all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] +azure = ["azure-common", "azure-core", "azure-storage-blob"] +gcs = ["google-cloud-storage (>=2.6.0)"] +http = ["requests"] +s3 = ["boto3"] +ssh = ["paramiko"] +test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] +webhdfs = ["requests"] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." 
+optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "urllib3" +version = "1.26.16" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, + {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "virtualenv" +version = "20.21.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, + {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, +] + +[package.dependencies] +distlib = ">=0.3.6,<1" +filelock = ">=3.4.1,<4" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.8\""} +platformdirs = ">=2.4,<4" + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "wcwidth" +version = "0.2.6" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] + +[[package]] +name = "websocket-client" +version = "1.6.1" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.7" +files = [ + {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, + {file = "websocket_client-1.6.1-py3-none-any.whl", hash = "sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, +] + +[package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "yarl" +version = "1.9.2" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, + {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, + {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, + {file = 
"yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, + {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, + {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, + {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, + {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, + {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, + {file = 
"yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, + {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, + {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, + {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, + {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, + {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.7" +content-hash = "53b760742497ab5536551d316f6844e60ade48e3e3b9172b0ed4ba71ee608852" diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index c62afb19..00000000 --- a/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -openshift-client==1.0.18 -rich==12.5.1 -ray[default]==2.5.0 -kubernetes>=25.3.0,<27 -codeflare-torchx==0.6.0.dev1 -pydantic<2 # 2.0+ broke ray[default] see detail: https://github.com/ray-project/ray/pull/37000 -cryptography==40.0.2 -executing==1.2.0 From 52760fd1c409c4319c287125eb506d10ff65f7e6 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Wed, 16 Aug 2023 16:59:41 +0100 Subject: [PATCH 008/496] making test dependencies optional, and updating readme accordingly --- README.md | 8 +++++++- pyproject.toml | 3 +++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index b20bd5e6..dc5cb147 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,13 @@ Can be installed via `pip`: `pip install codeflare-sdk` We recommend using Python 3.9 for development. Install dependencies: `poetry install` -This will install all requirements as specified in the poetry.lock file. +This will install standard requirements as specified in the poetry.lock file. Test and docs dependencies are optional. 
+ +To include test dependencies run: `poetry install --with test` + +To include docs dependencies run: `poetry instal --with docs` + +To include test and docs dependencies run: `poetry install --with test,docs` If you require a requirements.txt file you can run: diff --git a/pyproject.toml b/pyproject.toml index 52c237d2..5a2dbc31 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,9 @@ optional = true [tool.poetry.group.docs.dependencies] pdoc3 = "0.10.0" +[tool.poetry.group.test] +optional = true + [tool.poetry.group.test.dependencies] pytest = "7.4.0" coverage = "7.2.7" From 5e2c745f034e84a99f1da51d637bebdf8954bfca Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Tue, 22 Aug 2023 10:58:55 +0100 Subject: [PATCH 009/496] removing requirements files and updating readme to build from root dir --- .github/build/Containerfile | 5 +--- .github/build/README.md | 4 +++- .github/build/pyproject.toml | 44 ---------------------------------- .github/build/requirements.txt | 11 --------- 4 files changed, 4 insertions(+), 60 deletions(-) delete mode 100644 .github/build/pyproject.toml delete mode 100644 .github/build/requirements.txt diff --git a/.github/build/Containerfile b/.github/build/Containerfile index 9532d328..a6e91faa 100644 --- a/.github/build/Containerfile +++ b/.github/build/Containerfile @@ -18,7 +18,4 @@ RUN pip3 install poetry COPY pyproject.toml ./ RUN poetry install -#COPY requirements.txt /tmp/requirements.txt -#RUN python -m pip install -r /tmp/requirements.txt - -CMD bash \ No newline at end of file +CMD bash diff --git a/.github/build/README.md b/.github/build/README.md index 48af1512..5cdba731 100644 --- a/.github/build/README.md +++ b/.github/build/README.md @@ -1,3 +1,5 @@ # Pre-Commit Build Artifacts -This directory contains the artifacts required to build the codeflare-sdk pre-commit image. \ No newline at end of file +This directory contains the artifacts required to build the codeflare-sdk pre-commit image. + +To build the image run `podman build -f .github/build/Containerfile .` from the root directory. 
\ No newline at end of file diff --git a/.github/build/pyproject.toml b/.github/build/pyproject.toml deleted file mode 100644 index b2167e99..00000000 --- a/.github/build/pyproject.toml +++ /dev/null @@ -1,44 +0,0 @@ -[tool.poetry] -name = "codeflare-sdk" -version = "0.0.0-dev" -description = "Python SDK for codeflare client" - -license = "Apache-2.0" - -authors = [ - "Michael Clifford ", - "Mustafa Eyceoz ", - "Abhishek Malvankar ", - "Atin Sood ", -] - -readme = 'README.md' - -repository = "https://github.com/project-codeflare/codeflare-sdk" -homepage = "https://github.com/project-codeflare/codeflare-sdk" - -keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] - -[tool.poetry.dependencies] -python = ">=3.8,<4.0" -openshift-client = "1.0.18" -rich = "^12.5" -ray = {version = "2.5.0", extras = ["default"]} -kubernetes = ">= 25.3.0, < 27" -codeflare-torchx = "0.6.0.dev0" -cryptography = "40.0.2" -executing = "1.2.0" -pydantic = "< 2" -pre-commit = "3.3.3" - -[tool.poetry.group.docs] -optional = true - -[tool.poetry.group.docs.dependencies] -pdoc3 = "0.10.0" - -[tool.poetry.group.test.dependencies] -pytest = "7.4.0" -coverage = "7.2.7" -pytest-mock = "3.11.1" -black = "22.3.0" diff --git a/.github/build/requirements.txt b/.github/build/requirements.txt deleted file mode 100644 index 3a7351ac..00000000 --- a/.github/build/requirements.txt +++ /dev/null @@ -1,11 +0,0 @@ -openshift-client==1.0.18 -rich==12.5.1 -ray[default]==2.1.0 -kubernetes==26.1.0 -codeflare-torchx==0.6.0.dev0 -pre-commit -poetry -pytest -pytest-mock -coverage -black==22.3.0 From 38c6ff39a132c0ee83d4cc468013b8ede6494f9a Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Mon, 28 Aug 2023 09:45:46 +0100 Subject: [PATCH 010/496] updating coverage-badge.yaml --- .github/workflows/coverage-badge.yaml | 6 ++---- README.md | 4 +++- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/coverage-badge.yaml b/.github/workflows/coverage-badge.yaml index 625576fc..692998af 100644 --- a/.github/workflows/coverage-badge.yaml +++ b/.github/workflows/coverage-badge.yaml @@ -20,10 +20,8 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install pytest==6.2.4 - pip install pytest-mock==3.6.1 - pip install coverage - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + pip install poetry + poetry install --with test - name: Generate coverage report run: | coverage run -m --source=src pytest -v tests/unit_test.py diff --git a/README.md b/README.md index dc5cb147..de8a97d4 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ This will install standard requirements as specified in the poetry.lock file. Te To include test dependencies run: `poetry install --with test` -To include docs dependencies run: `poetry instal --with docs` +To include docs dependencies run: `poetry install --with docs` To include test and docs dependencies run: `poetry install --with test,docs` @@ -39,6 +39,8 @@ If you require a requirements.txt file you can run: We use pre-commit to make sure the code is consistently formatted. To make sure that pre-commit is run every time you commit changes, simply run `pre-commit install` +To build the codeflare-sdk pre-commit image run `podman build -f .github/build/Containerfile .` from the root directory. + ### Testing - To install codeflare-sdk in editable mode, run `pip install -e .` from the repo root. 
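The coverage-badge workflow change above and the unit-test workflow changes that follow converge on the same local developer flow: install the optional test dependency group with Poetry, then run the unit tests under coverage. A minimal local equivalent is sketched below, assuming Poetry 1.2 or newer (for dependency group support) and the repository root as the working directory:

    # Local sketch of the CI steps; the test group in pyproject.toml provides
    # pytest, pytest-mock, coverage, and black.
    python -m pip install --upgrade pip
    pip install poetry
    poetry install --with test
    poetry run coverage run -m --source=src pytest -v tests/unit_test.py
    poetry run coverage report -m   # the workflows gate on this percentage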
From e2506a2486234b952f547ddc8083e5ff9da932c2 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Mon, 28 Aug 2023 14:09:26 +0100 Subject: [PATCH 011/496] fixing failing pre-commit check --- .github/build/README.md | 4 ++-- README.md | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/build/README.md b/.github/build/README.md index 5cdba731..9731ef30 100644 --- a/.github/build/README.md +++ b/.github/build/README.md @@ -1,5 +1,5 @@ # Pre-Commit Build Artifacts -This directory contains the artifacts required to build the codeflare-sdk pre-commit image. +This directory contains the artifacts required to build the codeflare-sdk pre-commit image. -To build the image run `podman build -f .github/build/Containerfile .` from the root directory. \ No newline at end of file +To build the image run `podman build -f .github/build/Containerfile .` from the root directory. diff --git a/README.md b/README.md index de8a97d4..c3e5d5d8 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ Can be installed via `pip`: `pip install codeflare-sdk` ### Prerequisites We recommend using Python 3.9 for development. -Install dependencies: `poetry install` +Install dependencies: `poetry install` This will install standard requirements as specified in the poetry.lock file. Test and docs dependencies are optional. To include test dependencies run: `poetry install --with test` From 6de9536a0c41cf7fd597e99726fe72061e98d0c0 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Tue, 29 Aug 2023 08:46:18 +0100 Subject: [PATCH 012/496] update unit test workflow --- .github/workflows/unit-tests.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index daf6071d..7c181991 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -14,6 +14,12 @@ jobs: image: quay.io/project-codeflare/codeflare-sdk-precommit:v0.0.3 steps: - uses: actions/checkout@v3 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip3 install poetry + poetry lock --no-update + poetry install --with test - name: Test with pytest and check coverage run: | coverage run -m --source=src pytest -v tests/unit_test.py From f4e2964ab070cc6c02a1ecf06395d6e38642ff57 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Tue, 29 Aug 2023 16:05:01 +0100 Subject: [PATCH 013/496] updating unit-test.yml --- .github/workflows/unit-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) mode change 100644 => 100755 .github/workflows/unit-tests.yml diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml old mode 100644 new mode 100755 index 7c181991..fcd22e9c --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -22,6 +22,6 @@ jobs: poetry install --with test - name: Test with pytest and check coverage run: | - coverage run -m --source=src pytest -v tests/unit_test.py + poetry run coverage run -m --source=src pytest -v tests/unit_test.py coverage=$(coverage report -m | tail -1 | tail -c 4 | head -c 2) if (( $coverage < 90 )); then exit 1; else echo "Coverage passed, ${coverage}%"; fi From fb19983a8708563311b2de8322e0d6f3b71089ea Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Thu, 7 Sep 2023 11:12:53 +0100 Subject: [PATCH 014/496] adding pytorch-lightning dependency --- poetry.lock | 342 ++++++++++++++++++++++++++++++++++--------------- pyproject.toml | 3 +- 2 files changed, 242 insertions(+), 103 deletions(-) diff --git a/poetry.lock b/poetry.lock index 
9767a715..02235ac1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -99,12 +99,10 @@ files = [ [package.dependencies] aiosignal = ">=1.1.2" async-timeout = ">=4.0.0a3,<5.0" -asynctest = {version = "0.13.0", markers = "python_version < \"3.8\""} attrs = ">=17.3.0" charset-normalizer = ">=2.0,<4.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} yarl = ">=1.0,<2.0" [package.extras] @@ -160,20 +158,6 @@ files = [ {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, ] -[package.dependencies] -typing-extensions = {version = ">=3.6.5", markers = "python_version < \"3.8\""} - -[[package]] -name = "asynctest" -version = "0.13.0" -description = "Enhance the standard unittest package with features for testing asyncio libraries" -optional = false -python-versions = ">=3.5" -files = [ - {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, - {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, -] - [[package]] name = "attrs" version = "23.1.0" @@ -185,9 +169,6 @@ files = [ {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] -[package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} - [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] dev = ["attrs[docs,tests]", "pre-commit"] @@ -440,7 +421,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] name = "codeflare-torchx" @@ -801,6 +781,10 @@ files = [ {file = "fsspec-2023.1.0.tar.gz", hash = "sha256:fbae7f20ff801eb5f7d0bedf81f25c787c0dfac5e982d98fa3884a9cde2b5411"}, ] +[package.dependencies] +aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""} +requests = {version = "*", optional = true, markers = "extra == \"http\""} + [package.extras] abfs = ["adlfs"] adl = ["adlfs"] @@ -1047,7 +1031,6 @@ files = [ ] [package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] @@ -1084,6 +1067,23 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + [[package]] name = "jinxed" version = "1.2.0" @@ -1111,11 +1111,9 @@ files = [ [package.dependencies] attrs = ">=17.4.0" -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" -typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] @@ -1147,6 +1145,26 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] +[[package]] +name = "lightning-utilities" +version = "0.9.0" +description = "PyTorch Lightning Sample project." +optional = false +python-versions = ">=3.7" +files = [ + {file = "lightning-utilities-0.9.0.tar.gz", hash = "sha256:efbf2c488c257f942abdfd06cf646fb84ca215a9663b60081811e22a15ee033b"}, + {file = "lightning_utilities-0.9.0-py3-none-any.whl", hash = "sha256:918dd90c775719e3855631db6282ad75c14da4c5727c4cebdd1589d865fad03d"}, +] + +[package.dependencies] +packaging = ">=17.1" +typing-extensions = "*" + +[package.extras] +cli = ["fire"] +docs = ["requests (>=2.0.0)"] +typing = ["mypy (>=1.0.0)"] + [[package]] name = "mako" version = "1.2.4" @@ -1159,7 +1177,6 @@ files = [ ] [package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} MarkupSafe = ">=0.9.2" [package.extras] @@ -1244,6 +1261,23 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "mpmath" +version = "1.3.0" +description = "Python library for arbitrary-precision floating-point arithmetic" +optional = false +python-versions = "*" +files = [ + {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, + {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, +] + +[package.extras] +develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] +docs = ["sphinx"] +gmpy = ["gmpy2 (>=2.1.0a4)"] +tests = ["pytest (>=4.6)"] + [[package]] name = "msgpack" version = "1.0.5" @@ -1411,77 +1445,58 @@ files = [ ] [[package]] -name = "numpy" -version = "1.21.6" -description = "NumPy is the fundamental package for array computing with Python." 
-optional = false -python-versions = ">=3.7,<3.11" -files = [ - {file = "numpy-1.21.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8737609c3bbdd48e380d463134a35ffad3b22dc56295eff6f79fd85bd0eeeb25"}, - {file = "numpy-1.21.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fdffbfb6832cd0b300995a2b08b8f6fa9f6e856d562800fea9182316d99c4e8e"}, - {file = "numpy-1.21.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3820724272f9913b597ccd13a467cc492a0da6b05df26ea09e78b171a0bb9da6"}, - {file = "numpy-1.21.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f17e562de9edf691a42ddb1eb4a5541c20dd3f9e65b09ded2beb0799c0cf29bb"}, - {file = "numpy-1.21.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f30427731561ce75d7048ac254dbe47a2ba576229250fb60f0fb74db96501a1"}, - {file = "numpy-1.21.6-cp310-cp310-win32.whl", hash = "sha256:d4bf4d43077db55589ffc9009c0ba0a94fa4908b9586d6ccce2e0b164c86303c"}, - {file = "numpy-1.21.6-cp310-cp310-win_amd64.whl", hash = "sha256:d136337ae3cc69aa5e447e78d8e1514be8c3ec9b54264e680cf0b4bd9011574f"}, - {file = "numpy-1.21.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6aaf96c7f8cebc220cdfc03f1d5a31952f027dda050e5a703a0d1c396075e3e7"}, - {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:67c261d6c0a9981820c3a149d255a76918278a6b03b6a036800359aba1256d46"}, - {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a6be4cb0ef3b8c9250c19cc122267263093eee7edd4e3fa75395dfda8c17a8e2"}, - {file = "numpy-1.21.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c4068a8c44014b2d55f3c3f574c376b2494ca9cc73d2f1bd692382b6dffe3db"}, - {file = "numpy-1.21.6-cp37-cp37m-win32.whl", hash = "sha256:7c7e5fa88d9ff656e067876e4736379cc962d185d5cd808014a8a928d529ef4e"}, - {file = "numpy-1.21.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bcb238c9c96c00d3085b264e5c1a1207672577b93fa666c3b14a45240b14123a"}, - {file = "numpy-1.21.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:82691fda7c3f77c90e62da69ae60b5ac08e87e775b09813559f8901a88266552"}, - {file = "numpy-1.21.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:643843bcc1c50526b3a71cd2ee561cf0d8773f062c8cbaf9ffac9fdf573f83ab"}, - {file = "numpy-1.21.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:357768c2e4451ac241465157a3e929b265dfac85d9214074985b1786244f2ef3"}, - {file = "numpy-1.21.6-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9f411b2c3f3d76bba0865b35a425157c5dcf54937f82bbeb3d3c180789dd66a6"}, - {file = "numpy-1.21.6-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4aa48afdce4660b0076a00d80afa54e8a97cd49f457d68a4342d188a09451c1a"}, - {file = "numpy-1.21.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a96eef20f639e6a97d23e57dd0c1b1069a7b4fd7027482a4c5c451cd7732f4"}, - {file = "numpy-1.21.6-cp38-cp38-win32.whl", hash = "sha256:5c3c8def4230e1b959671eb959083661b4a0d2e9af93ee339c7dada6759a9470"}, - {file = "numpy-1.21.6-cp38-cp38-win_amd64.whl", hash = "sha256:bf2ec4b75d0e9356edea834d1de42b31fe11f726a81dfb2c2112bc1eaa508fcf"}, - {file = "numpy-1.21.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4391bd07606be175aafd267ef9bea87cf1b8210c787666ce82073b05f202add1"}, - {file = "numpy-1.21.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:67f21981ba2f9d7ba9ade60c9e8cbaa8cf8e9ae51673934480e45cf55e953673"}, - {file = "numpy-1.21.6-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:ee5ec40fdd06d62fe5d4084bef4fd50fd4bb6bfd2bf519365f569dc470163ab0"}, - {file = "numpy-1.21.6-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1dbe1c91269f880e364526649a52eff93ac30035507ae980d2fed33aaee633ac"}, - {file = "numpy-1.21.6-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d9caa9d5e682102453d96a0ee10c7241b72859b01a941a397fd965f23b3e016b"}, - {file = "numpy-1.21.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58459d3bad03343ac4b1b42ed14d571b8743dc80ccbf27444f266729df1d6f5b"}, - {file = "numpy-1.21.6-cp39-cp39-win32.whl", hash = "sha256:7f5ae4f304257569ef3b948810816bc87c9146e8c446053539947eedeaa32786"}, - {file = "numpy-1.21.6-cp39-cp39-win_amd64.whl", hash = "sha256:e31f0bb5928b793169b87e3d1e070f2342b22d5245c755e2b81caa29756246c3"}, - {file = "numpy-1.21.6-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd1c8f6bd65d07d3810b90d02eba7997e32abbdf1277a481d698969e921a3be0"}, - {file = "numpy-1.21.6.zip", hash = "sha256:ecb55251139706669fdec2ff073c98ef8e9a84473e51e716211b41aa0f18e656"}, +name = "networkx" +version = "3.1" +description = "Python package for creating and manipulating graphs and networks" +optional = false +python-versions = ">=3.8" +files = [ + {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, + {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, ] +[package.extras] +default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] +developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] +doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] +test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] + [[package]] name = "numpy" -version = "1.25.2" +version = "1.24.4" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db3ccc4e37a6873045580d413fe79b68e47a681af8db2e046f1dacfa11f86eb3"}, - {file = "numpy-1.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90319e4f002795ccfc9050110bbbaa16c944b1c37c0baeea43c5fb881693ae1f"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4a913e29b418d096e696ddd422d8a5d13ffba4ea91f9f60440a3b759b0187"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08f2e037bba04e707eebf4bc934f1972a315c883a9e0ebfa8a7756eabf9e357"}, - {file = "numpy-1.25.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bec1e7213c7cb00d67093247f8c4db156fd03075f49876957dca4711306d39c9"}, - {file = "numpy-1.25.2-cp310-cp310-win32.whl", hash = "sha256:7dc869c0c75988e1c693d0e2d5b26034644399dd929bc049db55395b1379e044"}, - {file = "numpy-1.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:834b386f2b8210dca38c71a6e0f4fd6922f7d3fcff935dbe3a570945acb1b545"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5462d19336db4560041517dbb7759c21d181a67cb01b36ca109b2ae37d32418"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5652ea24d33585ea39eb6a6a15dac87a1206a692719ff45d53c5282e66d4a8f"}, - {file = 
"numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e7f0f7f6d0eee8364b9a6304c2845b9c491ac706048c7e8cf47b83123b8dbf"}, - {file = "numpy-1.25.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb33d5a1cf360304754913a350edda36d5b8c5331a8237268c48f91253c3a364"}, - {file = "numpy-1.25.2-cp311-cp311-win32.whl", hash = "sha256:5883c06bb92f2e6c8181df7b39971a5fb436288db58b5a1c3967702d4278691d"}, - {file = "numpy-1.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:5c97325a0ba6f9d041feb9390924614b60b99209a71a69c876f71052521d42a4"}, - {file = "numpy-1.25.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b79e513d7aac42ae918db3ad1341a015488530d0bb2a6abcbdd10a3a829ccfd3"}, - {file = "numpy-1.25.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb942bfb6f84df5ce05dbf4b46673ffed0d3da59f13635ea9b926af3deb76926"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0746410e73384e70d286f93abf2520035250aad8c5714240b0492a7302fdca"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7806500e4f5bdd04095e849265e55de20d8cc4b661b038957354327f6d9b295"}, - {file = "numpy-1.25.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b77775f4b7df768967a7c8b3567e309f617dd5e99aeb886fa14dc1a0791141f"}, - {file = "numpy-1.25.2-cp39-cp39-win32.whl", hash = "sha256:2792d23d62ec51e50ce4d4b7d73de8f67a2fd3ea710dcbc8563a51a03fb07b01"}, - {file = "numpy-1.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:76b4115d42a7dfc5d485d358728cdd8719be33cc5ec6ec08632a5d6fca2ed380"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a1329e26f46230bf77b02cc19e900db9b52f398d6722ca853349a782d4cff55"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3abc71e8b6edba80a01a52e66d83c5d14433cbcd26a40c329ec7ed09f37901"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b9735c27cea5d995496f46a8b1cd7b408b3f34b6d50459d9ac8fe3a20cc17bf"}, - {file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"}, +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] [[package]] @@ -1622,9 +1637,6 @@ files = [ {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.8\""} - [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] @@ -1640,9 +1652,6 
@@ files = [ {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} - [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] @@ -1918,7 +1927,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" @@ -1958,6 +1966,37 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "pytorch-lightning" +version = "2.0.8" +description = "PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers. Scale your models. Write less boilerplate." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytorch-lightning-2.0.8.tar.gz", hash = "sha256:fb7e8fbe473158b2c1666b6b31fb996c8aa2c3f5e8e2a54a3f50a7b5b2d00a20"}, + {file = "pytorch_lightning-2.0.8-py3-none-any.whl", hash = "sha256:718d11f22551d95ef38614b4727433553c95ea2b50cf843938fb13baf34325a6"}, +] + +[package.dependencies] +fsspec = {version = ">2021.06.0", extras = ["http"]} +lightning-utilities = ">=0.7.0" +numpy = ">=1.17.2" +packaging = ">=17.1" +PyYAML = ">=5.4" +torch = ">=1.11.0" +torchmetrics = ">=0.7.0" +tqdm = ">=4.57.0" +typing-extensions = ">=4.0.0" + +[package.extras] +all = ["deepspeed (>=0.8.2)", "gym[classic-control] (>=0.17.0)", "hydra-core (>=1.0.5)", "ipython[all] (<8.14.1)", "jsonargparse[signatures] (>=4.18.0,<4.23.0)", "lightning-utilities (>=0.7.0)", "matplotlib (>3.1)", "omegaconf (>=2.0.5)", "rich (>=12.3.0)", "tensorboardX (>=2.2)", "torchmetrics (>=0.10.0)", "torchvision (>=0.12.0)"] +deepspeed = ["deepspeed (>=0.8.2)"] +dev = ["cloudpickle (>=1.3)", "coverage (==7.3.0)", "deepspeed (>=0.8.2)", "fastapi (<0.100.0)", "gym[classic-control] (>=0.17.0)", "hydra-core (>=1.0.5)", "ipython[all] (<8.14.1)", "jsonargparse[signatures] (>=4.18.0,<4.23.0)", "lightning-utilities (>=0.7.0)", "matplotlib (>3.1)", "omegaconf (>=2.0.5)", "onnx (<1.15.0)", "onnxruntime (<1.16.0)", "pandas (>1.0)", "protobuf (<=3.20.1)", "psutil (<5.9.6)", "pytest (==7.4.0)", "pytest-cov (==4.1.0)", "pytest-forked (==1.4.0)", "pytest-rerunfailures (==10.3)", "rich (>=12.3.0)", "scikit-learn (>0.22.1)", "tensorboard (>=2.9.1)", "tensorboardX (>=2.2)", "torchmetrics (>=0.10.0)", "torchvision (>=0.12.0)", "uvicorn (<0.23.3)"] +examples = ["gym[classic-control] (>=0.17.0)", "ipython[all] (<8.14.1)", "lightning-utilities (>=0.7.0)", "torchmetrics (>=0.10.0)", "torchvision (>=0.12.0)"] +extra = ["hydra-core (>=1.0.5)", "jsonargparse[signatures] (>=4.18.0,<4.23.0)", "matplotlib (>3.1)", "omegaconf (>=2.0.5)", "rich (>=12.3.0)", "tensorboardX (>=2.2)"] +strategies = ["deepspeed (>=0.8.2)"] +test = ["cloudpickle (>=1.3)", "coverage (==7.3.0)", "fastapi (<0.100.0)", "onnx (<1.15.0)", "onnxruntime (<1.16.0)", "pandas (>1.0)", "protobuf (<=3.20.1)", "psutil (<5.9.6)", "pytest (==7.4.0)", "pytest-cov (==4.1.0)", "pytest-forked (==1.4.0)", "pytest-rerunfailures (==10.3)", "scikit-learn (>0.22.1)", "tensorboard (>=2.9.1)", "uvicorn (<0.23.3)"] + [[package]] name = "pywin32" version = "306" @@ -2091,7 +2130,6 @@ pydantic = {version = "*", optional = true, markers = "extra == \"default\""} pyyaml = "*" requests = "*" smart-open = {version = "*", optional = true, markers = "extra == 
\"default\""} -typing-extensions = {version = "*", markers = "python_version < \"3.8\""} virtualenv = {version = ">=20.0.24,<20.21.1", optional = true, markers = "extra == \"default\""} [package.extras] @@ -2227,6 +2265,20 @@ ssh = ["paramiko"] test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] webhdfs = ["requests"] +[[package]] +name = "sympy" +version = "1.12" +description = "Computer algebra system (CAS) in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, + {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, +] + +[package.dependencies] +mpmath = ">=0.19" + [[package]] name = "tabulate" version = "0.9.0" @@ -2252,6 +2304,94 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "torch" +version = "2.0.1" +description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "torch-2.0.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:8ced00b3ba471856b993822508f77c98f48a458623596a4c43136158781e306a"}, + {file = "torch-2.0.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:359bfaad94d1cda02ab775dc1cc386d585712329bb47b8741607ef6ef4950747"}, + {file = "torch-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:7c84e44d9002182edd859f3400deaa7410f5ec948a519cc7ef512c2f9b34d2c4"}, + {file = "torch-2.0.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:567f84d657edc5582d716900543e6e62353dbe275e61cdc36eda4929e46df9e7"}, + {file = "torch-2.0.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:787b5a78aa7917465e9b96399b883920c88a08f4eb63b5a5d2d1a16e27d2f89b"}, + {file = "torch-2.0.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:e617b1d0abaf6ced02dbb9486803abfef0d581609b09641b34fa315c9c40766d"}, + {file = "torch-2.0.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b6019b1de4978e96daa21d6a3ebb41e88a0b474898fe251fd96189587408873e"}, + {file = "torch-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:dbd68cbd1cd9da32fe5d294dd3411509b3d841baecb780b38b3b7b06c7754434"}, + {file = "torch-2.0.1-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:ef654427d91600129864644e35deea761fb1fe131710180b952a6f2e2207075e"}, + {file = "torch-2.0.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:25aa43ca80dcdf32f13da04c503ec7afdf8e77e3a0183dd85cd3e53b2842e527"}, + {file = "torch-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5ef3ea3d25441d3957348f7e99c7824d33798258a2bf5f0f0277cbcadad2e20d"}, + {file = "torch-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0882243755ff28895e8e6dc6bc26ebcf5aa0911ed81b2a12f241fc4b09075b13"}, + {file = "torch-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:f66aa6b9580a22b04d0af54fcd042f52406a8479e2b6a550e3d9f95963e168c8"}, + {file = "torch-2.0.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:1adb60d369f2650cac8e9a95b1d5758e25d526a34808f7448d0bd599e4ae9072"}, + {file = "torch-2.0.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:1bcffc16b89e296826b33b98db5166f990e3b72654a2b90673e817b16c50e32b"}, + {file = "torch-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e10e1597f2175365285db1b24019eb6f04d53dcd626c735fc502f1e8b6be9875"}, + {file 
= "torch-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:423e0ae257b756bb45a4b49072046772d1ad0c592265c5080070e0767da4e490"}, + {file = "torch-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8742bdc62946c93f75ff92da00e3803216c6cce9b132fbca69664ca38cfb3e18"}, + {file = "torch-2.0.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:c62df99352bd6ee5a5a8d1832452110435d178b5164de450831a3a8cc14dc680"}, + {file = "torch-2.0.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:671a2565e3f63b8fe8e42ae3e36ad249fe5e567435ea27b94edaa672a7d0c416"}, +] + +[package.dependencies] +filelock = "*" +jinja2 = "*" +networkx = "*" +sympy = "*" +typing-extensions = "*" + +[package.extras] +opt-einsum = ["opt-einsum (>=3.3)"] + +[[package]] +name = "torchmetrics" +version = "1.1.1" +description = "PyTorch native Metrics" +optional = false +python-versions = ">=3.8" +files = [ + {file = "torchmetrics-1.1.1-py3-none-any.whl", hash = "sha256:903b4fc30537acfc5221505c48f7627e58dbf6d9dea85c16ea7b4323f9e13793"}, + {file = "torchmetrics-1.1.1.tar.gz", hash = "sha256:65ea34205c0506eecfd06b98f63f4d2a2c5c0e17367cf324e1747adc854c80a5"}, +] + +[package.dependencies] +lightning-utilities = ">=0.8.0" +numpy = ">1.20.0" +torch = ">=1.8.1" +typing-extensions = {version = "*", markers = "python_version < \"3.9\""} + +[package.extras] +all = ["SciencePlots (>=2.0.0)", "lpips (<=0.1.4)", "matplotlib (>=3.2.0)", "mypy (==1.5.1)", "nltk (>=3.6)", "piq (<=0.8.0)", "pycocotools (>2.0.0)", "pystoi (>=0.3.0)", "regex (>=2021.9.24)", "scipy (>1.0.0)", "torch-fidelity (<=0.4.0)", "torchaudio (>=0.10.0)", "torchvision (>=0.8)", "tqdm (>=4.41.0)", "transformers (>4.4.0)", "transformers (>=4.10.0)", "types-PyYAML", "types-emoji", "types-protobuf", "types-requests", "types-setuptools", "types-six", "types-tabulate"] +audio = ["pystoi (>=0.3.0)", "torchaudio (>=0.10.0)"] +detection = ["pycocotools (>2.0.0)", "torchvision (>=0.8)"] +dev = ["SciencePlots (>=2.0.0)", "bert-score (==0.3.13)", "cloudpickle (>1.3)", "coverage (==7.3.0)", "dython (<=0.7.4)", "fairlearn", "fast-bss-eval (>=0.1.0)", "fire (<=0.5.0)", "huggingface-hub (<0.16)", "jiwer (>=2.3.0)", "kornia (>=0.6.7)", "lpips (<=0.1.4)", "matplotlib (>=3.2.0)", "mir-eval (>=0.6)", "mypy (==1.5.1)", "netcal (>1.0.0)", "nltk (>=3.6)", "numpy (<1.25.0)", "pandas (>1.0.0)", "pandas (>=1.4.0)", "phmdoctest (==1.4.0)", "piq (<=0.8.0)", "psutil (<=5.9.5)", "pycocotools (>2.0.0)", "pystoi (>=0.3.0)", "pytest (==7.4.0)", "pytest-cov (==4.1.0)", "pytest-doctestplus (==1.0.0)", "pytest-rerunfailures (==12.0)", "pytest-timeout (==2.1.0)", "pytorch-msssim (==1.0.0)", "regex (>=2021.9.24)", "requests (<=2.31.0)", "rouge-score (>0.1.0)", "sacrebleu (>=2.0.0)", "scikit-image (>=0.19.0)", "scikit-learn (>=1.1.1)", "scipy (>1.0.0)", "sewar (>=0.4.4)", "statsmodels (>0.13.5)", "torch-complex (<=0.4.3)", "torch-fidelity (<=0.4.0)", "torchaudio (>=0.10.0)", "torchvision (>=0.8)", "tqdm (>=4.41.0)", "transformers (>4.4.0)", "transformers (>=4.10.0)", "types-PyYAML", "types-emoji", "types-protobuf", "types-requests", "types-setuptools", "types-six", "types-tabulate"] +image = ["lpips (<=0.1.4)", "scipy (>1.0.0)", "torch-fidelity (<=0.4.0)", "torchvision (>=0.8)"] +multimodal = ["piq (<=0.8.0)", "transformers (>=4.10.0)"] +test = ["bert-score (==0.3.13)", "cloudpickle (>1.3)", "coverage (==7.3.0)", "dython (<=0.7.4)", "fairlearn", "fast-bss-eval (>=0.1.0)", "fire (<=0.5.0)", "huggingface-hub (<0.16)", "jiwer (>=2.3.0)", "kornia (>=0.6.7)", "mir-eval (>=0.6)", "netcal (>1.0.0)", "numpy (<1.25.0)", "pandas 
(>1.0.0)", "pandas (>=1.4.0)", "phmdoctest (==1.4.0)", "psutil (<=5.9.5)", "pytest (==7.4.0)", "pytest-cov (==4.1.0)", "pytest-doctestplus (==1.0.0)", "pytest-rerunfailures (==12.0)", "pytest-timeout (==2.1.0)", "pytorch-msssim (==1.0.0)", "requests (<=2.31.0)", "rouge-score (>0.1.0)", "sacrebleu (>=2.0.0)", "scikit-image (>=0.19.0)", "scikit-learn (>=1.1.1)", "scipy (>1.0.0)", "sewar (>=0.4.4)", "statsmodels (>0.13.5)", "torch-complex (<=0.4.3)"] +text = ["nltk (>=3.6)", "regex (>=2021.9.24)", "tqdm (>=4.41.0)", "transformers (>4.4.0)"] +typing = ["mypy (==1.5.1)", "types-PyYAML", "types-emoji", "types-protobuf", "types-requests", "types-setuptools", "types-six", "types-tabulate"] +visual = ["SciencePlots (>=2.0.0)", "matplotlib (>=3.2.0)"] + +[[package]] +name = "tqdm" +version = "4.66.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typing-extensions" version = "4.7.1" @@ -2308,7 +2448,6 @@ files = [ [package.dependencies] distlib = ">=0.3.6,<1" filelock = ">=3.4.1,<4" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.8\""} platformdirs = ">=2.4,<4" [package.extras] @@ -2428,7 +2567,6 @@ files = [ [package.dependencies] idna = ">=2.0" multidict = ">=4.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} [[package]] name = "zipp" @@ -2447,5 +2585,5 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" -python-versions = "^3.7" -content-hash = "53b760742497ab5536551d316f6844e60ade48e3e3b9172b0ed4ba71ee608852" +python-versions = "^3.8" +content-hash = "39b6e2a2b976633df18fa59b28a2b0dc7e8e05cda4629bd37c868b993742d750" diff --git a/pyproject.toml b/pyproject.toml index 5a2dbc31..cd5facf7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,7 @@ homepage = "https://github.com/project-codeflare/codeflare-sdk" keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] [tool.poetry.dependencies] -python = "^3.7" +python = "^3.8" openshift-client = "1.0.18" rich = "^12.5" ray = {version = "2.5.0", extras = ["default"]} @@ -29,6 +29,7 @@ codeflare-torchx = "0.6.0.dev1" cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" +pytorch-lightning = "^2.0.8" [tool.poetry.group.docs] optional = true From 6e433e214f4855842629d18c5290a098d556aedb Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Thu, 7 Sep 2023 12:04:12 +0100 Subject: [PATCH 015/496] Removing poetry virtual env in test container and updating README.md --- .github/workflows/unit-tests.yml | 10 +++++++--- README.md | 15 +++++++++------ 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index fcd22e9c..d62c0eff 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -14,14 +14,18 @@ jobs: image: quay.io/project-codeflare/codeflare-sdk-precommit:v0.0.3 steps: - uses: actions/checkout@v3 + - 
name: Install poetry + run: pip install poetry + - uses: actions/setup-python@v4 + with: + python-version: '3.8' - name: Install dependencies run: | - python -m pip install --upgrade pip - pip3 install poetry + poetry config virtualenvs.create false poetry lock --no-update poetry install --with test - name: Test with pytest and check coverage run: | - poetry run coverage run -m --source=src pytest -v tests/unit_test.py + coverage run -m --source=src pytest -v tests/unit_test.py coverage=$(coverage report -m | tail -1 | tail -c 4 | head -c 2) if (( $coverage < 90 )); then exit 1; else echo "Coverage passed, ${coverage}%"; fi diff --git a/README.md b/README.md index c3e5d5d8..e64555d7 100644 --- a/README.md +++ b/README.md @@ -22,14 +22,17 @@ Can be installed via `pip`: `pip install codeflare-sdk` ### Prerequisites We recommend using Python 3.9 for development. -Install dependencies: `poetry install` + +#### Install dependencies: + - run: `poetry install` + This will install standard requirements as specified in the poetry.lock file. Test and docs dependencies are optional. -To include test dependencies run: `poetry install --with test` +- To include test dependencies run: `poetry install --with test` -To include docs dependencies run: `poetry install --with docs` +- To include docs dependencies run: `poetry install --with docs` -To include test and docs dependencies run: `poetry install --with test,docs` +- To include test and docs dependencies run: `poetry install --with test,docs` If you require a requirements.txt file you can run: @@ -44,13 +47,13 @@ To build the codeflare-sdk pre-commit image run `podman build -f .github/build/C ### Testing - To install codeflare-sdk in editable mode, run `pip install -e .` from the repo root. -- To run the unit tests, run `pytest -v tests/unit_test.py` +- To run the unit tests, run `poetry run pytest -v tests/unit_test.py` - Any new test functions/scripts can be added into the `tests` folder - NOTE: Functional tests coming soon, will live in `tests/func_test.py` #### Code Coverage -- Run tests with the following command: `coverage run -m --source=src pytest tests/unit_test.py` +- Run tests with the following command: `poetry run coverage run -m --source=src pytest tests/unit_test.py` - To then view a code coverage report w/ missing lines, run `coverage report -m` ### Code Formatting From 4b484bf2fe49794610e3dfcc668f4b56f05db4f1 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Fri, 8 Sep 2023 09:50:36 +0100 Subject: [PATCH 016/496] updating READNE.md to use poetry shell --- README.md | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index e64555d7..e72512dd 100644 --- a/README.md +++ b/README.md @@ -21,9 +21,13 @@ Can be installed via `pip`: `pip install codeflare-sdk` ### Prerequisites -We recommend using Python 3.9 for development. +We recommend using Python 3.9 for development, along with Poetry. +Create a Poetry virtual environment with the required Python version 3.9, and run all commands within this environment. + + - run: `poetry shell` #### Install dependencies: + - run: `poetry install` This will install standard requirements as specified in the poetry.lock file. Test and docs dependencies are optional. @@ -47,13 +51,13 @@ To build the codeflare-sdk pre-commit image run `podman build -f .github/build/C ### Testing - To install codeflare-sdk in editable mode, run `pip install -e .` from the repo root. 
-- To run the unit tests, run `poetry run pytest -v tests/unit_test.py` +- To run the unit tests, run `pytest -v tests/unit_test.py` - Any new test functions/scripts can be added into the `tests` folder - NOTE: Functional tests coming soon, will live in `tests/func_test.py` #### Code Coverage -- Run tests with the following command: `poetry run coverage run -m --source=src pytest tests/unit_test.py` +- Run tests with the following command: `coverage run -m --source=src pytest tests/unit_test.py` - To then view a code coverage report w/ missing lines, run `coverage report -m` ### Code Formatting From 6c24d746f8702d9807f895d8258926b5e354ab0a Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Wed, 13 Sep 2023 10:12:16 +0100 Subject: [PATCH 017/496] updating containerfile and coverage-badge workflow --- .github/build/Containerfile | 3 ++- .github/workflows/coverage-badge.yaml | 2 ++ poetry.lock | 8 ++++---- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/build/Containerfile b/.github/build/Containerfile index a6e91faa..8345fbe1 100644 --- a/.github/build/Containerfile +++ b/.github/build/Containerfile @@ -14,7 +14,8 @@ RUN tar -C /usr/local/bin -xvf $TMPDIR/oc.tar.gz && \ rm $TMPDIR/oc.tar.gz USER $USERID -RUN pip3 install poetry +RUN pip3 install poetry && \ + poetry config virtualenvs.create false COPY pyproject.toml ./ RUN poetry install diff --git a/.github/workflows/coverage-badge.yaml b/.github/workflows/coverage-badge.yaml index 692998af..a58ab8db 100644 --- a/.github/workflows/coverage-badge.yaml +++ b/.github/workflows/coverage-badge.yaml @@ -21,6 +21,8 @@ jobs: run: | python -m pip install --upgrade pip pip install poetry + poetry config virtualenvs.create false + poetry lock --no-update poetry install --with test - name: Generate coverage report run: | diff --git a/poetry.lock b/poetry.lock index 02235ac1..531f5a85 100644 --- a/poetry.lock +++ b/poetry.lock @@ -424,12 +424,12 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "codeflare-torchx" -version = "0.6.0.dev0" +version = "0.6.0.dev1" description = "TorchX SDK and Components" optional = false python-versions = ">=3.7" files = [ - {file = "codeflare_torchx-0.6.0.dev0-py3-none-any.whl", hash = "sha256:fd6b2ee084097bc0e84a4b4b44772a7c44fc90e3490a93ec804ed088534cbd0a"}, + {file = "codeflare_torchx-0.6.0.dev1-py3-none-any.whl", hash = "sha256:a03bf570c775cb7fc7787a436ef5a76c726669e287af510513c5bc03033d1fca"}, ] [package.dependencies] @@ -444,7 +444,7 @@ tabulate = "*" urllib3 = ">=1.21.1,<1.27" [package.extras] -dev = ["aiobotocore (==2.4.2)", "ax-platform[mysql] (==0.2.3)", "black (==23.3.0)", "boto3 (==1.24.59)", "captum (>=0.4.0)", "flake8 (==3.9.0)", "fsspec[s3] (==2023.1.0)", "google-api-core", "google-cloud-batch (>=0.5.0)", "google-cloud-logging (>=3.0.0)", "google-cloud-runtimeconfig (>=0.33.2)", "hydra-core", "ipython", "kfp (==1.8.9)", "mlflow-skinny", "moto (==4.1.6)", "pyre-check", "pyre-extensions", "pytest", "pytorch-lightning (==1.5.10)", "ray[default]", "torch (>=1.10.0)", "torch-model-archiver (>=0.4.2)", "torchmetrics (<0.11.0)", "torchserve (>=0.4.2)", "torchtext (>=0.11.0)", "torchvision (>=0.11.1)", "ts (==0.5.1)", "usort (==1.0.2)"] +dev = ["aiobotocore (==2.4.2)", "ax-platform[mysql] (==0.2.3)", "black (==23.3.0)", "boto3 (==1.24.59)", "captum (>=0.4.0)", "flake8 (==3.9.0)", "fsspec[s3] (==2023.1.0)", "google-api-core", "google-cloud-batch (>=0.5.0)", "google-cloud-logging (>=3.0.0)", "google-cloud-runtimeconfig (>=0.33.2)", "hydra-core", 
"ipython", "kfp (==1.8.22)", "mlflow-skinny", "moto (==4.1.6)", "pyre-check", "pyre-extensions", "pytest", "pytorch-lightning (==1.5.10)", "ray[default]", "torch (>=1.10.0)", "torch-model-archiver (>=0.4.2)", "torchmetrics (<0.11.0)", "torchserve (>=0.4.2)", "torchtext (>=0.11.0)", "torchvision (>=0.11.1)", "ts (==0.5.1)", "usort (==1.0.2)"] gcp-batch = ["google-cloud-batch (>=0.5.0)", "google-cloud-logging (>=3.0.0)", "google-cloud-runtimeconfig (>=0.33.2)"] kfp = ["kfp (==1.6.2)"] kubernetes = ["kubernetes (>=11)"] @@ -2586,4 +2586,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "39b6e2a2b976633df18fa59b28a2b0dc7e8e05cda4629bd37c868b993742d750" +content-hash = "18e18b306f8c91c2d0f995531cf7f20be3180815e8c82ac1d1fc2913d0bc90d8" From 17e62ae668800c2943dea311bb5c09e37beb93df Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Fri, 15 Sep 2023 15:58:46 +0100 Subject: [PATCH 018/496] Fixing python tests badge on readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e72512dd..e864189b 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Codeflare-SDK -[![Python application](https://github.com/project-codeflare/codeflare-sdk/actions/workflows/python-app.yml/badge.svg?branch=main)](https://github.com/project-codeflare/codeflare-sdk/actions/workflows/python-app.yml) +[![Python application](https://github.com/project-codeflare/codeflare-sdk/actions/workflows/unit-tests.yml/badge.svg?branch=main)](https://github.com/project-codeflare/codeflare-sdk/actions/workflows/unit-tests.yml) ![coverage badge](./coverage.svg) An intuitive, easy-to-use python interface for batch resource requesting, access, job submission, and observation. Simplifying the developer's life while enabling access to high-performance compute resources, either in the cloud or on-prem. 
From abec0ef2f7a89b605cbc314078264eb0133ff946 Mon Sep 17 00:00:00 2001
From: Bobbins228
Date: Fri, 15 Sep 2023 13:40:45 +0100
Subject: [PATCH 019/496] Replaced load_kube_config() with config_check()

---
 src/codeflare_sdk/cluster/cluster.py | 6 +++---
 src/codeflare_sdk/utils/generate_yaml.py | 4 ++--
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py
index b496ab6e..88081a45 100644
--- a/src/codeflare_sdk/cluster/cluster.py
+++ b/src/codeflare_sdk/cluster/cluster.py
@@ -455,8 +455,8 @@ def get_current_namespace(): # pragma: no cover
 def get_cluster(cluster_name: str, namespace: str = "default"):
     try:
-        config.load_kube_config()
-        api_instance = client.CustomObjectsApi()
+        config_check()
+        api_instance = client.CustomObjectsApi(api_config_handler())
         rcs = api_instance.list_namespaced_custom_object(
             group="ray.io",
             version="v1alpha1",
@@ -477,7 +477,7 @@ def get_cluster(cluster_name: str, namespace: str = "default"):
 # private methods
 def _get_ingress_domain():
     try:
-        config.load_kube_config()
+        config_check()
         api_client = client.CustomObjectsApi(api_config_handler())
         ingress = api_client.get_cluster_custom_object(
             "config.openshift.io", "v1", "ingresses", "cluster"
diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py
index f128ef8b..54196fce 100755
--- a/src/codeflare_sdk/utils/generate_yaml.py
+++ b/src/codeflare_sdk/utils/generate_yaml.py
@@ -23,7 +23,7 @@
 import uuid
 from kubernetes import client, config
 from .kube_api_helpers import _kube_api_error_handling
-from ..cluster.auth import api_config_handler
+from ..cluster.auth import api_config_handler, config_check

 def read_template(template):
@@ -268,7 +268,7 @@
     command = command.replace("deployment-name", cluster_name)
     try:
-        config.load_kube_config()
+        config_check()
         api_client = client.CustomObjectsApi(api_config_handler())
         ingress = api_client.get_cluster_custom_object(
             "config.openshift.io", "v1", "ingresses", "cluster"

From 2e543caff59c1cfe83db12d98eac8d6147a06183 Mon Sep 17 00:00:00 2001
From: codeflare-machine-account
Date: Wed, 20 Sep 2023 13:29:59 +0000
Subject: [PATCH 020/496] Changes in docs for release: v0.8.0

---
 docs/cluster/cluster.html | 12 ++++++------
 docs/utils/generate_yaml.html | 6 +++---
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/docs/cluster/cluster.html b/docs/cluster/cluster.html
index dbb6cec5..21950889 100644
--- a/docs/cluster/cluster.html
+++ b/docs/cluster/cluster.html
@@ -487,8 +487,8 @@

Module codeflare_sdk.cluster.cluster

def get_cluster(cluster_name: str, namespace: str = "default"):
     try:
-        config.load_kube_config()
-        api_instance = client.CustomObjectsApi()
+        config_check()
+        api_instance = client.CustomObjectsApi(api_config_handler())
         rcs = api_instance.list_namespaced_custom_object(
             group="ray.io",
             version="v1alpha1",
@@ -509,7 +509,7 @@

Module codeflare_sdk.cluster.cluster

# private methods
def _get_ingress_domain():
     try:
-        config.load_kube_config()
+        config_check()
         api_client = client.CustomObjectsApi(api_config_handler())
         ingress = api_client.get_cluster_custom_object(
             "config.openshift.io", "v1", "ingresses", "cluster"
@@ -604,7 +604,7 @@

Module codeflare_sdk.cluster.cluster

def _map_to_ray_cluster(rc) -> Optional[RayCluster]:
-    if "state" in rc["status"]:
+    if "status" in rc and "state" in rc["status"]:
         status = RayClusterStatus(rc["status"]["state"].lower())
     else:
         status = RayClusterStatus.UNKNOWN
@@ -694,8 +694,8 @@

Functions

def get_cluster(cluster_name: str, namespace: str = "default"):
     try:
-        config.load_kube_config()
-        api_instance = client.CustomObjectsApi()
+        config_check()
+        api_instance = client.CustomObjectsApi(api_config_handler())
         rcs = api_instance.list_namespaced_custom_object(
             group="ray.io",
             version="v1alpha1",
diff --git a/docs/utils/generate_yaml.html b/docs/utils/generate_yaml.html
index ea1e7302..6d94e4c7 100644
--- a/docs/utils/generate_yaml.html
+++ b/docs/utils/generate_yaml.html
@@ -54,7 +54,7 @@ 

Module codeflare_sdk.utils.generate_yaml

import uuid
from kubernetes import client, config
from .kube_api_helpers import _kube_api_error_handling
-from ..cluster.auth import api_config_handler
+from ..cluster.auth import api_config_handler, config_check

def read_template(template):
@@ -299,7 +299,7 @@

Module codeflare_sdk.utils.generate_yaml

     command = command.replace("deployment-name", cluster_name)
     try:
-        config.load_kube_config()
+        config_check()
         api_client = client.CustomObjectsApi(api_config_handler())
         ingress = api_client.get_cluster_custom_object(
             "config.openshift.io", "v1", "ingresses", "cluster"
@@ -542,7 +542,7 @@

Functions

command = command.replace("deployment-name", cluster_name) try: - config.load_kube_config() + config_check() api_client = client.CustomObjectsApi(api_config_handler()) ingress = api_client.get_cluster_custom_object( "config.openshift.io", "v1", "ingresses", "cluster" From 2b2882c9bae960fd7321f4af75f7b6171bb95262 Mon Sep 17 00:00:00 2001 From: MichaelClifford Date: Sat, 23 Sep 2023 22:40:51 +0000 Subject: [PATCH 021/496] add ability to configure head node --- src/codeflare_sdk/cluster/cluster.py | 6 ++ src/codeflare_sdk/cluster/config.py | 3 + src/codeflare_sdk/cluster/model.py | 3 + src/codeflare_sdk/utils/generate_yaml.py | 71 ++++++++++++++---------- 4 files changed, 55 insertions(+), 28 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 88081a45..84f73198 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -109,6 +109,9 @@ def create_app_wrapper(self): name = self.config.name namespace = self.config.namespace + head_cpus = self.config.head_cpus + head_memory = self.config.head_memory + head_gpus = self.config.head_gpus min_cpu = self.config.min_cpus max_cpu = self.config.max_cpus min_memory = self.config.min_memory @@ -126,6 +129,9 @@ def create_app_wrapper(self): return generate_appwrapper( name=name, namespace=namespace, + head_cpus=head_cpus, + head_memory=head_memory, + head_gpus=head_gpus, min_cpu=min_cpu, max_cpu=max_cpu, min_memory=min_memory, diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index cb935e79..bde3f4ca 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -34,6 +34,9 @@ class ClusterConfiguration: name: str namespace: str = None head_info: list = field(default_factory=list) + head_cpus: int = 2 + head_memory: int = 8 + head_gpus: int = 0 machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] min_cpus: int = 1 max_cpus: int = 1 diff --git a/src/codeflare_sdk/cluster/model.py b/src/codeflare_sdk/cluster/model.py index 639cc734..fb8873e4 100644 --- a/src/codeflare_sdk/cluster/model.py +++ b/src/codeflare_sdk/cluster/model.py @@ -69,6 +69,9 @@ class RayCluster: name: str status: RayClusterStatus + head_cpus: int + head_mem: str + head_gpu: int workers: int worker_mem_min: str worker_mem_max: str diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 54196fce..eb45611f 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -107,35 +107,41 @@ def update_priority(yaml, item, dispatch_priority, priority_val): def update_custompodresources( - item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers -): + item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, head_cpus, head_memory, head_gpus): if "custompodresources" in item.keys(): custompodresources = item.get("custompodresources") for i in range(len(custompodresources)): + resource = custompodresources[i] if i == 0: # Leave head node resources as template default - continue - resource = custompodresources[i] - for k, v in resource.items(): - if k == "replicas" and i == 1: - resource[k] = workers - if k == "requests" or k == "limits": - for spec, _ in v.items(): - if spec == "cpu": - if k == "limits": - resource[k][spec] = max_cpu - else: - resource[k][spec] = min_cpu - if spec == "memory": - if k == "limits": - resource[k][spec] = str(max_memory) + "G" - else: - resource[k][spec] = str(min_memory) + "G" - if spec 
== "nvidia.com/gpu": - if i == 0: - resource[k][spec] = 0 - else: - resource[k][spec] = gpu + resource["requests"]["cpu"] = head_cpus + resource["limits"]["cpu"] = head_cpus + resource["requests"]["memory"] = str(head_memory) + "G" + resource["limits"]["memory"] = str(head_memory) + "G" + resource["requests"]["nvidia.com/gpu"] = head_gpus + resource["limits"]["nvidia.com/gpu"] = head_gpus + + else: + for k, v in resource.items(): + if k == "replicas" and i == 1: + resource[k] = workers + if k == "requests" or k == "limits": + for spec, _ in v.items(): + if spec == "cpu": + if k == "limits": + resource[k][spec] = max_cpu + else: + resource[k][spec] = min_cpu + if spec == "memory": + if k == "limits": + resource[k][spec] = str(max_memory) + "G" + else: + resource[k][spec] = str(min_memory) + "G" + if spec == "nvidia.com/gpu": + if i == 0: + resource[k][spec] = 0 + else: + resource[k][spec] = gpu else: sys.exit("Error: malformed template") @@ -205,11 +211,15 @@ def update_nodes( instascale, env, image_pull_secrets, + head_cpus, + head_memory, + head_gpus, ): if "generictemplate" in item.keys(): head = item.get("generictemplate").get("spec").get("headGroupSpec") + head["rayStartParams"]["num_gpus"] = str(int(head_gpus)) + worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] - # Head counts as first worker worker["replicas"] = workers worker["minReplicas"] = workers @@ -225,7 +235,7 @@ def update_nodes( update_env(spec, env) if comp == head: # TODO: Eventually add head node configuration outside of template - continue + update_resources(spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus) else: update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) @@ -350,6 +360,9 @@ def write_user_appwrapper(user_yaml, output_file_name): def generate_appwrapper( name: str, namespace: str, + head_cpus: int, + head_memory: int, + head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, @@ -375,8 +388,7 @@ def generate_appwrapper( update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) update_custompodresources( - item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers - ) + item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, head_cpus, head_memory, head_gpus) update_nodes( item, appwrapper_name, @@ -390,6 +402,9 @@ def generate_appwrapper( instascale, env, image_pull_secrets, + head_cpus, + head_memory, + head_gpus, ) update_dashboard_route(route_item, cluster_name, namespace) if local_interactive: From 54a5a12a103807590b26ab076ea4032d657451fb Mon Sep 17 00:00:00 2001 From: Michael Clifford Date: Sun, 24 Sep 2023 16:27:28 -0400 Subject: [PATCH 022/496] minor fixes and unit tests additions --- src/codeflare_sdk/cluster/cluster.py | 12 ++++++++ src/codeflare_sdk/utils/generate_yaml.py | 36 +++++++++++++++++++----- tests/unit_test.py | 6 ++++ 3 files changed, 47 insertions(+), 7 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 84f73198..99b11582 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -614,6 +614,15 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: worker_gpu=0, # hard to detect currently how many gpus, can override it with what the user asked for namespace=rc["metadata"]["namespace"], dashboard=ray_route, + head_cpus=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["cpu"], + 
head_mem=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["memory"], + head_gpu=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["nvidia.com/gpu"], ) @@ -644,6 +653,9 @@ def _copy_to_ray(cluster: Cluster) -> RayCluster: worker_gpu=cluster.config.num_gpus, namespace=cluster.config.namespace, dashboard=cluster.cluster_dashboard_uri(), + head_cpus=cluster.config.head_cpus, + head_mem=cluster.config.head_memory, + head_gpu=cluster.config.head_gpus, ) if ray.status == CodeFlareClusterStatus.READY: ray.status = RayClusterStatus.READY diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index eb45611f..95e1c5ec 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -107,7 +107,17 @@ def update_priority(yaml, item, dispatch_priority, priority_val): def update_custompodresources( - item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, head_cpus, head_memory, head_gpus): + item, + min_cpu, + max_cpu, + min_memory, + max_memory, + gpu, + workers, + head_cpus, + head_memory, + head_gpus, +): if "custompodresources" in item.keys(): custompodresources = item.get("custompodresources") for i in range(len(custompodresources)): @@ -120,8 +130,8 @@ def update_custompodresources( resource["limits"]["memory"] = str(head_memory) + "G" resource["requests"]["nvidia.com/gpu"] = head_gpus resource["limits"]["nvidia.com/gpu"] = head_gpus - - else: + + else: for k, v in resource.items(): if k == "replicas" and i == 1: resource[k] = workers @@ -217,8 +227,8 @@ def update_nodes( ): if "generictemplate" in item.keys(): head = item.get("generictemplate").get("spec").get("headGroupSpec") - head["rayStartParams"]["num_gpus"] = str(int(head_gpus)) - + head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) + worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] # Head counts as first worker worker["replicas"] = workers @@ -235,7 +245,9 @@ def update_nodes( update_env(spec, env) if comp == head: # TODO: Eventually add head node configuration outside of template - update_resources(spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus) + update_resources( + spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus + ) else: update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) @@ -388,7 +400,17 @@ def generate_appwrapper( update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) update_custompodresources( - item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, head_cpus, head_memory, head_gpus) + item, + min_cpu, + max_cpu, + min_memory, + max_memory, + gpu, + workers, + head_cpus, + head_memory, + head_gpus, + ) update_nodes( item, appwrapper_name, diff --git a/tests/unit_test.py b/tests/unit_test.py index b046b1f1..4a8e2f44 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -525,6 +525,9 @@ def test_ray_details(mocker, capsys): worker_gpu=0, namespace="ns", dashboard="fake-uri", + head_cpus=2, + head_mem=8, + head_gpu=0, ) mocker.patch( "codeflare_sdk.cluster.cluster.Cluster.status", @@ -1685,6 +1688,9 @@ def test_cluster_status(mocker): worker_gpu=0, namespace="ns", dashboard="fake-uri", + head_cpus=2, + head_mem=8, + head_gpu=0, ) cf = Cluster(ClusterConfiguration(name="test", namespace="ns")) mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) From 
47c0093966f3b90a3a0347fbd335c99b048afcb6 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Mon, 25 Sep 2023 12:25:12 +0100 Subject: [PATCH 023/496] Check for dashboard readiness after cluster is ready --- src/codeflare_sdk/cluster/cluster.py | 18 +++++++++++++----- tests/unit_test.py | 2 +- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 99b11582..d9659079 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -274,19 +274,27 @@ def wait_ready(self, timeout: Optional[int] = None): dashboard_ready = False status = None time = 0 - while not ready or not dashboard_ready: + while not ready: status, ready = self.status(print_to_console=False) - dashboard_ready = self.is_dashboard_ready() if status == CodeFlareClusterStatus.UNKNOWN: print( "WARNING: Current cluster status is unknown, have you run cluster.up yet?" ) - if not ready or not dashboard_ready: + if not ready: + if timeout and time >= timeout: + raise TimeoutError(f"wait() timed out after waiting {timeout}s for cluster to be ready") + sleep(5) + time += 5 + print("Requested cluster is up and running!") + + while not dashboard_ready: + dashboard_ready = self.is_dashboard_ready() + if not dashboard_ready: if timeout and time >= timeout: - raise TimeoutError(f"wait() timed out after waiting {timeout}s") + raise TimeoutError(f"wait() timed out after waiting {timeout}s for dashboard to be ready") sleep(5) time += 5 - print("Requested cluster and dashboard are up and running!") + print("Dashboard is ready!") def details(self, print_to_console: bool = True) -> RayCluster: cluster = _copy_to_ray(self) diff --git a/tests/unit_test.py b/tests/unit_test.py index 4a8e2f44..0b8e79bb 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1794,7 +1794,7 @@ def test_wait_ready(mocker, capsys): captured = capsys.readouterr() assert ( captured.out - == "Waiting for requested resources to be set up...\nRequested cluster and dashboard are up and running!\n" + == "Waiting for requested resources to be set up...\nRequested cluster is up and running!\nDashboard is ready!\n" ) From 37352841db87c3dcfb3379552936871b5fc7247b Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 26 Sep 2023 17:27:28 +0100 Subject: [PATCH 024/496] Add argument dashboard_check bool and checks --- src/codeflare_sdk/cluster/cluster.py | 15 ++++++++++----- tests/unit_test.py | 6 ++++++ 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index d9659079..5d00cdae 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -264,7 +264,7 @@ def is_dashboard_ready(self) -> bool: else: return False - def wait_ready(self, timeout: Optional[int] = None): + def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True): """ Waits for requested cluster to be ready, up to an optional timeout (s). Checks every five seconds. 
@@ -282,19 +282,24 @@ def wait_ready(self, timeout: Optional[int] = None): ) if not ready: if timeout and time >= timeout: - raise TimeoutError(f"wait() timed out after waiting {timeout}s for cluster to be ready") + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for cluster to be ready" + ) sleep(5) time += 5 print("Requested cluster is up and running!") - while not dashboard_ready: + while dashboard_check and not dashboard_ready: dashboard_ready = self.is_dashboard_ready() if not dashboard_ready: if timeout and time >= timeout: - raise TimeoutError(f"wait() timed out after waiting {timeout}s for dashboard to be ready") + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for dashboard to be ready" + ) sleep(5) time += 5 - print("Dashboard is ready!") + if dashboard_ready: + print("Dashboard is ready!") def details(self, print_to_console: bool = True) -> RayCluster: cluster = _copy_to_ray(self) diff --git a/tests/unit_test.py b/tests/unit_test.py index 0b8e79bb..78925226 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1796,6 +1796,12 @@ def test_wait_ready(mocker, capsys): captured.out == "Waiting for requested resources to be set up...\nRequested cluster is up and running!\nDashboard is ready!\n" ) + cf.wait_ready(dashboard_check=False) + captured = capsys.readouterr() + assert ( + captured.out + == "Waiting for requested resources to be set up...\nRequested cluster is up and running!\n" + ) def test_jobdefinition_coverage(): From 286a2179c0aef265cc2bcf404fcf462cfa578502 Mon Sep 17 00:00:00 2001 From: Mustafa Eyceoz Date: Wed, 27 Sep 2023 11:48:44 -0400 Subject: [PATCH 025/496] Remove pytorch-lightning dep --- poetry.lock | 890 ++++++++++++++++++++++--------------------------- pyproject.toml | 1 - 2 files changed, 404 insertions(+), 487 deletions(-) diff --git a/poetry.lock b/poetry.lock index 531f5a85..c933bd3b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
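Taken together, the two patches above change how wait_ready() blocks: the cluster-readiness loop always runs, while the dashboard probe only runs when dashboard_check is left at its default of True. A minimal usage sketch of the resulting API follows; the ClusterConfiguration import path and the name/namespace values are assumptions for illustration and are not part of these patches.

from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration  # assumed import path

# Illustrative configuration; "test" and "ns" are placeholders.
cluster = Cluster(ClusterConfiguration(name="test", namespace="ns"))
cluster.up()

# Block until the RayCluster itself is ready, skipping the dashboard probe;
# raises TimeoutError after 600s if the cluster never becomes ready.
cluster.wait_ready(timeout=600, dashboard_check=False)

# Default behaviour: after the cluster is ready, also wait for the dashboard
# route to respond before returning.
# cluster.wait_ready()

With dashboard_check=False the call prints only "Requested cluster is up and running!", matching the expectation added to test_wait_ready in the patch above.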
[[package]] name = "aiohttp" @@ -149,13 +149,13 @@ files = [ [[package]] name = "async-timeout" -version = "4.0.2" +version = "4.0.3" description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] [[package]] @@ -410,13 +410,13 @@ files = [ [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -646,13 +646,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -674,124 +674,109 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] [[package]] name = "filelock" -version = "3.12.2" +version = "3.12.4" description = "A platform independent file lock." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, + {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, + {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, ] [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] +typing = ["typing-extensions (>=4.7.1)"] [[package]] name = "frozenlist" -version = "1.3.3" +version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, - {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, - {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, - {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, - {file = 
"frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, - {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, - {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, - {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, - {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, - {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, - {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, - {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, - {file = 
"frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, - {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, - {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, - {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, - {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, - {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, - {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, - {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, - {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, - {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, - {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, - {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, - {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, - {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, + {file = 
"frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, + {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, + {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, 
+ {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, + {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, + {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, + {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, + {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, + {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, + {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, ] [[package]] name = "fsspec" -version = "2023.1.0" +version = "2023.9.2" description = "File-system specification" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "fsspec-2023.1.0-py3-none-any.whl", hash = "sha256:b833e2e541e9e8cde0ab549414187871243177feb3d344f9d27b25a93f5d8139"}, - {file = "fsspec-2023.1.0.tar.gz", hash = "sha256:fbae7f20ff801eb5f7d0bedf81f25c787c0dfac5e982d98fa3884a9cde2b5411"}, + {file = "fsspec-2023.9.2-py3-none-any.whl", hash = "sha256:603dbc52c75b84da501b9b2ec8c11e1f61c25984c4a0dda1f129ef391fbfc9b4"}, + {file = "fsspec-2023.9.2.tar.gz", hash = "sha256:80bfb8c70cc27b2178cc62a935ecf242fc6e8c3fb801f9c571fc01b1e715ba7d"}, ] -[package.dependencies] -aiohttp = {version = "<4.0.0a0 || >4.0.0a0,<4.0.0a1 || >4.0.0a1", optional = true, markers = "extra == \"http\""} -requests = {version = "*", optional = true, markers = "extra == \"http\""} - [package.extras] abfs = ["adlfs"] adl = ["adlfs"] arrow = ["pyarrow (>=1)"] dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] dropbox = ["dropbox", "dropboxdrivefs", "requests"] -entrypoints = ["importlib-metadata"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] fuse = ["fusepy"] gcs = ["gcsfs"] git = ["pygit2"] @@ -810,13 +795,13 @@ tqdm = ["tqdm"] [[package]] name = "google-api-core" -version = "2.11.1" +version = "2.12.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.11.1.tar.gz", hash = "sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a"}, - {file = "google_api_core-2.11.1-py3-none-any.whl", hash = "sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a"}, + {file = 
"google-api-core-2.12.0.tar.gz", hash = "sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553"}, + {file = "google_api_core-2.12.0-py3-none-any.whl", hash = "sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160"}, ] [package.dependencies] @@ -832,20 +817,19 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.22.0" +version = "2.23.0" description = "Google Authentication Library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "google-auth-2.22.0.tar.gz", hash = "sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce"}, - {file = "google_auth-2.22.0-py2.py3-none-any.whl", hash = "sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873"}, + {file = "google-auth-2.23.0.tar.gz", hash = "sha256:753a26312e6f1eaeec20bc6f2644a10926697da93446e1f8e24d6d32d45a922a"}, + {file = "google_auth-2.23.0-py2.py3-none-any.whl", hash = "sha256:2cec41407bd1e207f5b802638e32bb837df968bb5c05f413d0fa526fac4cf7a7"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" -six = ">=1.9.0" urllib3 = "<2.0" [package.extras] @@ -874,12 +858,12 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "gpustat" -version = "1.1" +version = "1.1.1" description = "An utility to monitor NVIDIA GPU status and usage" optional = false python-versions = ">=3.6" files = [ - {file = "gpustat-1.1.tar.gz", hash = "sha256:c8fc105404aac11884f70ed2fa06cfd210d3cd389ccae4a9bc38579c72460cee"}, + {file = "gpustat-1.1.1.tar.gz", hash = "sha256:c18d3ed5518fc16300c42d694debc70aebb3be55cae91f1db64d63b5fa8af9d8"}, ] [package.dependencies] @@ -1021,13 +1005,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.7.0" +version = "6.8.0" description = "Read metadata from Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"}, - {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"}, + {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, + {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, ] [package.dependencies] @@ -1036,25 +1020,25 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "importlib-resources" -version = "5.12.0" +version = "6.1.0" description = "Read resources from Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = 
"sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, - {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, + {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = "sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, + {file = "importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, ] [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] [[package]] name = "iniconfig" @@ -1067,23 +1051,6 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "jinja2" -version = "3.1.2" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - [[package]] name = "jinxed" version = "1.2.0" @@ -1100,25 +1067,42 @@ ansicon = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "jsonschema" -version = "4.17.3" +version = "4.19.1" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, - {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, + {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, + {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, ] [package.dependencies] -attrs = ">=17.4.0" +attrs = ">=22.2.0" importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +jsonschema-specifications = ">=2023.03.6" pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +[[package]] +name = "jsonschema-specifications" +version = 
"2023.7.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, + {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, +] + +[package.dependencies] +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +referencing = ">=0.28.0" + [[package]] name = "kubernetes" version = "26.1.0" @@ -1145,26 +1129,6 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] -[[package]] -name = "lightning-utilities" -version = "0.9.0" -description = "PyTorch Lightning Sample project." -optional = false -python-versions = ">=3.7" -files = [ - {file = "lightning-utilities-0.9.0.tar.gz", hash = "sha256:efbf2c488c257f942abdfd06cf646fb84ca215a9663b60081811e22a15ee033b"}, - {file = "lightning_utilities-0.9.0-py3-none-any.whl", hash = "sha256:918dd90c775719e3855631db6282ad75c14da4c5727c4cebdd1589d865fad03d"}, -] - -[package.dependencies] -packaging = ">=17.1" -typing-extensions = "*" - -[package.extras] -cli = ["fire"] -docs = ["requests (>=2.0.0)"] -typing = ["mypy (>=1.0.0)"] - [[package]] name = "mako" version = "1.2.4" @@ -1261,93 +1225,69 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] -[[package]] -name = "mpmath" -version = "1.3.0" -description = "Python library for arbitrary-precision floating-point arithmetic" -optional = false -python-versions = "*" -files = [ - {file = "mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c"}, - {file = "mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f"}, -] - -[package.extras] -develop = ["codecov", "pycodestyle", "pytest (>=4.6)", "pytest-cov", "wheel"] -docs = ["sphinx"] -gmpy = ["gmpy2 (>=2.1.0a4)"] -tests = ["pytest (>=4.6)"] - [[package]] name = "msgpack" -version = "1.0.5" +version = "1.0.6" description = "MessagePack serializer" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:525228efd79bb831cf6830a732e2e80bc1b05436b086d4264814b4b2955b2fa9"}, - {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f8d8b3bf1ff2672567d6b5c725a1b347fe838b912772aa8ae2bf70338d5a198"}, - {file = "msgpack-1.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdc793c50be3f01106245a61b739328f7dccc2c648b501e237f0699fe1395b81"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cb47c21a8a65b165ce29f2bec852790cbc04936f502966768e4aae9fa763cb7"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42b9594cc3bf4d838d67d6ed62b9e59e201862a25e9a157019e171fbe672dd3"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b56a24893105dc52c1253649b60f475f36b3aa0fc66115bffafb624d7cb30b"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1967f6129fc50a43bfe0951c35acbb729be89a55d849fab7686004da85103f1c"}, - {file = 
"msgpack-1.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a97bf595a232c3ee6d57ddaadd5453d174a52594bf9c21d10407e2a2d9b3bd"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d25dd59bbbbb996eacf7be6b4ad082ed7eacc4e8f3d2df1ba43822da9bfa122a"}, - {file = "msgpack-1.0.5-cp310-cp310-win32.whl", hash = "sha256:382b2c77589331f2cb80b67cc058c00f225e19827dbc818d700f61513ab47bea"}, - {file = "msgpack-1.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:4867aa2df9e2a5fa5f76d7d5565d25ec76e84c106b55509e78c1ede0f152659a"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9f5ae84c5c8a857ec44dc180a8b0cc08238e021f57abdf51a8182e915e6299f0"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e6ca5d5699bcd89ae605c150aee83b5321f2115695e741b99618f4856c50898"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5494ea30d517a3576749cad32fa27f7585c65f5f38309c88c6d137877fa28a5a"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab2f3331cb1b54165976a9d976cb251a83183631c88076613c6c780f0d6e45a"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28592e20bbb1620848256ebc105fc420436af59515793ed27d5c77a217477705"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe5c63197c55bce6385d9aee16c4d0641684628f63ace85f73571e65ad1c1e8d"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed40e926fa2f297e8a653c954b732f125ef97bdd4c889f243182299de27e2aa9"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b2de4c1c0538dcb7010902a2b97f4e00fc4ddf2c8cda9749af0e594d3b7fa3d7"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf22a83f973b50f9d38e55c6aade04c41ddda19b00c4ebc558930d78eecc64ed"}, - {file = "msgpack-1.0.5-cp311-cp311-win32.whl", hash = "sha256:c396e2cc213d12ce017b686e0f53497f94f8ba2b24799c25d913d46c08ec422c"}, - {file = "msgpack-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c4c68d87497f66f96d50142a2b73b97972130d93677ce930718f68828b382e2"}, - {file = "msgpack-1.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a2b031c2e9b9af485d5e3c4520f4220d74f4d222a5b8dc8c1a3ab9448ca79c57"}, - {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f837b93669ce4336e24d08286c38761132bc7ab29782727f8557e1eb21b2080"}, - {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1d46dfe3832660f53b13b925d4e0fa1432b00f5f7210eb3ad3bb9a13c6204a6"}, - {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:366c9a7b9057e1547f4ad51d8facad8b406bab69c7d72c0eb6f529cf76d4b85f"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4c075728a1095efd0634a7dccb06204919a2f67d1893b6aa8e00497258bf926c"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:f933bbda5a3ee63b8834179096923b094b76f0c7a73c1cfe8f07ad608c58844b"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:36961b0568c36027c76e2ae3ca1132e35123dcec0706c4b7992683cc26c1320c"}, - {file = "msgpack-1.0.5-cp36-cp36m-win32.whl", hash = "sha256:b5ef2f015b95f912c2fcab19c36814963b5463f1fb9049846994b007962743e9"}, - {file = "msgpack-1.0.5-cp36-cp36m-win_amd64.whl", hash = 
"sha256:288e32b47e67f7b171f86b030e527e302c91bd3f40fd9033483f2cacc37f327a"}, - {file = "msgpack-1.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:137850656634abddfb88236008339fdaba3178f4751b28f270d2ebe77a563b6c"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c05a4a96585525916b109bb85f8cb6511db1c6f5b9d9cbcbc940dc6b4be944b"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a62ec00b636583e5cb6ad313bbed36bb7ead5fa3a3e38938503142c72cba4f"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef8108f8dedf204bb7b42994abf93882da1159728a2d4c5e82012edd92c9da9f"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1835c84d65f46900920b3708f5ba829fb19b1096c1800ad60bae8418652a951d"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e57916ef1bd0fee4f21c4600e9d1da352d8816b52a599c46460e93a6e9f17086"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:17358523b85973e5f242ad74aa4712b7ee560715562554aa2134d96e7aa4cbbf"}, - {file = "msgpack-1.0.5-cp37-cp37m-win32.whl", hash = "sha256:cb5aaa8c17760909ec6cb15e744c3ebc2ca8918e727216e79607b7bbce9c8f77"}, - {file = "msgpack-1.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:ab31e908d8424d55601ad7075e471b7d0140d4d3dd3272daf39c5c19d936bd82"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b72d0698f86e8d9ddf9442bdedec15b71df3598199ba33322d9711a19f08145c"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:379026812e49258016dd84ad79ac8446922234d498058ae1d415f04b522d5b2d"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:332360ff25469c346a1c5e47cbe2a725517919892eda5cfaffe6046656f0b7bb"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:476a8fe8fae289fdf273d6d2a6cb6e35b5a58541693e8f9f019bfe990a51e4ba"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9985b214f33311df47e274eb788a5893a761d025e2b92c723ba4c63936b69b1"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48296af57cdb1d885843afd73c4656be5c76c0c6328db3440c9601a98f303d87"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:addab7e2e1fcc04bd08e4eb631c2a90960c340e40dfc4a5e24d2ff0d5a3b3edb"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:916723458c25dfb77ff07f4c66aed34e47503b2eb3188b3adbec8d8aa6e00f48"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:821c7e677cc6acf0fd3f7ac664c98803827ae6de594a9f99563e48c5a2f27eb0"}, - {file = "msgpack-1.0.5-cp38-cp38-win32.whl", hash = "sha256:1c0f7c47f0087ffda62961d425e4407961a7ffd2aa004c81b9c07d9269512f6e"}, - {file = "msgpack-1.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:bae7de2026cbfe3782c8b78b0db9cbfc5455e079f1937cb0ab8d133496ac55e1"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:20c784e66b613c7f16f632e7b5e8a1651aa5702463d61394671ba07b2fc9e025"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:266fa4202c0eb94d26822d9bfd7af25d1e2c088927fe8de9033d929dd5ba24c5"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:18334484eafc2b1aa47a6d42427da7fa8f2ab3d60b674120bce7a895a0a85bdd"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57e1f3528bd95cc44684beda696f74d3aaa8a5e58c816214b9046512240ef437"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586d0d636f9a628ddc6a17bfd45aa5b5efaf1606d2b60fa5d87b8986326e933f"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a740fa0e4087a734455f0fc3abf5e746004c9da72fbd541e9b113013c8dc3282"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3055b0455e45810820db1f29d900bf39466df96ddca11dfa6d074fa47054376d"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a61215eac016f391129a013c9e46f3ab308db5f5ec9f25811e811f96962599a8"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:362d9655cd369b08fda06b6657a303eb7172d5279997abe094512e919cf74b11"}, - {file = "msgpack-1.0.5-cp39-cp39-win32.whl", hash = "sha256:ac9dd47af78cae935901a9a500104e2dea2e253207c924cc95de149606dc43cc"}, - {file = "msgpack-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:06f5174b5f8ed0ed919da0e62cbd4ffde676a374aba4020034da05fab67b9164"}, - {file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"}, + {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f4321692e7f299277e55f322329b2c972d93bb612d85f3fda8741bec5c6285ce"}, + {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f0e36a5fa7a182cde391a128a64f437657d2b9371dfa42eda3436245adccbf5"}, + {file = "msgpack-1.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5c8dd9a386a66e50bd7fa22b7a49fb8ead2b3574d6bd69eb1caced6caea0803"}, + {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f85200ea102276afdd3749ca94747f057bbb868d1c52921ee2446730b508d0f"}, + {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a006c300e82402c0c8f1ded11352a3ba2a61b87e7abb3054c845af2ca8d553c"}, + {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bbf47ea5a6ff20c23426106e81863cdbb5402de1825493026ce615039cc99d"}, + {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04450e4b5e1e662e7c86b6aafb7c230af9334fd0becf5e6b80459a507884241c"}, + {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b06a5095a79384760625b5de3f83f40b3053a385fb893be8a106fbbd84c14980"}, + {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3910211b0ab20be3a38e0bb944ed45bd4265d8d9f11a3d1674b95b298e08dd5c"}, + {file = "msgpack-1.0.6-cp310-cp310-win32.whl", hash = "sha256:1dc67b40fe81217b308ab12651adba05e7300b3a2ccf84d6b35a878e308dd8d4"}, + {file = "msgpack-1.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:885de1ed5ea01c1bfe0a34c901152a264c3c1f8f1d382042b92ea354bd14bb0e"}, + {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:099c3d8a027367e1a6fc55d15336f04ff65c60c4f737b5739f7db4525c65fe9e"}, + {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b88dc97ba86c96b964c3745a445d9a65f76fe21955a953064fe04adb63e9367"}, + {file = "msgpack-1.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:00ce5f827d4f26fc094043e6f08b6069c1b148efa2631c47615ae14fb6cafc89"}, + 
{file = "msgpack-1.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd6af61388be65a8701f5787362cb54adae20007e0cc67ca9221a4b95115583b"}, + {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:652e4b7497825b0af6259e2c54700e6dc33d2fc4ed92b8839435090d4c9cc911"}, + {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b08676a17e3f791daad34d5fcb18479e9c85e7200d5a17cbe8de798643a7e37"}, + {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:229ccb6713c8b941eaa5cf13dc7478eba117f21513b5893c35e44483e2f0c9c8"}, + {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:95ade0bd4cf69e04e8b8f8ec2d197d9c9c4a9b6902e048dc7456bf6d82e12a80"}, + {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b16344032a27b2ccfd341f89dadf3e4ef6407d91e4b93563c14644a8abb3ad7"}, + {file = "msgpack-1.0.6-cp311-cp311-win32.whl", hash = "sha256:55bb4a1bf94e39447bc08238a2fb8a767460388a8192f67c103442eb36920887"}, + {file = "msgpack-1.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:ae97504958d0bc58c1152045c170815d5c4f8af906561ce044b6358b43d0c97e"}, + {file = "msgpack-1.0.6-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ecf431786019a7bfedc28281531d706627f603e3691d64eccdbce3ecd353823"}, + {file = "msgpack-1.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a635aecf1047255576dbb0927cbf9a7aa4a68e9d54110cc3c926652d18f144e0"}, + {file = "msgpack-1.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:102cfb54eaefa73e8ca1e784b9352c623524185c98e057e519545131a56fb0af"}, + {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c5e05e4f5756758c58a8088aa10dc70d851c89f842b611fdccfc0581c1846bc"}, + {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68569509dd015fcdd1e6b2b3ccc8c51fd27d9a97f461ccc909270e220ee09685"}, + {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf652839d16de91fe1cfb253e0a88db9a548796939533894e07f45d4bdf90a5f"}, + {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14db7e1b7a7ed362b2f94897bf2486c899c8bb50f6e34b2db92fe534cdab306f"}, + {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:159cfec18a6e125dd4723e2b1de6f202b34b87c850fb9d509acfd054c01135e9"}, + {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6a01a072b2219b65a6ff74df208f20b2cac9401c60adb676ee34e53b4c651077"}, + {file = "msgpack-1.0.6-cp312-cp312-win32.whl", hash = "sha256:e36560d001d4ba469d469b02037f2dd404421fd72277d9474efe9f03f83fced5"}, + {file = "msgpack-1.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:5e7fae9ca93258a956551708cf60dc6c8145574e32ce8c8c4d894e63bcb04341"}, + {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:40b801b768f5a765e33c68f30665d3c6ee1c8623a2d2bb78e6e59f2db4e4ceb7"}, + {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da057d3652e698b00746e47f06dbb513314f847421e857e32e1dc61c46f6c052"}, + {file = "msgpack-1.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f75114c05ec56566da6b55122791cf5bb53d5aada96a98c016d6231e03132f76"}, + {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61213482b5a387ead9e250e9e3cb290292feca39dc83b41c3b1b7b8ffc8d8ecb"}, + {file = 
"msgpack-1.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae6c561f11b444b258b1b4be2bdd1e1cf93cd1d80766b7e869a79db4543a8a8"}, + {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:619a63753ba9e792fe3c6c0fc2b9ee2cfbd92153dd91bee029a89a71eb2942cd"}, + {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:70843788c85ca385846a2d2f836efebe7bb2687ca0734648bf5c9dc6c55602d2"}, + {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fb4571efe86545b772a4630fee578c213c91cbcfd20347806e47fd4e782a18fe"}, + {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bbb4448a05d261fae423d5c0b0974ad899f60825bc77eabad5a0c518e78448c2"}, + {file = "msgpack-1.0.6-cp38-cp38-win32.whl", hash = "sha256:5cd67674db3c73026e0a2c729b909780e88bd9cbc8184256f9567640a5d299a8"}, + {file = "msgpack-1.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:a1cf98afa7ad5e7012454ca3fde254499a13f9d92fd50cb46118118a249a1355"}, + {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d6d25b8a5c70e2334ed61a8da4c11cd9b97c6fbd980c406033f06e4463fda006"}, + {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88cdb1da7fdb121dbb3116910722f5acab4d6e8bfcacab8fafe27e2e7744dc6a"}, + {file = "msgpack-1.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3b5658b1f9e486a2eec4c0c688f213a90085b9cf2fec76ef08f98fdf6c62f4b9"}, + {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76820f2ece3b0a7c948bbb6a599020e29574626d23a649476def023cbb026787"}, + {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c780d992f5d734432726b92a0c87bf1857c3d85082a8dea29cbf56e44a132b3"}, + {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0ed35d6d6122d0baa9a1b59ebca4ee302139f4cfb57dab85e4c73ab793ae7ed"}, + {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:32c0aff31f33033f4961abc01f78497e5e07bac02a508632aef394b384d27428"}, + {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:35ad5aed9b52217d4cea739d0ea3a492a18dd86fecb4b132668a69f27fb0363b"}, + {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47275ff73005a3e5e146e50baa2378e1730cba6e292f0222bc496a8e4c4adfc8"}, + {file = "msgpack-1.0.6-cp39-cp39-win32.whl", hash = "sha256:7baf16fd8908a025c4a8d7b699103e72d41f967e2aee5a2065432bcdbd9fd06e"}, + {file = "msgpack-1.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:fc97aa4b4fb928ff4d3b74da7c30b360d0cb3ede49a5a6e1fd9705f49aea1deb"}, + {file = "msgpack-1.0.6.tar.gz", hash = "sha256:25d3746da40f3c8c59c3b1d001e49fd2aa17904438f980d9a391370366df001e"}, ] [[package]] @@ -1444,24 +1384,6 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "networkx" -version = "3.1" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.8" -files = [ - {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, - {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, -] - -[package.extras] -default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] 
-developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] -test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] - [[package]] name = "numpy" version = "1.24.4" @@ -1499,15 +1421,49 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "numpy" +version = "1.25.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db3ccc4e37a6873045580d413fe79b68e47a681af8db2e046f1dacfa11f86eb3"}, + {file = "numpy-1.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90319e4f002795ccfc9050110bbbaa16c944b1c37c0baeea43c5fb881693ae1f"}, + {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4a913e29b418d096e696ddd422d8a5d13ffba4ea91f9f60440a3b759b0187"}, + {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08f2e037bba04e707eebf4bc934f1972a315c883a9e0ebfa8a7756eabf9e357"}, + {file = "numpy-1.25.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bec1e7213c7cb00d67093247f8c4db156fd03075f49876957dca4711306d39c9"}, + {file = "numpy-1.25.2-cp310-cp310-win32.whl", hash = "sha256:7dc869c0c75988e1c693d0e2d5b26034644399dd929bc049db55395b1379e044"}, + {file = "numpy-1.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:834b386f2b8210dca38c71a6e0f4fd6922f7d3fcff935dbe3a570945acb1b545"}, + {file = "numpy-1.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5462d19336db4560041517dbb7759c21d181a67cb01b36ca109b2ae37d32418"}, + {file = "numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5652ea24d33585ea39eb6a6a15dac87a1206a692719ff45d53c5282e66d4a8f"}, + {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2"}, + {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e7f0f7f6d0eee8364b9a6304c2845b9c491ac706048c7e8cf47b83123b8dbf"}, + {file = "numpy-1.25.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb33d5a1cf360304754913a350edda36d5b8c5331a8237268c48f91253c3a364"}, + {file = "numpy-1.25.2-cp311-cp311-win32.whl", hash = "sha256:5883c06bb92f2e6c8181df7b39971a5fb436288db58b5a1c3967702d4278691d"}, + {file = "numpy-1.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:5c97325a0ba6f9d041feb9390924614b60b99209a71a69c876f71052521d42a4"}, + {file = "numpy-1.25.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b79e513d7aac42ae918db3ad1341a015488530d0bb2a6abcbdd10a3a829ccfd3"}, + {file = "numpy-1.25.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb942bfb6f84df5ce05dbf4b46673ffed0d3da59f13635ea9b926af3deb76926"}, + {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0746410e73384e70d286f93abf2520035250aad8c5714240b0492a7302fdca"}, + {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7806500e4f5bdd04095e849265e55de20d8cc4b661b038957354327f6d9b295"}, + {file = "numpy-1.25.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:8b77775f4b7df768967a7c8b3567e309f617dd5e99aeb886fa14dc1a0791141f"}, + {file = "numpy-1.25.2-cp39-cp39-win32.whl", hash = "sha256:2792d23d62ec51e50ce4d4b7d73de8f67a2fd3ea710dcbc8563a51a03fb07b01"}, + {file = "numpy-1.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:76b4115d42a7dfc5d485d358728cdd8719be33cc5ec6ec08632a5d6fca2ed380"}, + {file = "numpy-1.25.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a1329e26f46230bf77b02cc19e900db9b52f398d6722ca853349a782d4cff55"}, + {file = "numpy-1.25.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3abc71e8b6edba80a01a52e66d83c5d14433cbcd26a40c329ec7ed09f37901"}, + {file = "numpy-1.25.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b9735c27cea5d995496f46a8b1cd7b408b3f34b6d50459d9ac8fe3a20cc17bf"}, + {file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"}, +] + [[package]] name = "nvidia-ml-py" -version = "12.535.77" +version = "12.535.108" description = "Python Bindings for the NVIDIA Management Library" optional = false python-versions = "*" files = [ - {file = "nvidia-ml-py-12.535.77.tar.gz", hash = "sha256:8d81e5ed993c84006454102af84c4bffdf72ba5c51212b6c0121c65688983e14"}, - {file = "nvidia_ml_py-12.535.77-py3-none-any.whl", hash = "sha256:ae246ec810a05438375ce345e35171bc3f4a906487e9ea2632473d7e4f4bd375"}, + {file = "nvidia-ml-py-12.535.108.tar.gz", hash = "sha256:141fe818771a165fb93f75dbe7f01f767c3bafa7c13f6876f53583511b078ee1"}, + {file = "nvidia_ml_py-12.535.108-py3-none-any.whl", hash = "sha256:f4e260ad0adb06d7ca1ea5574862ed4ef70f0a17720836854594fe188a3acaf4"}, ] [[package]] @@ -1528,13 +1484,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "opencensus" -version = "0.11.2" +version = "0.11.3" description = "A stats collection and distributed tracing framework" optional = false python-versions = "*" files = [ - {file = "opencensus-0.11.2-py2.py3-none-any.whl", hash = "sha256:7a1a34b87c8db3d9984e97ff05739058342f24de1d700766d59044eee8fb3b3f"}, - {file = "opencensus-0.11.2.tar.gz", hash = "sha256:6154042a236b9ecdd55a23dfbb2743bb3deacd0687e3e0391ec2e0c74950d66f"}, + {file = "opencensus-0.11.3-py2.py3-none-any.whl", hash = "sha256:9c33d572059f0f0e874fc34c697a39a4193aa9cf3203f7e777df42e9edeea56a"}, + {file = "opencensus-0.11.3.tar.gz", hash = "sha256:af7a98bd51e63968144d772f346d696ed498a32dbdc4be267cd6011c4ce05da8"}, ] [package.dependencies] @@ -1643,13 +1599,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -1672,24 +1628,24 @@ twisted = ["twisted"] [[package]] name = "protobuf" -version = "4.24.0" +version = "4.24.3" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.24.0-cp310-abi3-win32.whl", hash = 
"sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52"}, - {file = "protobuf-4.24.0-cp310-abi3-win_amd64.whl", hash = "sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3"}, - {file = "protobuf-4.24.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5"}, - {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d"}, - {file = "protobuf-4.24.0-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7"}, - {file = "protobuf-4.24.0-cp37-cp37m-win32.whl", hash = "sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04"}, - {file = "protobuf-4.24.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61"}, - {file = "protobuf-4.24.0-cp38-cp38-win32.whl", hash = "sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653"}, - {file = "protobuf-4.24.0-cp38-cp38-win_amd64.whl", hash = "sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109"}, - {file = "protobuf-4.24.0-cp39-cp39-win32.whl", hash = "sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e"}, - {file = "protobuf-4.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf"}, - {file = "protobuf-4.24.0-py3-none-any.whl", hash = "sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201"}, - {file = "protobuf-4.24.0.tar.gz", hash = "sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85"}, + {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"}, + {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"}, + {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"}, + {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"}, + {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"}, + {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"}, + {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"}, + {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"}, + {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"}, + {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"}, + {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"}, + {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"}, + {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"}, ] [[package]] @@ -1877,42 +1833,6 @@ files = [ typing-extensions = "*" 
typing-inspect = "*" -[[package]] -name = "pyrsistent" -version = "0.19.3" -description = "Persistent/Functional/Immutable data structures" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, - {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, - {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, - {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, - {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, - {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, -] - [[package]] name = "pytest" version = "7.4.0" @@ -1966,37 +1886,6 @@ files = [ [package.dependencies] six = ">=1.5" -[[package]] -name = "pytorch-lightning" -version = "2.0.8" -description = "PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers. Scale your models. Write less boilerplate." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytorch-lightning-2.0.8.tar.gz", hash = "sha256:fb7e8fbe473158b2c1666b6b31fb996c8aa2c3f5e8e2a54a3f50a7b5b2d00a20"}, - {file = "pytorch_lightning-2.0.8-py3-none-any.whl", hash = "sha256:718d11f22551d95ef38614b4727433553c95ea2b50cf843938fb13baf34325a6"}, -] - -[package.dependencies] -fsspec = {version = ">2021.06.0", extras = ["http"]} -lightning-utilities = ">=0.7.0" -numpy = ">=1.17.2" -packaging = ">=17.1" -PyYAML = ">=5.4" -torch = ">=1.11.0" -torchmetrics = ">=0.7.0" -tqdm = ">=4.57.0" -typing-extensions = ">=4.0.0" - -[package.extras] -all = ["deepspeed (>=0.8.2)", "gym[classic-control] (>=0.17.0)", "hydra-core (>=1.0.5)", "ipython[all] (<8.14.1)", "jsonargparse[signatures] (>=4.18.0,<4.23.0)", "lightning-utilities (>=0.7.0)", "matplotlib (>3.1)", "omegaconf (>=2.0.5)", "rich (>=12.3.0)", "tensorboardX (>=2.2)", "torchmetrics (>=0.10.0)", "torchvision (>=0.12.0)"] -deepspeed = ["deepspeed (>=0.8.2)"] -dev = ["cloudpickle (>=1.3)", "coverage (==7.3.0)", "deepspeed (>=0.8.2)", "fastapi (<0.100.0)", "gym[classic-control] (>=0.17.0)", "hydra-core (>=1.0.5)", "ipython[all] (<8.14.1)", "jsonargparse[signatures] (>=4.18.0,<4.23.0)", "lightning-utilities (>=0.7.0)", "matplotlib (>3.1)", "omegaconf (>=2.0.5)", "onnx (<1.15.0)", "onnxruntime (<1.16.0)", "pandas (>1.0)", "protobuf (<=3.20.1)", "psutil (<5.9.6)", "pytest (==7.4.0)", "pytest-cov (==4.1.0)", "pytest-forked (==1.4.0)", "pytest-rerunfailures (==10.3)", "rich (>=12.3.0)", "scikit-learn (>0.22.1)", "tensorboard (>=2.9.1)", "tensorboardX (>=2.2)", "torchmetrics (>=0.10.0)", "torchvision (>=0.12.0)", "uvicorn (<0.23.3)"] -examples = ["gym[classic-control] (>=0.17.0)", "ipython[all] (<8.14.1)", "lightning-utilities (>=0.7.0)", "torchmetrics (>=0.10.0)", "torchvision (>=0.12.0)"] -extra = ["hydra-core (>=1.0.5)", "jsonargparse[signatures] (>=4.18.0,<4.23.0)", "matplotlib (>3.1)", "omegaconf (>=2.0.5)", "rich (>=12.3.0)", "tensorboardX (>=2.2)"] -strategies = ["deepspeed (>=0.8.2)"] -test = ["cloudpickle (>=1.3)", "coverage (==7.3.0)", "fastapi (<0.100.0)", "onnx (<1.15.0)", "onnxruntime (<1.16.0)", "pandas (>1.0)", "protobuf (<=3.20.1)", "psutil (<5.9.6)", "pytest (==7.4.0)", "pytest-cov (==4.1.0)", 
"pytest-forked (==1.4.0)", "pytest-rerunfailures (==10.3)", "scikit-learn (>0.22.1)", "tensorboard (>=2.9.1)", "uvicorn (<0.23.3)"] - [[package]] name = "pywin32" version = "306" @@ -2032,6 +1921,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2039,8 +1929,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2057,6 +1954,7 
@@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2064,6 +1962,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2145,6 +2044,21 @@ serve = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", train = ["pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] tune = ["pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] +[[package]] +name = "referencing" +version = "0.30.2" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, + {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + [[package]] name = "requests" version = "2.31.0" @@ -2203,6 +2117,112 @@ typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9 [package.extras] jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] +[[package]] +name = "rpds-py" +version = "0.10.3" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.10.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:485747ee62da83366a44fbba963c5fe017860ad408ccd6cd99aa66ea80d32b2e"}, + {file = "rpds_py-0.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c55f9821f88e8bee4b7a72c82cfb5ecd22b6aad04033334f33c329b29bfa4da0"}, + {file = 
"rpds_py-0.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3b52a67ac66a3a64a7e710ba629f62d1e26ca0504c29ee8cbd99b97df7079a8"}, + {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3aed39db2f0ace76faa94f465d4234aac72e2f32b009f15da6492a561b3bbebd"}, + {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271c360fdc464fe6a75f13ea0c08ddf71a321f4c55fc20a3fe62ea3ef09df7d9"}, + {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef5fddfb264e89c435be4adb3953cef5d2936fdeb4463b4161a6ba2f22e7b740"}, + {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771417c9c06c56c9d53d11a5b084d1de75de82978e23c544270ab25e7c066ff"}, + {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:52b5cbc0469328e58180021138207e6ec91d7ca2e037d3549cc9e34e2187330a"}, + {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6ac3fefb0d168c7c6cab24fdfc80ec62cd2b4dfd9e65b84bdceb1cb01d385c33"}, + {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8d54bbdf5d56e2c8cf81a1857250f3ea132de77af543d0ba5dce667183b61fec"}, + {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cd2163f42868865597d89399a01aa33b7594ce8e2c4a28503127c81a2f17784e"}, + {file = "rpds_py-0.10.3-cp310-none-win32.whl", hash = "sha256:ea93163472db26ac6043e8f7f93a05d9b59e0505c760da2a3cd22c7dd7111391"}, + {file = "rpds_py-0.10.3-cp310-none-win_amd64.whl", hash = "sha256:7cd020b1fb41e3ab7716d4d2c3972d4588fdfbab9bfbbb64acc7078eccef8860"}, + {file = "rpds_py-0.10.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:1d9b5ee46dcb498fa3e46d4dfabcb531e1f2e76b477e0d99ef114f17bbd38453"}, + {file = "rpds_py-0.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:563646d74a4b4456d0cf3b714ca522e725243c603e8254ad85c3b59b7c0c4bf0"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e626b864725680cd3904414d72e7b0bd81c0e5b2b53a5b30b4273034253bb41f"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485301ee56ce87a51ccb182a4b180d852c5cb2b3cb3a82f7d4714b4141119d8c"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42f712b4668831c0cd85e0a5b5a308700fe068e37dcd24c0062904c4e372b093"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c9141af27a4e5819d74d67d227d5047a20fa3c7d4d9df43037a955b4c748ec5"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef750a20de1b65657a1425f77c525b0183eac63fe7b8f5ac0dd16f3668d3e64f"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1a0ffc39f51aa5f5c22114a8f1906b3c17eba68c5babb86c5f77d8b1bba14d1"}, + {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f4c179a7aeae10ddf44c6bac87938134c1379c49c884529f090f9bf05566c836"}, + {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:176287bb998fd1e9846a9b666e240e58f8d3373e3bf87e7642f15af5405187b8"}, + {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6446002739ca29249f0beaaf067fcbc2b5aab4bc7ee8fb941bd194947ce19aff"}, + {file = "rpds_py-0.10.3-cp311-none-win32.whl", hash = 
"sha256:c7aed97f2e676561416c927b063802c8a6285e9b55e1b83213dfd99a8f4f9e48"}, + {file = "rpds_py-0.10.3-cp311-none-win_amd64.whl", hash = "sha256:8bd01ff4032abaed03f2db702fa9a61078bee37add0bd884a6190b05e63b028c"}, + {file = "rpds_py-0.10.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:4cf0855a842c5b5c391dd32ca273b09e86abf8367572073bd1edfc52bc44446b"}, + {file = "rpds_py-0.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:69b857a7d8bd4f5d6e0db4086da8c46309a26e8cefdfc778c0c5cc17d4b11e08"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:975382d9aa90dc59253d6a83a5ca72e07f4ada3ae3d6c0575ced513db322b8ec"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35fbd23c1c8732cde7a94abe7fb071ec173c2f58c0bd0d7e5b669fdfc80a2c7b"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:106af1653007cc569d5fbb5f08c6648a49fe4de74c2df814e234e282ebc06957"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce5e7504db95b76fc89055c7f41e367eaadef5b1d059e27e1d6eabf2b55ca314"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aca759ada6b1967fcfd4336dcf460d02a8a23e6abe06e90ea7881e5c22c4de6"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b5d4bdd697195f3876d134101c40c7d06d46c6ab25159ed5cbd44105c715278a"}, + {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a657250807b6efd19b28f5922520ae002a54cb43c2401e6f3d0230c352564d25"}, + {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:177c9dd834cdf4dc39c27436ade6fdf9fe81484758885f2d616d5d03c0a83bd2"}, + {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e22491d25f97199fc3581ad8dd8ce198d8c8fdb8dae80dea3512e1ce6d5fa99f"}, + {file = "rpds_py-0.10.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:2f3e1867dd574014253b4b8f01ba443b9c914e61d45f3674e452a915d6e929a3"}, + {file = "rpds_py-0.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c22211c165166de6683de8136229721f3d5c8606cc2c3d1562da9a3a5058049c"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40bc802a696887b14c002edd43c18082cb7b6f9ee8b838239b03b56574d97f71"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e271dd97c7bb8eefda5cca38cd0b0373a1fea50f71e8071376b46968582af9b"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95cde244e7195b2c07ec9b73fa4c5026d4a27233451485caa1cd0c1b55f26dbd"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a80cf4884920863623a9ee9a285ee04cef57ebedc1cc87b3e3e0f24c8acfe5"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763ad59e105fca09705d9f9b29ecffb95ecdc3b0363be3bb56081b2c6de7977a"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:187700668c018a7e76e89424b7c1042f317c8df9161f00c0c903c82b0a8cac5c"}, + {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5267cfda873ad62591b9332fd9472d2409f7cf02a34a9c9cb367e2c0255994bf"}, + {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:2ed83d53a8c5902ec48b90b2ac045e28e1698c0bea9441af9409fc844dc79496"}, + {file = 
"rpds_py-0.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:255f1a10ae39b52122cce26ce0781f7a616f502feecce9e616976f6a87992d6b"}, + {file = "rpds_py-0.10.3-cp38-none-win32.whl", hash = "sha256:a019a344312d0b1f429c00d49c3be62fa273d4a1094e1b224f403716b6d03be1"}, + {file = "rpds_py-0.10.3-cp38-none-win_amd64.whl", hash = "sha256:efb9ece97e696bb56e31166a9dd7919f8f0c6b31967b454718c6509f29ef6fee"}, + {file = "rpds_py-0.10.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:570cc326e78ff23dec7f41487aa9c3dffd02e5ee9ab43a8f6ccc3df8f9327623"}, + {file = "rpds_py-0.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cff7351c251c7546407827b6a37bcef6416304fc54d12d44dbfecbb717064717"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:177914f81f66c86c012311f8c7f46887ec375cfcfd2a2f28233a3053ac93a569"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:448a66b8266de0b581246ca7cd6a73b8d98d15100fb7165974535fa3b577340e"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bbac1953c17252f9cc675bb19372444aadf0179b5df575ac4b56faaec9f6294"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dd9d9d9e898b9d30683bdd2b6c1849449158647d1049a125879cb397ee9cd12"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8c71ea77536149e36c4c784f6d420ffd20bea041e3ba21ed021cb40ce58e2c9"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16a472300bc6c83fe4c2072cc22b3972f90d718d56f241adabc7ae509f53f154"}, + {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9255e7165083de7c1d605e818025e8860636348f34a79d84ec533546064f07e"}, + {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:53d7a3cd46cdc1689296348cb05ffd4f4280035770aee0c8ead3bbd4d6529acc"}, + {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22da15b902f9f8e267020d1c8bcfc4831ca646fecb60254f7bc71763569f56b1"}, + {file = "rpds_py-0.10.3-cp39-none-win32.whl", hash = "sha256:850c272e0e0d1a5c5d73b1b7871b0a7c2446b304cec55ccdb3eaac0d792bb065"}, + {file = "rpds_py-0.10.3-cp39-none-win_amd64.whl", hash = "sha256:de61e424062173b4f70eec07e12469edde7e17fa180019a2a0d75c13a5c5dc57"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:af247fd4f12cca4129c1b82090244ea5a9d5bb089e9a82feb5a2f7c6a9fe181d"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ad59efe24a4d54c2742929001f2d02803aafc15d6d781c21379e3f7f66ec842"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642ed0a209ced4be3a46f8cb094f2d76f1f479e2a1ceca6de6346a096cd3409d"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37d0c59548ae56fae01c14998918d04ee0d5d3277363c10208eef8c4e2b68ed6"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad6ed9e70ddfb34d849b761fb243be58c735be6a9265b9060d6ddb77751e3e8"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f94fdd756ba1f79f988855d948ae0bad9ddf44df296770d9a58c774cfbcca72"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:77076bdc8776a2b029e1e6ffbe6d7056e35f56f5e80d9dc0bad26ad4a024a762"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87d9b206b1bd7a0523375dc2020a6ce88bca5330682ae2fe25e86fd5d45cea9c"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8efaeb08ede95066da3a3e3c420fcc0a21693fcd0c4396d0585b019613d28515"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a4d9bfda3f84fc563868fe25ca160c8ff0e69bc4443c5647f960d59400ce6557"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d27aa6bbc1f33be920bb7adbb95581452cdf23005d5611b29a12bb6a3468cc95"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ed8313809571a5463fd7db43aaca68ecb43ca7a58f5b23b6e6c6c5d02bdc7882"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:e10e6a1ed2b8661201e79dff5531f8ad4cdd83548a0f81c95cf79b3184b20c33"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:015de2ce2af1586ff5dc873e804434185199a15f7d96920ce67e50604592cae9"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae87137951bb3dc08c7d8bfb8988d8c119f3230731b08a71146e84aaa919a7a9"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bb4f48bd0dd18eebe826395e6a48b7331291078a879295bae4e5d053be50d4c"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09362f86ec201288d5687d1dc476b07bf39c08478cde837cb710b302864e7ec9"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821392559d37759caa67d622d0d2994c7a3f2fb29274948ac799d496d92bca73"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7170cbde4070dc3c77dec82abf86f3b210633d4f89550fa0ad2d4b549a05572a"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:5de11c041486681ce854c814844f4ce3282b6ea1656faae19208ebe09d31c5b8"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:4ed172d0c79f156c1b954e99c03bc2e3033c17efce8dd1a7c781bc4d5793dfac"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:11fdd1192240dda8d6c5d18a06146e9045cb7e3ba7c06de6973000ff035df7c6"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f602881d80ee4228a2355c68da6b296a296cd22bbb91e5418d54577bbf17fa7c"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:691d50c99a937709ac4c4cd570d959a006bd6a6d970a484c84cc99543d4a5bbb"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24cd91a03543a0f8d09cb18d1cb27df80a84b5553d2bd94cba5979ef6af5c6e7"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc2200e79d75b5238c8d69f6a30f8284290c777039d331e7340b6c17cad24a5a"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea65b59882d5fa8c74a23f8960db579e5e341534934f43f3b18ec1839b893e41"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:829e91f3a8574888b73e7a3feb3b1af698e717513597e23136ff4eba0bc8387a"}, + {file = 
"rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eab75a8569a095f2ad470b342f2751d9902f7944704f0571c8af46bede438475"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:061c3ff1f51ecec256e916cf71cc01f9975af8fb3af9b94d3c0cc8702cfea637"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:39d05e65f23a0fe897b6ac395f2a8d48c56ac0f583f5d663e0afec1da89b95da"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eca20917a06d2fca7628ef3c8b94a8c358f6b43f1a621c9815243462dcccf97"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e8d0f0eca087630d58b8c662085529781fd5dc80f0a54eda42d5c9029f812599"}, + {file = "rpds_py-0.10.3.tar.gz", hash = "sha256:fcc1ebb7561a3e24a6588f7c6ded15d80aec22c66a070c757559b57b17ffd1cb"}, +] + [[package]] name = "rsa" version = "4.9" @@ -2219,19 +2239,19 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "68.0.0" +version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, - {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs 
(>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2246,13 +2266,13 @@ files = [ [[package]] name = "smart-open" -version = "6.3.0" +version = "6.4.0" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" optional = false python-versions = ">=3.6,<4.0" files = [ - {file = "smart_open-6.3.0-py3-none-any.whl", hash = "sha256:b4c9ae193ad6d3e7add50944b86afa0d150bd821ab8ec21edb26d9a06b66f6a8"}, - {file = "smart_open-6.3.0.tar.gz", hash = "sha256:d5238825fe9a9340645fac3d75b287c08fbb99fb2b422477de781c9f5f09e019"}, + {file = "smart_open-6.4.0-py3-none-any.whl", hash = "sha256:8d3ef7e6997e8e42dd55c74166ed21e6ac70664caa32dd940b26d54a8f6b4142"}, + {file = "smart_open-6.4.0.tar.gz", hash = "sha256:be3c92c246fbe80ebce8fbacb180494a481a77fcdcb7c1aadb2ea5b9c2bee8b9"}, ] [package.extras] @@ -2265,20 +2285,6 @@ ssh = ["paramiko"] test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] webhdfs = ["requests"] -[[package]] -name = "sympy" -version = "1.12" -description = "Computer algebra system (CAS) in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sympy-1.12-py3-none-any.whl", hash = "sha256:c3588cd4295d0c0f603d0f2ae780587e64e2efeedb3521e46b9bb1d08d184fa5"}, - {file = "sympy-1.12.tar.gz", hash = "sha256:ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8"}, -] - -[package.dependencies] -mpmath = ">=0.19" - [[package]] name = "tabulate" version = "0.9.0" @@ -2304,103 +2310,15 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "torch" -version = "2.0.1" -description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "torch-2.0.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:8ced00b3ba471856b993822508f77c98f48a458623596a4c43136158781e306a"}, - {file = "torch-2.0.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:359bfaad94d1cda02ab775dc1cc386d585712329bb47b8741607ef6ef4950747"}, - {file = "torch-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:7c84e44d9002182edd859f3400deaa7410f5ec948a519cc7ef512c2f9b34d2c4"}, - {file = "torch-2.0.1-cp310-none-macosx_10_9_x86_64.whl", hash = "sha256:567f84d657edc5582d716900543e6e62353dbe275e61cdc36eda4929e46df9e7"}, - {file = "torch-2.0.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:787b5a78aa7917465e9b96399b883920c88a08f4eb63b5a5d2d1a16e27d2f89b"}, - {file = "torch-2.0.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:e617b1d0abaf6ced02dbb9486803abfef0d581609b09641b34fa315c9c40766d"}, - {file = "torch-2.0.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b6019b1de4978e96daa21d6a3ebb41e88a0b474898fe251fd96189587408873e"}, - {file = "torch-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:dbd68cbd1cd9da32fe5d294dd3411509b3d841baecb780b38b3b7b06c7754434"}, - {file = "torch-2.0.1-cp311-none-macosx_10_9_x86_64.whl", hash = "sha256:ef654427d91600129864644e35deea761fb1fe131710180b952a6f2e2207075e"}, - {file = "torch-2.0.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:25aa43ca80dcdf32f13da04c503ec7afdf8e77e3a0183dd85cd3e53b2842e527"}, - {file = "torch-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:5ef3ea3d25441d3957348f7e99c7824d33798258a2bf5f0f0277cbcadad2e20d"}, - {file = "torch-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0882243755ff28895e8e6dc6bc26ebcf5aa0911ed81b2a12f241fc4b09075b13"}, - {file = "torch-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:f66aa6b9580a22b04d0af54fcd042f52406a8479e2b6a550e3d9f95963e168c8"}, - {file = "torch-2.0.1-cp38-none-macosx_10_9_x86_64.whl", hash = "sha256:1adb60d369f2650cac8e9a95b1d5758e25d526a34808f7448d0bd599e4ae9072"}, - {file = "torch-2.0.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:1bcffc16b89e296826b33b98db5166f990e3b72654a2b90673e817b16c50e32b"}, - {file = "torch-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:e10e1597f2175365285db1b24019eb6f04d53dcd626c735fc502f1e8b6be9875"}, - {file = "torch-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:423e0ae257b756bb45a4b49072046772d1ad0c592265c5080070e0767da4e490"}, - {file = "torch-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8742bdc62946c93f75ff92da00e3803216c6cce9b132fbca69664ca38cfb3e18"}, - {file = "torch-2.0.1-cp39-none-macosx_10_9_x86_64.whl", hash = "sha256:c62df99352bd6ee5a5a8d1832452110435d178b5164de450831a3a8cc14dc680"}, - {file = "torch-2.0.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:671a2565e3f63b8fe8e42ae3e36ad249fe5e567435ea27b94edaa672a7d0c416"}, -] - -[package.dependencies] -filelock = "*" -jinja2 = "*" -networkx = "*" -sympy = "*" -typing-extensions = "*" - -[package.extras] -opt-einsum = ["opt-einsum (>=3.3)"] - -[[package]] -name = "torchmetrics" -version = "1.1.1" -description = "PyTorch native Metrics" -optional = false -python-versions = ">=3.8" -files = [ - {file = "torchmetrics-1.1.1-py3-none-any.whl", hash = "sha256:903b4fc30537acfc5221505c48f7627e58dbf6d9dea85c16ea7b4323f9e13793"}, - {file = "torchmetrics-1.1.1.tar.gz", hash = "sha256:65ea34205c0506eecfd06b98f63f4d2a2c5c0e17367cf324e1747adc854c80a5"}, -] - -[package.dependencies] -lightning-utilities = ">=0.8.0" -numpy = ">1.20.0" -torch = ">=1.8.1" -typing-extensions = {version = "*", markers = "python_version < \"3.9\""} - -[package.extras] -all = ["SciencePlots (>=2.0.0)", "lpips (<=0.1.4)", "matplotlib (>=3.2.0)", "mypy (==1.5.1)", "nltk (>=3.6)", "piq (<=0.8.0)", "pycocotools (>2.0.0)", "pystoi (>=0.3.0)", "regex (>=2021.9.24)", "scipy (>1.0.0)", "torch-fidelity (<=0.4.0)", "torchaudio (>=0.10.0)", "torchvision (>=0.8)", "tqdm (>=4.41.0)", "transformers (>4.4.0)", "transformers (>=4.10.0)", "types-PyYAML", "types-emoji", "types-protobuf", "types-requests", "types-setuptools", "types-six", "types-tabulate"] -audio = ["pystoi (>=0.3.0)", "torchaudio (>=0.10.0)"] -detection = ["pycocotools (>2.0.0)", "torchvision (>=0.8)"] -dev = ["SciencePlots (>=2.0.0)", "bert-score (==0.3.13)", "cloudpickle (>1.3)", "coverage (==7.3.0)", "dython (<=0.7.4)", "fairlearn", "fast-bss-eval (>=0.1.0)", "fire (<=0.5.0)", "huggingface-hub (<0.16)", "jiwer (>=2.3.0)", "kornia (>=0.6.7)", "lpips (<=0.1.4)", "matplotlib (>=3.2.0)", "mir-eval (>=0.6)", "mypy (==1.5.1)", "netcal (>1.0.0)", "nltk (>=3.6)", "numpy (<1.25.0)", "pandas (>1.0.0)", "pandas (>=1.4.0)", "phmdoctest (==1.4.0)", "piq (<=0.8.0)", "psutil (<=5.9.5)", "pycocotools (>2.0.0)", "pystoi (>=0.3.0)", "pytest (==7.4.0)", "pytest-cov (==4.1.0)", "pytest-doctestplus (==1.0.0)", "pytest-rerunfailures (==12.0)", "pytest-timeout (==2.1.0)", "pytorch-msssim (==1.0.0)", "regex (>=2021.9.24)", "requests (<=2.31.0)", "rouge-score (>0.1.0)", "sacrebleu (>=2.0.0)", "scikit-image (>=0.19.0)", "scikit-learn (>=1.1.1)", "scipy (>1.0.0)", 
"sewar (>=0.4.4)", "statsmodels (>0.13.5)", "torch-complex (<=0.4.3)", "torch-fidelity (<=0.4.0)", "torchaudio (>=0.10.0)", "torchvision (>=0.8)", "tqdm (>=4.41.0)", "transformers (>4.4.0)", "transformers (>=4.10.0)", "types-PyYAML", "types-emoji", "types-protobuf", "types-requests", "types-setuptools", "types-six", "types-tabulate"] -image = ["lpips (<=0.1.4)", "scipy (>1.0.0)", "torch-fidelity (<=0.4.0)", "torchvision (>=0.8)"] -multimodal = ["piq (<=0.8.0)", "transformers (>=4.10.0)"] -test = ["bert-score (==0.3.13)", "cloudpickle (>1.3)", "coverage (==7.3.0)", "dython (<=0.7.4)", "fairlearn", "fast-bss-eval (>=0.1.0)", "fire (<=0.5.0)", "huggingface-hub (<0.16)", "jiwer (>=2.3.0)", "kornia (>=0.6.7)", "mir-eval (>=0.6)", "netcal (>1.0.0)", "numpy (<1.25.0)", "pandas (>1.0.0)", "pandas (>=1.4.0)", "phmdoctest (==1.4.0)", "psutil (<=5.9.5)", "pytest (==7.4.0)", "pytest-cov (==4.1.0)", "pytest-doctestplus (==1.0.0)", "pytest-rerunfailures (==12.0)", "pytest-timeout (==2.1.0)", "pytorch-msssim (==1.0.0)", "requests (<=2.31.0)", "rouge-score (>0.1.0)", "sacrebleu (>=2.0.0)", "scikit-image (>=0.19.0)", "scikit-learn (>=1.1.1)", "scipy (>1.0.0)", "sewar (>=0.4.4)", "statsmodels (>0.13.5)", "torch-complex (<=0.4.3)"] -text = ["nltk (>=3.6)", "regex (>=2021.9.24)", "tqdm (>=4.41.0)", "transformers (>4.4.0)"] -typing = ["mypy (==1.5.1)", "types-PyYAML", "types-emoji", "types-protobuf", "types-requests", "types-setuptools", "types-six", "types-tabulate"] -visual = ["SciencePlots (>=2.0.0)", "matplotlib (>=3.2.0)"] - -[[package]] -name = "tqdm" -version = "4.66.1" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] @@ -2467,17 +2385,17 @@ files = [ [[package]] name = "websocket-client" -version = "1.6.1" +version = "1.6.3" description = "WebSocket client for Python with low level API options" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.1.tar.gz", hash = "sha256:c951af98631d24f8df89ab1019fc365f2227c0892f12fd150e935607c79dd0dd"}, - {file = "websocket_client-1.6.1-py3-none-any.whl", hash = 
"sha256:f1f9f2ad5291f0225a49efad77abf9e700b6fef553900623060dad6e26503b9d"}, + {file = "websocket-client-1.6.3.tar.gz", hash = "sha256:3aad25d31284266bcfcfd1fd8a743f63282305a364b8d0948a43bd606acc652f"}, + {file = "websocket_client-1.6.3-py3-none-any.whl", hash = "sha256:6cfc30d051ebabb73a5fa246efdcc14c8fbebbd0330f8984ac3bb6d9edd2ad03"}, ] [package.extras] -docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] @@ -2570,20 +2488,20 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.15.0" +version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "18e18b306f8c91c2d0f995531cf7f20be3180815e8c82ac1d1fc2913d0bc90d8" +content-hash = "2d87d987bc2dbc1dbebe7ada682b5f2f0a475e51147f9045db752d07c057eb24" diff --git a/pyproject.toml b/pyproject.toml index cd5facf7..8f6505dc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,6 @@ codeflare-torchx = "0.6.0.dev1" cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" -pytorch-lightning = "^2.0.8" [tool.poetry.group.docs] optional = true From 7e69378f743cfbebaf21e30bbb7f44357eeea321 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Wed, 27 Sep 2023 22:58:50 +0000 Subject: [PATCH 026/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index 607d3de4..c1490035 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 91% - 91% + 90% + 90% From 6375c87e4c729e1c879a475292916dda68fd7013 Mon Sep 17 00:00:00 2001 From: openshift-merge-robot Date: Wed, 27 Sep 2023 22:58:51 +0000 Subject: [PATCH 027/496] [create-pull-request] automated change --- poetry.lock | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index c933bd3b..7c315003 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -1193,6 +1193,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, From 996264a32cecea6108e887d2fb0d82f0174397bd Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 4 Oct 2023 16:06:20 +0100 Subject: [PATCH 028/496] Added approvers dimakis & Bobbins228 --- OWNERS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/OWNERS b/OWNERS index 1b962d5f..22fbcf24 100644 --- a/OWNERS +++ b/OWNERS @@ -1,4 +1,6 @@ approvers: + - Bobbins228 + - dimakis - kpostoffice - maxusmusti - MichaelClifford From 9c8c9fab548bc3622bd7a030a9c053ce91cbd9d4 Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Tue, 10 Oct 2023 13:58:14 +0100 Subject: [PATCH 029/496] build: update python to v3.9 and ray to v2.7 --- poetry.lock | 1265 +++++++++++++++++++++--------------------------- pyproject.toml | 4 +- 2 files changed, 565 insertions(+), 704 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7c315003..353c743c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,99 +1,99 @@ -# This file is 
automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "aiohttp" -version = "3.8.5" +version = "3.8.6" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.6" files = [ - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a94159871304770da4dd371f4291b20cac04e8c94f11bdea1c3478e557fbe0d8"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:13bf85afc99ce6f9ee3567b04501f18f9f8dbbb2ea11ed1a2e079670403a7c84"}, - {file = "aiohttp-3.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ce2ac5708501afc4847221a521f7e4b245abf5178cf5ddae9d5b3856ddb2f3a"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96943e5dcc37a6529d18766597c491798b7eb7a61d48878611298afc1fca946c"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ad5c3c4590bb3cc28b4382f031f3783f25ec223557124c68754a2231d989e2b"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c413c633d0512df4dc7fd2373ec06cc6a815b7b6d6c2f208ada7e9e93a5061d"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df72ac063b97837a80d80dec8d54c241af059cc9bb42c4de68bd5b61ceb37caa"}, - {file = "aiohttp-3.8.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c48c5c0271149cfe467c0ff8eb941279fd6e3f65c9a388c984e0e6cf57538e14"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:368a42363c4d70ab52c2c6420a57f190ed3dfaca6a1b19afda8165ee16416a82"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7607ec3ce4993464368505888af5beb446845a014bc676d349efec0e05085905"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0d21c684808288a98914e5aaf2a7c6a3179d4df11d249799c32d1808e79503b5"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:312fcfbacc7880a8da0ae8b6abc6cc7d752e9caa0051a53d217a650b25e9a691"}, - {file = "aiohttp-3.8.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad093e823df03bb3fd37e7dec9d4670c34f9e24aeace76808fc20a507cace825"}, - {file = "aiohttp-3.8.5-cp310-cp310-win32.whl", hash = "sha256:33279701c04351a2914e1100b62b2a7fdb9a25995c4a104259f9a5ead7ed4802"}, - {file = "aiohttp-3.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:6e4a280e4b975a2e7745573e3fc9c9ba0d1194a3738ce1cbaa80626cc9b4f4df"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"}, - {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"}, - {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"}, - {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"}, - {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"}, - {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"}, - {file = "aiohttp-3.8.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cae533195e8122584ec87531d6df000ad07737eaa3c81209e85c928854d2195c"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f21e83f355643c345177a5d1d8079f9f28b5133bcd154193b799d380331d5d3"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a7a75ef35f2df54ad55dbf4b73fe1da96f370e51b10c91f08b19603c64004acc"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2e9839e14dd5308ee773c97115f1e0a1cb1d75cbeeee9f33824fa5144c7634"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44e65da1de4403d0576473e2344828ef9c4c6244d65cf4b75549bb46d40b8dd"}, - {file = "aiohttp-3.8.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d847e4cde6ecc19125ccbc9bfac4a7ab37c234dd88fbb3c5c524e8e14da543"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:c7a815258e5895d8900aec4454f38dca9aed71085f227537208057853f9d13f2"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8b929b9bd7cd7c3939f8bcfffa92fae7480bd1aa425279d51a89327d600c704d"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5db3a5b833764280ed7618393832e0853e40f3d3e9aa128ac0ba0f8278d08649"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:a0215ce6041d501f3155dc219712bc41252d0ab76474615b9700d63d4d9292af"}, - {file = "aiohttp-3.8.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:fd1ed388ea7fbed22c4968dd64bab0198de60750a25fe8c0c9d4bef5abe13824"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win32.whl", hash = "sha256:6e6783bcc45f397fdebc118d772103d751b54cddf5b60fbcc958382d7dd64f3e"}, - {file = "aiohttp-3.8.5-cp36-cp36m-win_amd64.whl", hash = "sha256:b5411d82cddd212644cf9360879eb5080f0d5f7d809d03262c50dad02f01421a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:01d4c0c874aa4ddfb8098e85d10b5e875a70adc63db91f1ae65a4b04d3344cda"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5980a746d547a6ba173fd5ee85ce9077e72d118758db05d229044b469d9029a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a482e6da906d5e6e653be079b29bc173a48e381600161c9932d89dfae5942ef"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80bd372b8d0715c66c974cf57fe363621a02f359f1ec81cba97366948c7fc873"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1161b345c0a444ebcf46bf0a740ba5dcf50612fd3d0528883fdc0eff578006a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd56db019015b6acfaaf92e1ac40eb8434847d9bf88b4be4efe5bfd260aee692"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:153c2549f6c004d2754cc60603d4668899c9895b8a89397444a9c4efa282aaf4"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4a01951fabc4ce26ab791da5f3f24dca6d9a6f24121746eb19756416ff2d881b"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bfb9162dcf01f615462b995a516ba03e769de0789de1cadc0f916265c257e5d8"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:7dde0009408969a43b04c16cbbe252c4f5ef4574ac226bc8815cd7342d2028b6"}, - {file = "aiohttp-3.8.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4149d34c32f9638f38f544b3977a4c24052042affa895352d3636fa8bffd030a"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win32.whl", hash = "sha256:68c5a82c8779bdfc6367c967a4a1b2aa52cd3595388bf5961a62158ee8a59e22"}, - {file = "aiohttp-3.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2cf57fb50be5f52bda004b8893e63b48530ed9f0d6c96c84620dc92fe3cd9b9d"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eca4bf3734c541dc4f374ad6010a68ff6c6748f00451707f39857f429ca36ced"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1274477e4c71ce8cfe6c1ec2f806d57c015ebf84d83373676036e256bc55d690"}, - {file = "aiohttp-3.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:28c543e54710d6158fc6f439296c7865b29e0b616629767e685a7185fab4a6b9"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:910bec0c49637d213f5d9877105d26e0c4a4de2f8b1b29405ff37e9fc0ad52b8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5443910d662db951b2e58eb70b0fbe6b6e2ae613477129a5805d0b66c54b6cb7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e460be6978fc24e3df83193dc0cc4de46c9909ed92dd47d349a452ef49325b7"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb1558def481d84f03b45888473fc5a1f35747b5f334ef4e7a571bc0dfcb11f8"}, - {file = "aiohttp-3.8.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34dd0c107799dcbbf7d48b53be761a013c0adf5571bf50c4ecad5643fe9cfcd0"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aa1990247f02a54185dc0dff92a6904521172a22664c863a03ff64c42f9b5410"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0e584a10f204a617d71d359fe383406305a4b595b333721fa50b867b4a0a1548"}, - {file = 
"aiohttp-3.8.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a3cf433f127efa43fee6b90ea4c6edf6c4a17109d1d037d1a52abec84d8f2e42"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c11f5b099adafb18e65c2c997d57108b5bbeaa9eeee64a84302c0978b1ec948b"}, - {file = "aiohttp-3.8.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:84de26ddf621d7ac4c975dbea4c945860e08cccde492269db4e1538a6a6f3c35"}, - {file = "aiohttp-3.8.5-cp38-cp38-win32.whl", hash = "sha256:ab88bafedc57dd0aab55fa728ea10c1911f7e4d8b43e1d838a1739f33712921c"}, - {file = "aiohttp-3.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:5798a9aad1879f626589f3df0f8b79b3608a92e9beab10e5fda02c8a2c60db2e"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a6ce61195c6a19c785df04e71a4537e29eaa2c50fe745b732aa937c0c77169f3"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:773dd01706d4db536335fcfae6ea2440a70ceb03dd3e7378f3e815b03c97ab51"}, - {file = "aiohttp-3.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f83a552443a526ea38d064588613aca983d0ee0038801bc93c0c916428310c28"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f7372f7341fcc16f57b2caded43e81ddd18df53320b6f9f042acad41f8e049a"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea353162f249c8097ea63c2169dd1aa55de1e8fecbe63412a9bc50816e87b761"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d47ae48db0b2dcf70bc8a3bc72b3de86e2a590fc299fdbbb15af320d2659de"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d827176898a2b0b09694fbd1088c7a31836d1a505c243811c87ae53a3f6273c1"}, - {file = "aiohttp-3.8.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3562b06567c06439d8b447037bb655ef69786c590b1de86c7ab81efe1c9c15d8"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4e874cbf8caf8959d2adf572a78bba17cb0e9d7e51bb83d86a3697b686a0ab4d"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6809a00deaf3810e38c628e9a33271892f815b853605a936e2e9e5129762356c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:33776e945d89b29251b33a7e7d006ce86447b2cfd66db5e5ded4e5cd0340585c"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eaeed7abfb5d64c539e2db173f63631455f1196c37d9d8d873fc316470dfbacd"}, - {file = "aiohttp-3.8.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e91d635961bec2d8f19dfeb41a539eb94bd073f075ca6dae6c8dc0ee89ad6f91"}, - {file = "aiohttp-3.8.5-cp39-cp39-win32.whl", hash = "sha256:00ad4b6f185ec67f3e6562e8a1d2b69660be43070bd0ef6fcec5211154c7df67"}, - {file = "aiohttp-3.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:c0a9034379a37ae42dea7ac1e048352d96286626251862e448933c0f59cbd79c"}, - {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"}, + {file = 
"aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"}, + {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"}, + {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"}, + {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"}, + {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"}, + {file = "aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"}, + {file = 
"aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"}, + {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"}, + {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"}, + {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"}, + {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"}, + {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"}, ] [package.dependencies] @@ -250,75 +250,63 @@ files = [ [[package]] name = "cffi" -version = "1.15.1" +version = "1.16.0" description = "Foreign Function Interface for Python calling C code." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, - {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, - {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, - {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, - {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, - {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, - {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, - {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, - {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, - {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, - {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, - {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, - {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, - {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, - {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, - {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = 
"cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + 
{file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, ] [package.dependencies] @@ -326,86 +314,101 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.2.0" +version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, - {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, - {file = 
"charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, - {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, - 
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, - {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, - {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, - {file = 
"charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, - {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, - {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = 
"charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, ] [[package]] @@ -817,20 +820,19 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.23.0" +version = "2.23.3" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.23.0.tar.gz", hash = "sha256:753a26312e6f1eaeec20bc6f2644a10926697da93446e1f8e24d6d32d45a922a"}, - {file = "google_auth-2.23.0-py2.py3-none-any.whl", hash = "sha256:2cec41407bd1e207f5b802638e32bb837df968bb5c05f413d0fa526fac4cf7a7"}, + {file = "google-auth-2.23.3.tar.gz", hash = 
"sha256:6864247895eea5d13b9c57c9e03abb49cb94ce2dc7c58e91cba3248c7477c9e3"}, + {file = "google_auth-2.23.3-py2.py3-none-any.whl", hash = "sha256:a8f4608e65c244ead9e0538f181a96c6e11199ec114d41f1d7b1bffa96937bda"}, ] [package.dependencies] cachetools = ">=2.0.0,<6.0" pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" -urllib3 = "<2.0" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] @@ -877,120 +879,69 @@ test = ["mockito (>=1.2.1)", "pytest (>=5.4.1)", "pytest-runner"] [[package]] name = "grpcio" -version = "1.49.1" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpcio-1.49.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:fd86040232e805b8e6378b2348c928490ee595b058ce9aaa27ed8e4b0f172b20"}, - {file = "grpcio-1.49.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6fd0c9cede9552bf00f8c5791d257d5bf3790d7057b26c59df08be5e7a1e021d"}, - {file = "grpcio-1.49.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d0d402e158d4e84e49c158cb5204119d55e1baf363ee98d6cb5dce321c3a065d"}, - {file = "grpcio-1.49.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ceec743d42a627e64ea266059a62d214c5a3cdfcd0d7fe2b7a8e4e82527c7"}, - {file = "grpcio-1.49.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2106d9c16527f0a85e2eea6e6b91a74fc99579c60dd810d8690843ea02bc0f5f"}, - {file = "grpcio-1.49.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:52dd02b7e7868233c571b49bc38ebd347c3bb1ff8907bb0cb74cb5f00c790afc"}, - {file = "grpcio-1.49.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:120fecba2ec5d14b5a15d11063b39783fda8dc8d24addd83196acb6582cabd9b"}, - {file = "grpcio-1.49.1-cp310-cp310-win32.whl", hash = "sha256:f1a3b88e3c53c1a6e6bed635ec1bbb92201bb6a1f2db186179f7f3f244829788"}, - {file = "grpcio-1.49.1-cp310-cp310-win_amd64.whl", hash = "sha256:a7d0017b92d3850abea87c1bdec6ea41104e71c77bca44c3e17f175c6700af62"}, - {file = "grpcio-1.49.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:9fb17ff8c0d56099ac6ebfa84f670c5a62228d6b5c695cf21c02160c2ac1446b"}, - {file = "grpcio-1.49.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:075f2d06e3db6b48a2157a1bcd52d6cbdca980dd18988fe6afdb41795d51625f"}, - {file = "grpcio-1.49.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46d93a1b4572b461a227f1db6b8d35a88952db1c47e5fadcf8b8a2f0e1dd9201"}, - {file = "grpcio-1.49.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc79b2b37d779ac42341ddef40ad5bf0966a64af412c89fc2b062e3ddabb093f"}, - {file = "grpcio-1.49.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5f8b3a971c7820ea9878f3fd70086240a36aeee15d1b7e9ecbc2743b0e785568"}, - {file = "grpcio-1.49.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49b301740cf5bc8fed4fee4c877570189ae3951432d79fa8e524b09353659811"}, - {file = "grpcio-1.49.1-cp311-cp311-win32.whl", hash = "sha256:1c66a25afc6c71d357867b341da594a5587db5849b48f4b7d5908d236bb62ede"}, - {file = "grpcio-1.49.1-cp311-cp311-win_amd64.whl", hash = "sha256:6b6c3a95d27846f4145d6967899b3ab25fffc6ae99544415e1adcacef84842d2"}, - {file = "grpcio-1.49.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:1cc400c8a2173d1c042997d98a9563e12d9bb3fb6ad36b7f355bc77c7663b8af"}, - {file = "grpcio-1.49.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:34f736bd4d0deae90015c0e383885b431444fe6b6c591dea288173df20603146"}, - {file = "grpcio-1.49.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = 
"sha256:196082b9c89ebf0961dcd77cb114bed8171964c8e3063b9da2fb33536a6938ed"}, - {file = "grpcio-1.49.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c9f89c42749890618cd3c2464e1fbf88446e3d2f67f1e334c8e5db2f3272bbd"}, - {file = "grpcio-1.49.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64419cb8a5b612cdb1550c2fd4acbb7d4fb263556cf4625f25522337e461509e"}, - {file = "grpcio-1.49.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8a5272061826e6164f96e3255405ef6f73b88fd3e8bef464c7d061af8585ac62"}, - {file = "grpcio-1.49.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ea9d0172445241ad7cb49577314e39d0af2c5267395b3561d7ced5d70458a9f3"}, - {file = "grpcio-1.49.1-cp37-cp37m-win32.whl", hash = "sha256:2070e87d95991473244c72d96d13596c751cb35558e11f5df5414981e7ed2492"}, - {file = "grpcio-1.49.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fcedcab49baaa9db4a2d240ac81f2d57eb0052b1c6a9501b46b8ae912720fbf"}, - {file = "grpcio-1.49.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:afbb3475cf7f4f7d380c2ca37ee826e51974f3e2665613996a91d6a58583a534"}, - {file = "grpcio-1.49.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:a4f9ba141380abde6c3adc1727f21529137a2552002243fa87c41a07e528245c"}, - {file = "grpcio-1.49.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:cf0a1fb18a7204b9c44623dfbd1465b363236ce70c7a4ed30402f9f60d8b743b"}, - {file = "grpcio-1.49.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17bb6fe72784b630728c6cff9c9d10ccc3b6d04e85da6e0a7b27fb1d135fac62"}, - {file = "grpcio-1.49.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18305d5a082d1593b005a895c10041f833b16788e88b02bb81061f5ebcc465df"}, - {file = "grpcio-1.49.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b6a1b39e59ac5a3067794a0e498911cf2e37e4b19ee9e9977dc5e7051714f13f"}, - {file = "grpcio-1.49.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0e20d59aafc086b1cc68400463bddda6e41d3e5ed30851d1e2e0f6a2e7e342d3"}, - {file = "grpcio-1.49.1-cp38-cp38-win32.whl", hash = "sha256:e1e83233d4680863a421f3ee4a7a9b80d33cd27ee9ed7593bc93f6128302d3f2"}, - {file = "grpcio-1.49.1-cp38-cp38-win_amd64.whl", hash = "sha256:221d42c654d2a41fa31323216279c73ed17d92f533bc140a3390cc1bd78bf63c"}, - {file = "grpcio-1.49.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:fa9e6e61391e99708ac87fc3436f6b7b9c6b845dc4639b406e5e61901e1aacde"}, - {file = "grpcio-1.49.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9b449e966ef518ce9c860d21f8afe0b0f055220d95bc710301752ac1db96dd6a"}, - {file = "grpcio-1.49.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:aa34d2ad9f24e47fa9a3172801c676e4037d862247e39030165fe83821a7aafd"}, - {file = "grpcio-1.49.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5207f4eed1b775d264fcfe379d8541e1c43b878f2b63c0698f8f5c56c40f3d68"}, - {file = "grpcio-1.49.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b24a74651438d45619ac67004638856f76cc13d78b7478f2457754cbcb1c8ad"}, - {file = "grpcio-1.49.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fe763781669790dc8b9618e7e677c839c87eae6cf28b655ee1fa69ae04eea03f"}, - {file = "grpcio-1.49.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2f2ff7ba0f8f431f32d4b4bc3a3713426949d3533b08466c4ff1b2b475932ca8"}, - {file = "grpcio-1.49.1-cp39-cp39-win32.whl", hash = "sha256:08ff74aec8ff457a89b97152d36cb811dcc1d17cd5a92a65933524e363327394"}, - {file = "grpcio-1.49.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:274ffbb39717918c514b35176510ae9be06e1d93121e84d50b350861dcb9a705"}, - {file = "grpcio-1.49.1.tar.gz", hash = "sha256:d4725fc9ec8e8822906ae26bb26f5546891aa7fbc3443de970cc556d43a5c99f"}, -] - -[package.dependencies] -six = ">=1.5.2" - -[package.extras] -protobuf = ["grpcio-tools (>=1.49.1)"] - -[[package]] -name = "grpcio" -version = "1.51.3" +version = "1.59.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-1.51.3-cp310-cp310-linux_armv7l.whl", hash = "sha256:f601aaeae18dab81930fb8d4f916b0da21e89bb4b5f7367ef793f46b4a76b7b0"}, - {file = "grpcio-1.51.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:eef0450a4b5ed11feab639bf3eb1b6e23d0efa9b911bf7b06fb60e14f5f8a585"}, - {file = "grpcio-1.51.3-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:82b0ad8ac825d4bb31bff9f638557c045f4a6d824d84b21e893968286f88246b"}, - {file = "grpcio-1.51.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3667c06e37d6cd461afdd51cefe6537702f3d1dc5ff4cac07e88d8b4795dc16f"}, - {file = "grpcio-1.51.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3709048fe0aa23dda09b3e69849a12055790171dab9e399a72ea8f9dfbf9ac80"}, - {file = "grpcio-1.51.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:200d69857f9910f7458b39b9bcf83ee4a180591b40146ba9e49314e3a7419313"}, - {file = "grpcio-1.51.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cd9a5e68e79c5f031500e67793048a90209711e0854a9ddee8a3ce51728de4e5"}, - {file = "grpcio-1.51.3-cp310-cp310-win32.whl", hash = "sha256:6604f614016127ae10969176bbf12eb0e03d2fb3d643f050b3b69e160d144fb4"}, - {file = "grpcio-1.51.3-cp310-cp310-win_amd64.whl", hash = "sha256:e95c7ccd4c5807adef1602005513bf7c7d14e5a41daebcf9d8d30d8bf51b8f81"}, - {file = "grpcio-1.51.3-cp311-cp311-linux_armv7l.whl", hash = "sha256:5e77ee138100f0bb55cbd147840f87ee6241dbd25f09ea7cd8afe7efff323449"}, - {file = "grpcio-1.51.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:68a7514b754e38e8de9075f7bb4dee919919515ec68628c43a894027e40ddec4"}, - {file = "grpcio-1.51.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c1b9f8afa62ff265d86a4747a2990ec5a96e4efce5d5888f245a682d66eca47"}, - {file = "grpcio-1.51.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8de30f0b417744288cec65ec8cf84b8a57995cf7f1e84ccad2704d93f05d0aae"}, - {file = "grpcio-1.51.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b69c7adc7ed60da1cb1b502853db61f453fc745f940cbcc25eb97c99965d8f41"}, - {file = "grpcio-1.51.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d81528ffe0e973dc840ec73a4132fd18b8203ad129d7410155d951a0a7e4f5d0"}, - {file = "grpcio-1.51.3-cp311-cp311-win32.whl", hash = "sha256:040eb421613b57c696063abde405916dd830203c184c9000fc8c3b3b3c950325"}, - {file = "grpcio-1.51.3-cp311-cp311-win_amd64.whl", hash = "sha256:2a8e17286c4240137d933b8ca506465472248b4ce0fe46f3404459e708b65b68"}, - {file = "grpcio-1.51.3-cp37-cp37m-linux_armv7l.whl", hash = "sha256:d5cd1389669a847555df54177b911d9ff6f17345b2a6f19388707b7a9f724c88"}, - {file = "grpcio-1.51.3-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:be1bf35ce82cdbcac14e39d5102d8de4079a1c1a6a06b68e41fcd9ef64f9dd28"}, - {file = "grpcio-1.51.3-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:5eed34994c095e2bf7194ffac7381c6068b057ef1e69f8f08db77771350a7566"}, - {file = "grpcio-1.51.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3f9a7d88082b2a17ae7bd3c2354d13bab0453899e0851733f6afa6918373f476"}, - {file = "grpcio-1.51.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c8abbc5f837111e7bd619612eedc223c290b0903b952ce0c7b00840ea70f14"}, - {file = "grpcio-1.51.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:165b05af77e6aecb4210ae7663e25acf234ba78a7c1c157fa5f2efeb0d6ec53c"}, - {file = "grpcio-1.51.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54e36c2ee304ff15f2bfbdc43d2b56c63331c52d818c364e5b5214e5bc2ad9f6"}, - {file = "grpcio-1.51.3-cp37-cp37m-win32.whl", hash = "sha256:cd0daac21d9ef5e033a5100c1d3aa055bbed28bfcf070b12d8058045c4e821b1"}, - {file = "grpcio-1.51.3-cp37-cp37m-win_amd64.whl", hash = "sha256:2fdd6333ce96435408565a9dbbd446212cd5d62e4d26f6a3c0feb1e3c35f1cc8"}, - {file = "grpcio-1.51.3-cp38-cp38-linux_armv7l.whl", hash = "sha256:54b0c29bdd9a3b1e1b61443ab152f060fc719f1c083127ab08d03fac5efd51be"}, - {file = "grpcio-1.51.3-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:ffaaf7e93fcb437356b5a4b23bf36e8a3d0221399ff77fd057e4bc77776a24be"}, - {file = "grpcio-1.51.3-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:eafbe7501a3268d05f2e450e1ddaffb950d842a8620c13ec328b501d25d2e2c3"}, - {file = "grpcio-1.51.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:881ecb34feabf31c6b3b9bbbddd1a5b57e69f805041e5a2c6c562a28574f71c4"}, - {file = "grpcio-1.51.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e860a3222139b41d430939bbec2ec9c3f6c740938bf7a04471a9a8caaa965a2e"}, - {file = "grpcio-1.51.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:49ede0528e9dac7e8a9fe30b16c73b630ddd9a576bf4b675eb6b0c53ee5ca00f"}, - {file = "grpcio-1.51.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6972b009638b40a448d10e1bc18e2223143b8a7aa20d7def0d78dd4af4126d12"}, - {file = "grpcio-1.51.3-cp38-cp38-win32.whl", hash = "sha256:5694448256e3cdfe5bd358f1574a3f2f51afa20cc834713c4b9788d60b7cc646"}, - {file = "grpcio-1.51.3-cp38-cp38-win_amd64.whl", hash = "sha256:3ea4341efe603b049e8c9a5f13c696ca37fcdf8a23ca35f650428ad3606381d9"}, - {file = "grpcio-1.51.3-cp39-cp39-linux_armv7l.whl", hash = "sha256:6c677581ce129f5fa228b8f418cee10bd28dd449f3a544ea73c8ba590ee49d0b"}, - {file = "grpcio-1.51.3-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:30e09b5e0531685e176f49679b6a3b190762cc225f4565e55a899f5e14b3aa62"}, - {file = "grpcio-1.51.3-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c831f31336e81243f85b6daff3e5e8a123302ce0ea1f2726ad752fd7a59f3aee"}, - {file = "grpcio-1.51.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2cd2e4cefb724cab1ba2df4b7535a9980531b9ec51b4dbb5f137a1f3a3754ef0"}, - {file = "grpcio-1.51.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7a0d0bf44438869d307f85a54f25a896ad6b4b0ca12370f76892ad732928d87"}, - {file = "grpcio-1.51.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c02abd55409bfb293371554adf6a4401197ec2133dd97727c01180889014ba4d"}, - {file = "grpcio-1.51.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2f8ff75e61e1227ba7a3f16b2eadbcc11d0a54096d52ab75a6b88cfbe56f55d1"}, - {file = "grpcio-1.51.3-cp39-cp39-win32.whl", hash = "sha256:6c99a73a6260bdf844b2e5ddad02dcd530310f80e1fa72c300fa19c1c7496962"}, - {file = "grpcio-1.51.3-cp39-cp39-win_amd64.whl", hash = "sha256:22bdfac4f7f27acdd4da359b5e7e1973dc74bf1ed406729b07d0759fde2f064b"}, - {file = "grpcio-1.51.3.tar.gz", hash = "sha256:be7b2265b7527bb12109a7727581e274170766d5b3c9258d4e466f4872522d7a"}, + {file = 
"grpcio-1.59.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:225e5fa61c35eeaebb4e7491cd2d768cd8eb6ed00f2664fa83a58f29418b39fd"}, + {file = "grpcio-1.59.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b95ec8ecc4f703f5caaa8d96e93e40c7f589bad299a2617bdb8becbcce525539"}, + {file = "grpcio-1.59.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:1a839ba86764cc48226f50b924216000c79779c563a301586a107bda9cbe9dcf"}, + {file = "grpcio-1.59.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6cfe44a5d7c7d5f1017a7da1c8160304091ca5dc64a0f85bca0d63008c3137a"}, + {file = "grpcio-1.59.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0fcf53df684fcc0154b1e61f6b4a8c4cf5f49d98a63511e3f30966feff39cd0"}, + {file = "grpcio-1.59.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa66cac32861500f280bb60fe7d5b3e22d68c51e18e65367e38f8669b78cea3b"}, + {file = "grpcio-1.59.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8cd2d38c2d52f607d75a74143113174c36d8a416d9472415eab834f837580cf7"}, + {file = "grpcio-1.59.0-cp310-cp310-win32.whl", hash = "sha256:228b91ce454876d7eed74041aff24a8f04c0306b7250a2da99d35dd25e2a1211"}, + {file = "grpcio-1.59.0-cp310-cp310-win_amd64.whl", hash = "sha256:ca87ee6183421b7cea3544190061f6c1c3dfc959e0b57a5286b108511fd34ff4"}, + {file = "grpcio-1.59.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c173a87d622ea074ce79be33b952f0b424fa92182063c3bda8625c11d3585d09"}, + {file = "grpcio-1.59.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:ec78aebb9b6771d6a1de7b6ca2f779a2f6113b9108d486e904bde323d51f5589"}, + {file = "grpcio-1.59.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:0b84445fa94d59e6806c10266b977f92fa997db3585f125d6b751af02ff8b9fe"}, + {file = "grpcio-1.59.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c251d22de8f9f5cca9ee47e4bade7c5c853e6e40743f47f5cc02288ee7a87252"}, + {file = "grpcio-1.59.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:956f0b7cb465a65de1bd90d5a7475b4dc55089b25042fe0f6c870707e9aabb1d"}, + {file = "grpcio-1.59.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:38da5310ef84e16d638ad89550b5b9424df508fd5c7b968b90eb9629ca9be4b9"}, + {file = "grpcio-1.59.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:63982150a7d598281fa1d7ffead6096e543ff8be189d3235dd2b5604f2c553e5"}, + {file = "grpcio-1.59.0-cp311-cp311-win32.whl", hash = "sha256:50eff97397e29eeee5df106ea1afce3ee134d567aa2c8e04fabab05c79d791a7"}, + {file = "grpcio-1.59.0-cp311-cp311-win_amd64.whl", hash = "sha256:15f03bd714f987d48ae57fe092cf81960ae36da4e520e729392a59a75cda4f29"}, + {file = "grpcio-1.59.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f1feb034321ae2f718172d86b8276c03599846dc7bb1792ae370af02718f91c5"}, + {file = "grpcio-1.59.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d09bd2a4e9f5a44d36bb8684f284835c14d30c22d8ec92ce796655af12163588"}, + {file = "grpcio-1.59.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:2f120d27051e4c59db2f267b71b833796770d3ea36ca712befa8c5fff5da6ebd"}, + {file = "grpcio-1.59.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0ca727a173ee093f49ead932c051af463258b4b493b956a2c099696f38aa66"}, + {file = "grpcio-1.59.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5711c51e204dc52065f4a3327dca46e69636a0b76d3e98c2c28c4ccef9b04c52"}, + {file = "grpcio-1.59.0-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:d74f7d2d7c242a6af9d4d069552ec3669965b74fed6b92946e0e13b4168374f9"}, + {file = "grpcio-1.59.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3859917de234a0a2a52132489c4425a73669de9c458b01c9a83687f1f31b5b10"}, + {file = "grpcio-1.59.0-cp312-cp312-win32.whl", hash = "sha256:de2599985b7c1b4ce7526e15c969d66b93687571aa008ca749d6235d056b7205"}, + {file = "grpcio-1.59.0-cp312-cp312-win_amd64.whl", hash = "sha256:598f3530231cf10ae03f4ab92d48c3be1fee0c52213a1d5958df1a90957e6a88"}, + {file = "grpcio-1.59.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:b34c7a4c31841a2ea27246a05eed8a80c319bfc0d3e644412ec9ce437105ff6c"}, + {file = "grpcio-1.59.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:c4dfdb49f4997dc664f30116af2d34751b91aa031f8c8ee251ce4dcfc11277b0"}, + {file = "grpcio-1.59.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:61bc72a00ecc2b79d9695220b4d02e8ba53b702b42411397e831c9b0589f08a3"}, + {file = "grpcio-1.59.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f367e4b524cb319e50acbdea57bb63c3b717c5d561974ace0b065a648bb3bad3"}, + {file = "grpcio-1.59.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:849c47ef42424c86af069a9c5e691a765e304079755d5c29eff511263fad9c2a"}, + {file = "grpcio-1.59.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c0488c2b0528e6072010182075615620071371701733c63ab5be49140ed8f7f0"}, + {file = "grpcio-1.59.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:611d9aa0017fa386809bddcb76653a5ab18c264faf4d9ff35cb904d44745f575"}, + {file = "grpcio-1.59.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e5378785dce2b91eb2e5b857ec7602305a3b5cf78311767146464bfa365fc897"}, + {file = "grpcio-1.59.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:fe976910de34d21057bcb53b2c5e667843588b48bf11339da2a75f5c4c5b4055"}, + {file = "grpcio-1.59.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:c041a91712bf23b2a910f61e16565a05869e505dc5a5c025d429ca6de5de842c"}, + {file = "grpcio-1.59.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:0ae444221b2c16d8211b55326f8ba173ba8f8c76349bfc1768198ba592b58f74"}, + {file = "grpcio-1.59.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ceb1e68135788c3fce2211de86a7597591f0b9a0d2bb80e8401fd1d915991bac"}, + {file = "grpcio-1.59.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4b1cc3a9dc1924d2eb26eec8792fedd4b3fcd10111e26c1d551f2e4eda79ce"}, + {file = "grpcio-1.59.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:871371ce0c0055d3db2a86fdebd1e1d647cf21a8912acc30052660297a5a6901"}, + {file = "grpcio-1.59.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:93e9cb546e610829e462147ce724a9cb108e61647a3454500438a6deef610be1"}, + {file = "grpcio-1.59.0-cp38-cp38-win32.whl", hash = "sha256:f21917aa50b40842b51aff2de6ebf9e2f6af3fe0971c31960ad6a3a2b24988f4"}, + {file = "grpcio-1.59.0-cp38-cp38-win_amd64.whl", hash = "sha256:14890da86a0c0e9dc1ea8e90101d7a3e0e7b1e71f4487fab36e2bfd2ecadd13c"}, + {file = "grpcio-1.59.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:34341d9e81a4b669a5f5dca3b2a760b6798e95cdda2b173e65d29d0b16692857"}, + {file = "grpcio-1.59.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:986de4aa75646e963466b386a8c5055c8b23a26a36a6c99052385d6fe8aaf180"}, + {file = "grpcio-1.59.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:aca8a24fef80bef73f83eb8153f5f5a0134d9539b4c436a716256b311dda90a6"}, + {file = "grpcio-1.59.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:936b2e04663660c600d5173bc2cc84e15adbad9c8f71946eb833b0afc205b996"}, + {file = "grpcio-1.59.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc8bf2e7bc725e76c0c11e474634a08c8f24bcf7426c0c6d60c8f9c6e70e4d4a"}, + {file = "grpcio-1.59.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81d86a096ccd24a57fa5772a544c9e566218bc4de49e8c909882dae9d73392df"}, + {file = "grpcio-1.59.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2ea95cd6abbe20138b8df965b4a8674ec312aaef3147c0f46a0bac661f09e8d0"}, + {file = "grpcio-1.59.0-cp39-cp39-win32.whl", hash = "sha256:3b8ff795d35a93d1df6531f31c1502673d1cebeeba93d0f9bd74617381507e3f"}, + {file = "grpcio-1.59.0-cp39-cp39-win_amd64.whl", hash = "sha256:38823bd088c69f59966f594d087d3a929d1ef310506bee9e3648317660d65b81"}, + {file = "grpcio-1.59.0.tar.gz", hash = "sha256:acf70a63cf09dd494000007b798aff88a436e1c03b394995ce450be437b8e54f"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.51.3)"] +protobuf = ["grpcio-tools (>=1.59.0)"] [[package]] name = "idna" @@ -1022,24 +973,6 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] -[[package]] -name = "importlib-resources" -version = "6.1.0" -description = "Read resources from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = "sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, - {file = "importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -1078,9 +1011,7 @@ files = [ [package.dependencies] attrs = ">=22.2.0" -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} jsonschema-specifications = ">=2023.03.6" -pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} referencing = ">=0.28.4" rpds-py = ">=0.7.1" @@ -1100,7 +1031,6 @@ files = [ ] [package.dependencies] -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} referencing = ">=0.28.0" [[package]] @@ -1150,20 +1080,20 @@ testing = ["pytest"] [[package]] name = "markdown" -version = "3.4.4" +version = "3.5" description = "Python implementation of John Gruber's Markdown." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, - {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, + {file = "Markdown-3.5-py3-none-any.whl", hash = "sha256:4afb124395ce5fc34e6d9886dab977fd9ae987fc6e85689f08278cf0c69d4bf3"}, + {file = "Markdown-3.5.tar.gz", hash = "sha256:a807eb2e4778d9156c8f07876c6e4d50b5494c5665c4834f67b06459dfd877b3"}, ] [package.dependencies] importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} [package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [[package]] @@ -1193,16 +1123,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1237,67 +1157,67 @@ files = [ [[package]] name = "msgpack" 
-version = "1.0.6" +version = "1.0.7" description = "MessagePack serializer" optional = false python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f4321692e7f299277e55f322329b2c972d93bb612d85f3fda8741bec5c6285ce"}, - {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f0e36a5fa7a182cde391a128a64f437657d2b9371dfa42eda3436245adccbf5"}, - {file = "msgpack-1.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5c8dd9a386a66e50bd7fa22b7a49fb8ead2b3574d6bd69eb1caced6caea0803"}, - {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f85200ea102276afdd3749ca94747f057bbb868d1c52921ee2446730b508d0f"}, - {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a006c300e82402c0c8f1ded11352a3ba2a61b87e7abb3054c845af2ca8d553c"}, - {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bbf47ea5a6ff20c23426106e81863cdbb5402de1825493026ce615039cc99d"}, - {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04450e4b5e1e662e7c86b6aafb7c230af9334fd0becf5e6b80459a507884241c"}, - {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b06a5095a79384760625b5de3f83f40b3053a385fb893be8a106fbbd84c14980"}, - {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3910211b0ab20be3a38e0bb944ed45bd4265d8d9f11a3d1674b95b298e08dd5c"}, - {file = "msgpack-1.0.6-cp310-cp310-win32.whl", hash = "sha256:1dc67b40fe81217b308ab12651adba05e7300b3a2ccf84d6b35a878e308dd8d4"}, - {file = "msgpack-1.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:885de1ed5ea01c1bfe0a34c901152a264c3c1f8f1d382042b92ea354bd14bb0e"}, - {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:099c3d8a027367e1a6fc55d15336f04ff65c60c4f737b5739f7db4525c65fe9e"}, - {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b88dc97ba86c96b964c3745a445d9a65f76fe21955a953064fe04adb63e9367"}, - {file = "msgpack-1.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:00ce5f827d4f26fc094043e6f08b6069c1b148efa2631c47615ae14fb6cafc89"}, - {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd6af61388be65a8701f5787362cb54adae20007e0cc67ca9221a4b95115583b"}, - {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:652e4b7497825b0af6259e2c54700e6dc33d2fc4ed92b8839435090d4c9cc911"}, - {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b08676a17e3f791daad34d5fcb18479e9c85e7200d5a17cbe8de798643a7e37"}, - {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:229ccb6713c8b941eaa5cf13dc7478eba117f21513b5893c35e44483e2f0c9c8"}, - {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:95ade0bd4cf69e04e8b8f8ec2d197d9c9c4a9b6902e048dc7456bf6d82e12a80"}, - {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b16344032a27b2ccfd341f89dadf3e4ef6407d91e4b93563c14644a8abb3ad7"}, - {file = "msgpack-1.0.6-cp311-cp311-win32.whl", hash = "sha256:55bb4a1bf94e39447bc08238a2fb8a767460388a8192f67c103442eb36920887"}, - {file = "msgpack-1.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:ae97504958d0bc58c1152045c170815d5c4f8af906561ce044b6358b43d0c97e"}, - {file = 
"msgpack-1.0.6-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ecf431786019a7bfedc28281531d706627f603e3691d64eccdbce3ecd353823"}, - {file = "msgpack-1.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a635aecf1047255576dbb0927cbf9a7aa4a68e9d54110cc3c926652d18f144e0"}, - {file = "msgpack-1.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:102cfb54eaefa73e8ca1e784b9352c623524185c98e057e519545131a56fb0af"}, - {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c5e05e4f5756758c58a8088aa10dc70d851c89f842b611fdccfc0581c1846bc"}, - {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68569509dd015fcdd1e6b2b3ccc8c51fd27d9a97f461ccc909270e220ee09685"}, - {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf652839d16de91fe1cfb253e0a88db9a548796939533894e07f45d4bdf90a5f"}, - {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14db7e1b7a7ed362b2f94897bf2486c899c8bb50f6e34b2db92fe534cdab306f"}, - {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:159cfec18a6e125dd4723e2b1de6f202b34b87c850fb9d509acfd054c01135e9"}, - {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6a01a072b2219b65a6ff74df208f20b2cac9401c60adb676ee34e53b4c651077"}, - {file = "msgpack-1.0.6-cp312-cp312-win32.whl", hash = "sha256:e36560d001d4ba469d469b02037f2dd404421fd72277d9474efe9f03f83fced5"}, - {file = "msgpack-1.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:5e7fae9ca93258a956551708cf60dc6c8145574e32ce8c8c4d894e63bcb04341"}, - {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:40b801b768f5a765e33c68f30665d3c6ee1c8623a2d2bb78e6e59f2db4e4ceb7"}, - {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da057d3652e698b00746e47f06dbb513314f847421e857e32e1dc61c46f6c052"}, - {file = "msgpack-1.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f75114c05ec56566da6b55122791cf5bb53d5aada96a98c016d6231e03132f76"}, - {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61213482b5a387ead9e250e9e3cb290292feca39dc83b41c3b1b7b8ffc8d8ecb"}, - {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae6c561f11b444b258b1b4be2bdd1e1cf93cd1d80766b7e869a79db4543a8a8"}, - {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:619a63753ba9e792fe3c6c0fc2b9ee2cfbd92153dd91bee029a89a71eb2942cd"}, - {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:70843788c85ca385846a2d2f836efebe7bb2687ca0734648bf5c9dc6c55602d2"}, - {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fb4571efe86545b772a4630fee578c213c91cbcfd20347806e47fd4e782a18fe"}, - {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bbb4448a05d261fae423d5c0b0974ad899f60825bc77eabad5a0c518e78448c2"}, - {file = "msgpack-1.0.6-cp38-cp38-win32.whl", hash = "sha256:5cd67674db3c73026e0a2c729b909780e88bd9cbc8184256f9567640a5d299a8"}, - {file = "msgpack-1.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:a1cf98afa7ad5e7012454ca3fde254499a13f9d92fd50cb46118118a249a1355"}, - {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d6d25b8a5c70e2334ed61a8da4c11cd9b97c6fbd980c406033f06e4463fda006"}, - {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:88cdb1da7fdb121dbb3116910722f5acab4d6e8bfcacab8fafe27e2e7744dc6a"}, - {file = "msgpack-1.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3b5658b1f9e486a2eec4c0c688f213a90085b9cf2fec76ef08f98fdf6c62f4b9"}, - {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76820f2ece3b0a7c948bbb6a599020e29574626d23a649476def023cbb026787"}, - {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c780d992f5d734432726b92a0c87bf1857c3d85082a8dea29cbf56e44a132b3"}, - {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0ed35d6d6122d0baa9a1b59ebca4ee302139f4cfb57dab85e4c73ab793ae7ed"}, - {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:32c0aff31f33033f4961abc01f78497e5e07bac02a508632aef394b384d27428"}, - {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:35ad5aed9b52217d4cea739d0ea3a492a18dd86fecb4b132668a69f27fb0363b"}, - {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47275ff73005a3e5e146e50baa2378e1730cba6e292f0222bc496a8e4c4adfc8"}, - {file = "msgpack-1.0.6-cp39-cp39-win32.whl", hash = "sha256:7baf16fd8908a025c4a8d7b699103e72d41f967e2aee5a2065432bcdbd9fd06e"}, - {file = "msgpack-1.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:fc97aa4b4fb928ff4d3b74da7c30b360d0cb3ede49a5a6e1fd9705f49aea1deb"}, - {file = "msgpack-1.0.6.tar.gz", hash = "sha256:25d3746da40f3c8c59c3b1d001e49fd2aa17904438f980d9a391370366df001e"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"}, + {file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"}, + {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"}, + {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"}, + {file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"}, + {file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"}, + {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"}, + {file 
= "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"}, + {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"}, + {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"}, + {file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"}, + {file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"}, + {file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"}, + {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"}, + {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"}, + {file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"}, + {file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"}, + {file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"}, + {file = 
"msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"}, + {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"}, + {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"}, + {file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"}, + {file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"}, + {file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"}, + {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"}, + {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"}, + {file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"}, + {file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"}, + {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"}, ] [[package]] @@ -1394,43 +1314,6 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "numpy" -version = "1.24.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, - {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", 
hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, - {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, - {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, - {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, - {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, - {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, - {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, - {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, - {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, - {file = 
"numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, - {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, -] - [[package]] name = "numpy" version = "1.25.2" @@ -1536,13 +1419,13 @@ six = "*" [[package]] name = "packaging" -version = "23.1" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -1581,26 +1464,15 @@ files = [ mako = "*" markdown = ">=3.0" -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." -optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - [[package]] name = "platformdirs" -version = "3.10.0" +version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, - {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, ] [package.extras] @@ -1638,24 +1510,24 @@ twisted = ["twisted"] [[package]] name = "protobuf" -version = "4.24.3" +version = "4.24.4" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "protobuf-4.24.3-cp310-abi3-win32.whl", hash = "sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4"}, - {file = "protobuf-4.24.3-cp310-abi3-win_amd64.whl", hash = "sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3"}, - {file = "protobuf-4.24.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b"}, - {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959"}, - {file = "protobuf-4.24.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675"}, - {file = "protobuf-4.24.3-cp37-cp37m-win32.whl", hash = "sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2"}, - {file = "protobuf-4.24.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76"}, - {file = "protobuf-4.24.3-cp38-cp38-win32.whl", hash = "sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52"}, - {file = "protobuf-4.24.3-cp38-cp38-win_amd64.whl", hash = "sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719"}, - {file = "protobuf-4.24.3-cp39-cp39-win32.whl", hash = "sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1"}, - {file = "protobuf-4.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b"}, - {file = "protobuf-4.24.3-py3-none-any.whl", hash = "sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a"}, - {file = "protobuf-4.24.3.tar.gz", hash = "sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d"}, + {file = "protobuf-4.24.4-cp310-abi3-win32.whl", hash = "sha256:ec9912d5cb6714a5710e28e592ee1093d68c5ebfeda61983b3f40331da0b1ebb"}, + {file = "protobuf-4.24.4-cp310-abi3-win_amd64.whl", hash = "sha256:1badab72aa8a3a2b812eacfede5020472e16c6b2212d737cefd685884c191085"}, + {file = "protobuf-4.24.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e61a27f362369c2f33248a0ff6896c20dcd47b5d48239cb9720134bef6082e4"}, + {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:bffa46ad9612e6779d0e51ae586fde768339b791a50610d85eb162daeb23661e"}, + {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9"}, + {file = "protobuf-4.24.4-cp37-cp37m-win32.whl", hash = "sha256:dbbed8a56e56cee8d9d522ce844a1379a72a70f453bde6243e3c86c30c2a3d46"}, + {file = "protobuf-4.24.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6b7d2e1c753715dcfe9d284a25a52d67818dd43c4932574307daf836f0071e37"}, + {file = 
"protobuf-4.24.4-cp38-cp38-win32.whl", hash = "sha256:02212557a76cd99574775a81fefeba8738d0f668d6abd0c6b1d3adcc75503dbe"}, + {file = "protobuf-4.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:2fa3886dfaae6b4c5ed2730d3bf47c7a38a72b3a1f0acb4d4caf68e6874b947b"}, + {file = "protobuf-4.24.4-cp39-cp39-win32.whl", hash = "sha256:b77272f3e28bb416e2071186cb39efd4abbf696d682cbb5dc731308ad37fa6dd"}, + {file = "protobuf-4.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:9fee5e8aa20ef1b84123bb9232b3f4a5114d9897ed89b4b8142d81924e05d79b"}, + {file = "protobuf-4.24.4-py3-none-any.whl", hash = "sha256:80797ce7424f8c8d2f2547e2d42bfbb6c08230ce5832d6c099a37335c9c90a92"}, + {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"}, ] [[package]] @@ -1738,47 +1610,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.12" +version = "1.10.13" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, - {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, - {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, - {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, - {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, - {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, ] [package.dependencies] @@ -1931,7 +1803,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1939,15 +1810,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = 
"sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1964,7 +1828,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1972,7 +1835,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = 
"PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1980,79 +1842,77 @@ files = [ [[package]] name = "ray" -version = "2.5.0" +version = "2.7.0" description = "Ray provides a simple, universal API for building distributed applications." optional = false python-versions = "*" files = [ - {file = "ray-2.5.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:d1bebc874e896880c1215f4c1a11697ada49fa1595d6d99d7c5b4dc03030df36"}, - {file = "ray-2.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0285df2d24cacc36ca64b7852178a9bf37e3fc88545752fc2b46c27396965c1"}, - {file = "ray-2.5.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:38935d46c2597c1d1f113e1c8f88e2716c67052c480de5b2a0265e0a1a5ce88f"}, - {file = "ray-2.5.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:d53a07c9a9dbc134945a26980f557e9ff0f591bf8cabed1a6ebf921768d1c8bd"}, - {file = "ray-2.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:ef26ba24461dad98365b48ef01e27e70bc9737f4cf4734115804153d7d9195dc"}, - {file = "ray-2.5.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d714175a5000ca91f82646a9b72521118bb6d2db5568e1b7ae9ceb64769716b6"}, - {file = "ray-2.5.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:0cde929e63497ed5f1c8626e5ccf7595ef6acaf1e7e270ad7c12f8e1c7695244"}, - {file = "ray-2.5.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:7e5512abf62c05c9ff90b1c89a4e0f2e45ee00e73f816eb8265e3ebd92fe4064"}, - {file = "ray-2.5.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3bf36beb213f89c0eb1ec5ac6ffddc8f53e616be745167f00ca017abd8672a2d"}, - {file = "ray-2.5.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:59c2448b07f45d9a9d8e594bb5337bd35a5fea04e42cb4211a3346c2c0d066b0"}, - {file = "ray-2.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:63008dd659d9ef25b0e20f0e1a285e8266e0af68b1178bca1b6ae43e49a68104"}, - {file = "ray-2.5.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:e9464e93d6b72e0da69b9c5ab0501cc40f2db14801e22c6b97fa4e8039647892"}, - {file = "ray-2.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7dc00fac119bfa1c2f8ac456d50a728346d6f2722fb7a21bf70841fc7476c285"}, - {file = "ray-2.5.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d76051519bd4ae39fda4a87536978cafdebf2843c1c29a9f734c503d8ea676cd"}, - {file = "ray-2.5.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:9a8e06dc5d4201129c28b6768a971c474b82a23935b2e40461ffc7f1c2f4942a"}, - {file = "ray-2.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:849014b62ca50ff106b7a5d41430346e2762b1c4c803673af076209925b8f912"}, - {file = "ray-2.5.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:a1b52c12a3349d8e37357df31438b6f1b12c7719ef41bdf5089fc7e78e8ab212"}, - {file = "ray-2.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25f3d50c27c4c4756259d093d152381c6604bb96684a0cf43c55ddcc2eb73f79"}, - {file = "ray-2.5.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1cb4f6ef9cfdb69d2ae582f357e977527944390e2f5cbbf51efd8252ed4c9a11"}, - {file = "ray-2.5.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:662cff303c086369a29283badcd7445b7f911874d8407b2c589b1ccbf6028d2e"}, - {file = "ray-2.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2cea10981dad7cfd187edf5e225a667eb114269afc5f2321b52113ef2d86123"}, + {file = "ray-2.7.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:bc911655908b61b2e9f59b8df158fcc62cd32080c468b484b539ebf0a4111d04"}, + {file = "ray-2.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:0ee8c14e1521559cd5802bfad3f0aba4a77afdfba57dd446162a7449c6e8ff68"}, + {file = "ray-2.7.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ebde44af7d479ede21d1c2e68b5ccd8264e18df6e4f3c216d9e99c31e819bde6"}, + {file = "ray-2.7.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:b83621f5d2d4079e6ae624c3bf30046a4fefa0ea7ea5e4a4dfe4b50c580b3768"}, + {file = "ray-2.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:8e1b06abba6e227b8dde1ad861c587fb2608a6970d270e4755cd24a6f37ed565"}, + {file = "ray-2.7.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5442d48719f033831a324f05b332d6e7181970d721e9504be2091cc9d9735394"}, + {file = "ray-2.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ca8225878cce7b9e2d0ca9668d9370893a7cee35629d11a3889a1b66a0007218"}, + {file = "ray-2.7.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:a3f59dbb0780f9fa11f5bf96bef853b4cb95245456d4400e1c7bf2e514d12ab2"}, + {file = "ray-2.7.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:8384b3f30bc1446ef810e9e894afa03238c5ac40d3c40c0740d82f347112015d"}, + {file = "ray-2.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d4530e7024375505552dabd3f4441fc9ac7a5562365a81ba9afa14185433879"}, + {file = "ray-2.7.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:c491b8051eef82b77d136c48a23d16485c0e54233303ccf68e9fe69a06c517e6"}, + {file = "ray-2.7.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:1684c434886cb7b263cdf98ed39d75dec343e949f7b14f3385d83bfe70ee8c80"}, + {file = "ray-2.7.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:856a9ae164b9b0aeaad54f3e78986eb19900ed3c74e26f51b02a7d8826c97e59"}, + {file = "ray-2.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:34925a90b6239de42592bb4524dcbdc59a9c65f1f74ad4d9f97f636bd59c73d7"}, + {file = "ray-2.7.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:60db240f37d80a80492e09a8d1e29b79d034431c6fcb651401e9e2d24d850793"}, + {file = "ray-2.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:835155fdeb8698eae426f3d9416e6b8165197fe5c1c74e1b02a429fc7f4ddcd2"}, + {file = "ray-2.7.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:442b7568946081d38c8addbc528e7b09fc1ee25453b4800c86b7e5ba4bce9dd3"}, + {file = "ray-2.7.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:3825292b777b423e2cd34bf66e8e1e7701b04c6a5308f9f291ad5929b289dc47"}, + {file = "ray-2.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:ce700322662946ad5c62a39b78e81feebcb855d378c49f5df6477c22f0ac1e5a"}, + {file = "ray-2.7.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:327c23aac5dd26ee4abe6cee70320322d63fdf97c6028fbb9555724b46a8f3e3"}, + {file = "ray-2.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a09021d45312ab7a44109b251984718b65fbff77df0b55e30e651193cdf42bff"}, + {file = "ray-2.7.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f05fcb609962d14f4d23cc88a9d07cafa7077ce3c5d5ee99cd08a19067b7eecf"}, + {file = "ray-2.7.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:0e0f7dbeb4444940c72b64fdecd6f331593466914b2dffeed03ce97225acec14"}, + {file = "ray-2.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:29a0866316756ae18e232dd074adbf408dcdabe95d135a9a96b9a8c24393c983"}, ] [package.dependencies] aiohttp = {version = ">=3.7", optional = true, markers = "extra == \"default\""} aiohttp-cors = {version = "*", optional = true, markers = "extra == \"default\""} aiosignal = "*" -attrs = "*" click = ">=7.0" colorful = {version = "*", optional = true, markers = "extra == \"default\""} filelock = "*" frozenlist = "*" gpustat = {version = ">=1.0.0", optional = true, markers = "extra == 
\"default\""} grpcio = [ - {version = ">=1.32.0,<=1.51.3", markers = "python_version < \"3.10\" and sys_platform != \"darwin\""}, - {version = ">=1.32.0,<=1.49.1", markers = "python_version < \"3.10\" and sys_platform == \"darwin\""}, - {version = ">=1.42.0,<=1.51.3", markers = "python_version >= \"3.10\" and sys_platform != \"darwin\""}, - {version = ">=1.42.0,<=1.49.1", markers = "python_version >= \"3.10\" and sys_platform == \"darwin\""}, + {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\" and extra == \"default\""}, + {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""}, ] jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" -numpy = [ - {version = ">=1.16", markers = "python_version < \"3.9\""}, - {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, -] +numpy = {version = ">=1.19.3", markers = "python_version >= \"3.9\""} opencensus = {version = "*", optional = true, markers = "extra == \"default\""} packaging = "*" prometheus-client = {version = ">=0.7.1", optional = true, markers = "extra == \"default\""} protobuf = ">=3.15.3,<3.19.5 || >3.19.5" py-spy = {version = ">=0.2.0", optional = true, markers = "extra == \"default\""} -pydantic = {version = "*", optional = true, markers = "extra == \"default\""} +pydantic = {version = "<2", optional = true, markers = "extra == \"default\""} pyyaml = "*" requests = "*" smart-open = {version = "*", optional = true, markers = "extra == \"default\""} virtualenv = {version = ">=20.0.24,<20.21.1", optional = true, markers = "extra == \"default\""} [package.extras] -air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "fsspec", "gpustat (>=1.0.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi", "fsspec", "gpustat (>=1.0.0)", "gymnasium (==0.26.3)", "kubernetes", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic", "pyyaml", "ray-cpp (==2.5.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "urllib3", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)"] -cpp = ["ray-cpp (==2.5.0)"] +air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml", "ray-cpp (==2.7.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", 
"tensorboardX (>=1.9)", "typer", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +client = ["grpcio (!=1.56.0)"] +cpp = ["ray-cpp (==2.7.0)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] -default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "gpustat (>=1.0.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic", "requests", "smart-open", "virtualenv (>=20.0.24,<20.21.1)"] -k8s = ["kubernetes", "urllib3"] +default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "virtualenv (>=20.0.24,<20.21.1)"] observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] -rllib = ["dm-tree", "gymnasium (==0.26.3)", "lz4", "pandas", "pyarrow (>=6.0.1)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "tensorboardX (>=1.9)", "typer"] -serve = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "gpustat (>=1.0.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic", "requests", "smart-open", "starlette", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)"] -train = ["pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] -tune = ["pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] +rllib = ["dm-tree", "fsspec", "gymnasium (==0.28.1)", "lz4", "pandas", "pyarrow (>=6.0.1)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "tensorboardX (>=1.9)", "typer"] +serve = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "starlette", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "starlette", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +train = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] +tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] [[package]] name = "referencing" @@ -2122,115 +1982,116 @@ files = [ [package.dependencies] commonmark = ">=0.9.0,<0.10.0" pygments = ">=2.6.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "rpds-py" -version = "0.10.3" +version = "0.10.4" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.10.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:485747ee62da83366a44fbba963c5fe017860ad408ccd6cd99aa66ea80d32b2e"}, - {file = "rpds_py-0.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c55f9821f88e8bee4b7a72c82cfb5ecd22b6aad04033334f33c329b29bfa4da0"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3b52a67ac66a3a64a7e710ba629f62d1e26ca0504c29ee8cbd99b97df7079a8"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:3aed39db2f0ace76faa94f465d4234aac72e2f32b009f15da6492a561b3bbebd"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271c360fdc464fe6a75f13ea0c08ddf71a321f4c55fc20a3fe62ea3ef09df7d9"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef5fddfb264e89c435be4adb3953cef5d2936fdeb4463b4161a6ba2f22e7b740"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771417c9c06c56c9d53d11a5b084d1de75de82978e23c544270ab25e7c066ff"}, - {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:52b5cbc0469328e58180021138207e6ec91d7ca2e037d3549cc9e34e2187330a"}, - {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6ac3fefb0d168c7c6cab24fdfc80ec62cd2b4dfd9e65b84bdceb1cb01d385c33"}, - {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8d54bbdf5d56e2c8cf81a1857250f3ea132de77af543d0ba5dce667183b61fec"}, - {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cd2163f42868865597d89399a01aa33b7594ce8e2c4a28503127c81a2f17784e"}, - {file = "rpds_py-0.10.3-cp310-none-win32.whl", hash = "sha256:ea93163472db26ac6043e8f7f93a05d9b59e0505c760da2a3cd22c7dd7111391"}, - {file = "rpds_py-0.10.3-cp310-none-win_amd64.whl", hash = "sha256:7cd020b1fb41e3ab7716d4d2c3972d4588fdfbab9bfbbb64acc7078eccef8860"}, - {file = "rpds_py-0.10.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:1d9b5ee46dcb498fa3e46d4dfabcb531e1f2e76b477e0d99ef114f17bbd38453"}, - {file = "rpds_py-0.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:563646d74a4b4456d0cf3b714ca522e725243c603e8254ad85c3b59b7c0c4bf0"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e626b864725680cd3904414d72e7b0bd81c0e5b2b53a5b30b4273034253bb41f"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485301ee56ce87a51ccb182a4b180d852c5cb2b3cb3a82f7d4714b4141119d8c"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42f712b4668831c0cd85e0a5b5a308700fe068e37dcd24c0062904c4e372b093"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c9141af27a4e5819d74d67d227d5047a20fa3c7d4d9df43037a955b4c748ec5"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef750a20de1b65657a1425f77c525b0183eac63fe7b8f5ac0dd16f3668d3e64f"}, - {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1a0ffc39f51aa5f5c22114a8f1906b3c17eba68c5babb86c5f77d8b1bba14d1"}, - {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f4c179a7aeae10ddf44c6bac87938134c1379c49c884529f090f9bf05566c836"}, - {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:176287bb998fd1e9846a9b666e240e58f8d3373e3bf87e7642f15af5405187b8"}, - {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6446002739ca29249f0beaaf067fcbc2b5aab4bc7ee8fb941bd194947ce19aff"}, - {file = "rpds_py-0.10.3-cp311-none-win32.whl", hash = "sha256:c7aed97f2e676561416c927b063802c8a6285e9b55e1b83213dfd99a8f4f9e48"}, - {file = "rpds_py-0.10.3-cp311-none-win_amd64.whl", hash = "sha256:8bd01ff4032abaed03f2db702fa9a61078bee37add0bd884a6190b05e63b028c"}, - {file = "rpds_py-0.10.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = 
"sha256:4cf0855a842c5b5c391dd32ca273b09e86abf8367572073bd1edfc52bc44446b"}, - {file = "rpds_py-0.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:69b857a7d8bd4f5d6e0db4086da8c46309a26e8cefdfc778c0c5cc17d4b11e08"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:975382d9aa90dc59253d6a83a5ca72e07f4ada3ae3d6c0575ced513db322b8ec"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35fbd23c1c8732cde7a94abe7fb071ec173c2f58c0bd0d7e5b669fdfc80a2c7b"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:106af1653007cc569d5fbb5f08c6648a49fe4de74c2df814e234e282ebc06957"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce5e7504db95b76fc89055c7f41e367eaadef5b1d059e27e1d6eabf2b55ca314"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aca759ada6b1967fcfd4336dcf460d02a8a23e6abe06e90ea7881e5c22c4de6"}, - {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b5d4bdd697195f3876d134101c40c7d06d46c6ab25159ed5cbd44105c715278a"}, - {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a657250807b6efd19b28f5922520ae002a54cb43c2401e6f3d0230c352564d25"}, - {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:177c9dd834cdf4dc39c27436ade6fdf9fe81484758885f2d616d5d03c0a83bd2"}, - {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e22491d25f97199fc3581ad8dd8ce198d8c8fdb8dae80dea3512e1ce6d5fa99f"}, - {file = "rpds_py-0.10.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:2f3e1867dd574014253b4b8f01ba443b9c914e61d45f3674e452a915d6e929a3"}, - {file = "rpds_py-0.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c22211c165166de6683de8136229721f3d5c8606cc2c3d1562da9a3a5058049c"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40bc802a696887b14c002edd43c18082cb7b6f9ee8b838239b03b56574d97f71"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e271dd97c7bb8eefda5cca38cd0b0373a1fea50f71e8071376b46968582af9b"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95cde244e7195b2c07ec9b73fa4c5026d4a27233451485caa1cd0c1b55f26dbd"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a80cf4884920863623a9ee9a285ee04cef57ebedc1cc87b3e3e0f24c8acfe5"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763ad59e105fca09705d9f9b29ecffb95ecdc3b0363be3bb56081b2c6de7977a"}, - {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:187700668c018a7e76e89424b7c1042f317c8df9161f00c0c903c82b0a8cac5c"}, - {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5267cfda873ad62591b9332fd9472d2409f7cf02a34a9c9cb367e2c0255994bf"}, - {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:2ed83d53a8c5902ec48b90b2ac045e28e1698c0bea9441af9409fc844dc79496"}, - {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:255f1a10ae39b52122cce26ce0781f7a616f502feecce9e616976f6a87992d6b"}, - {file = "rpds_py-0.10.3-cp38-none-win32.whl", hash = "sha256:a019a344312d0b1f429c00d49c3be62fa273d4a1094e1b224f403716b6d03be1"}, - {file = 
"rpds_py-0.10.3-cp38-none-win_amd64.whl", hash = "sha256:efb9ece97e696bb56e31166a9dd7919f8f0c6b31967b454718c6509f29ef6fee"}, - {file = "rpds_py-0.10.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:570cc326e78ff23dec7f41487aa9c3dffd02e5ee9ab43a8f6ccc3df8f9327623"}, - {file = "rpds_py-0.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cff7351c251c7546407827b6a37bcef6416304fc54d12d44dbfecbb717064717"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:177914f81f66c86c012311f8c7f46887ec375cfcfd2a2f28233a3053ac93a569"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:448a66b8266de0b581246ca7cd6a73b8d98d15100fb7165974535fa3b577340e"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bbac1953c17252f9cc675bb19372444aadf0179b5df575ac4b56faaec9f6294"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dd9d9d9e898b9d30683bdd2b6c1849449158647d1049a125879cb397ee9cd12"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8c71ea77536149e36c4c784f6d420ffd20bea041e3ba21ed021cb40ce58e2c9"}, - {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16a472300bc6c83fe4c2072cc22b3972f90d718d56f241adabc7ae509f53f154"}, - {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9255e7165083de7c1d605e818025e8860636348f34a79d84ec533546064f07e"}, - {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:53d7a3cd46cdc1689296348cb05ffd4f4280035770aee0c8ead3bbd4d6529acc"}, - {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22da15b902f9f8e267020d1c8bcfc4831ca646fecb60254f7bc71763569f56b1"}, - {file = "rpds_py-0.10.3-cp39-none-win32.whl", hash = "sha256:850c272e0e0d1a5c5d73b1b7871b0a7c2446b304cec55ccdb3eaac0d792bb065"}, - {file = "rpds_py-0.10.3-cp39-none-win_amd64.whl", hash = "sha256:de61e424062173b4f70eec07e12469edde7e17fa180019a2a0d75c13a5c5dc57"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:af247fd4f12cca4129c1b82090244ea5a9d5bb089e9a82feb5a2f7c6a9fe181d"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ad59efe24a4d54c2742929001f2d02803aafc15d6d781c21379e3f7f66ec842"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642ed0a209ced4be3a46f8cb094f2d76f1f479e2a1ceca6de6346a096cd3409d"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37d0c59548ae56fae01c14998918d04ee0d5d3277363c10208eef8c4e2b68ed6"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad6ed9e70ddfb34d849b761fb243be58c735be6a9265b9060d6ddb77751e3e8"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f94fdd756ba1f79f988855d948ae0bad9ddf44df296770d9a58c774cfbcca72"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77076bdc8776a2b029e1e6ffbe6d7056e35f56f5e80d9dc0bad26ad4a024a762"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87d9b206b1bd7a0523375dc2020a6ce88bca5330682ae2fe25e86fd5d45cea9c"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", 
hash = "sha256:8efaeb08ede95066da3a3e3c420fcc0a21693fcd0c4396d0585b019613d28515"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a4d9bfda3f84fc563868fe25ca160c8ff0e69bc4443c5647f960d59400ce6557"}, - {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d27aa6bbc1f33be920bb7adbb95581452cdf23005d5611b29a12bb6a3468cc95"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ed8313809571a5463fd7db43aaca68ecb43ca7a58f5b23b6e6c6c5d02bdc7882"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:e10e6a1ed2b8661201e79dff5531f8ad4cdd83548a0f81c95cf79b3184b20c33"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:015de2ce2af1586ff5dc873e804434185199a15f7d96920ce67e50604592cae9"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae87137951bb3dc08c7d8bfb8988d8c119f3230731b08a71146e84aaa919a7a9"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bb4f48bd0dd18eebe826395e6a48b7331291078a879295bae4e5d053be50d4c"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09362f86ec201288d5687d1dc476b07bf39c08478cde837cb710b302864e7ec9"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821392559d37759caa67d622d0d2994c7a3f2fb29274948ac799d496d92bca73"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7170cbde4070dc3c77dec82abf86f3b210633d4f89550fa0ad2d4b549a05572a"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:5de11c041486681ce854c814844f4ce3282b6ea1656faae19208ebe09d31c5b8"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:4ed172d0c79f156c1b954e99c03bc2e3033c17efce8dd1a7c781bc4d5793dfac"}, - {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:11fdd1192240dda8d6c5d18a06146e9045cb7e3ba7c06de6973000ff035df7c6"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f602881d80ee4228a2355c68da6b296a296cd22bbb91e5418d54577bbf17fa7c"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:691d50c99a937709ac4c4cd570d959a006bd6a6d970a484c84cc99543d4a5bbb"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24cd91a03543a0f8d09cb18d1cb27df80a84b5553d2bd94cba5979ef6af5c6e7"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc2200e79d75b5238c8d69f6a30f8284290c777039d331e7340b6c17cad24a5a"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea65b59882d5fa8c74a23f8960db579e5e341534934f43f3b18ec1839b893e41"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:829e91f3a8574888b73e7a3feb3b1af698e717513597e23136ff4eba0bc8387a"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eab75a8569a095f2ad470b342f2751d9902f7944704f0571c8af46bede438475"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:061c3ff1f51ecec256e916cf71cc01f9975af8fb3af9b94d3c0cc8702cfea637"}, - {file = 
"rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:39d05e65f23a0fe897b6ac395f2a8d48c56ac0f583f5d663e0afec1da89b95da"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eca20917a06d2fca7628ef3c8b94a8c358f6b43f1a621c9815243462dcccf97"}, - {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e8d0f0eca087630d58b8c662085529781fd5dc80f0a54eda42d5c9029f812599"}, - {file = "rpds_py-0.10.3.tar.gz", hash = "sha256:fcc1ebb7561a3e24a6588f7c6ded15d80aec22c66a070c757559b57b17ffd1cb"}, + {file = "rpds_py-0.10.4-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:e41824343c2c129599645373992b1ce17720bb8a514f04ff9567031e1c26951e"}, + {file = "rpds_py-0.10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b9d8884d58ea8801e5906a491ab34af975091af76d1a389173db491ee7e316bb"}, + {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db93f9017b384a4f194e1d89e1ce82d0a41b1fafdbbd3e0c8912baf13f2950f"}, + {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c31ecfc53ac03dad4928a1712f3a2893008bfba1b3cde49e1c14ff67faae2290"}, + {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f92d2372ec992c82fd7c74aa21e2a1910b3dcdc6a7e6392919a138f21d528a3"}, + {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7ea49ddf51d5ec0c3cbd95190dd15e077a3153c8d4b22a33da43b5dd2b3c640"}, + {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c27942722cd5039bbf5098c7e21935a96243fed00ea11a9589f3c6c6424bd84"}, + {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:08f07150c8ebbdbce1d2d51b8e9f4d588749a2af6a98035485ebe45c7ad9394e"}, + {file = "rpds_py-0.10.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f3331a3684192659fa1090bf2b448db928152fcba08222e58106f44758ef25f7"}, + {file = "rpds_py-0.10.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:efffa359cc69840c8793f0c05a7b663de6afa7b9078fa6c80309ee38b9db677d"}, + {file = "rpds_py-0.10.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:86e8d6ff15fa7a9590c0addaf3ce52fb58bda4299cab2c2d0afa404db6848dab"}, + {file = "rpds_py-0.10.4-cp310-none-win32.whl", hash = "sha256:8f90fc6dd505867514c8b8ef68a712dc0be90031a773c1ae2ad469f04062daef"}, + {file = "rpds_py-0.10.4-cp310-none-win_amd64.whl", hash = "sha256:9f9184744fb800c9f28e155a5896ecb54816296ee79d5d1978be6a2ae60f53c4"}, + {file = "rpds_py-0.10.4-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:72e9b1e92830c876cd49565d8404e4dcc9928302d348ea2517bc3f9e3a873a2a"}, + {file = "rpds_py-0.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3650eae998dc718960e90120eb45d42bd57b18b21b10cb9ee05f91bff2345d48"}, + {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f40413d2859737ce6d95c29ce2dde0ef7cdc3063b5830ae4342fef5922c3bba7"}, + {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b953d11b544ca5f2705bb77b177d8e17ab1bfd69e0fd99790a11549d2302258c"}, + {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28b4942ec7d9d6114c1e08cace0157db92ef674636a38093cab779ace5742d3a"}, + {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e0e2e01c5f61ddf47e3ed2d1fe1c9136e780ca6222d57a2517b9b02afd4710c"}, 
+ {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:927e3461dae0c09b1f2e0066e50c1a9204f8a64a3060f596e9a6742d3b307785"}, + {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e69bbe0ede8f7fe2616e779421bbdb37f025c802335a90f6416e4d98b368a37"}, + {file = "rpds_py-0.10.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc688a59c100f038fa9fec9e4ab457c2e2d1fca350fe7ea395016666f0d0a2dc"}, + {file = "rpds_py-0.10.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ec001689402b9104700b50a005c2d3d0218eae90eaa8bdbbd776fe78fe8a74b7"}, + {file = "rpds_py-0.10.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:628fbb8be71a103499d10b189af7764996ab2634ed7b44b423f1e19901606e0e"}, + {file = "rpds_py-0.10.4-cp311-none-win32.whl", hash = "sha256:e3f9c9e5dd8eba4768e15f19044e1b5e216929a43a54b4ab329e103aed9f3eda"}, + {file = "rpds_py-0.10.4-cp311-none-win_amd64.whl", hash = "sha256:3bc561c183684636c0099f9c3fbab8c1671841942edbce784bb01b4707d17924"}, + {file = "rpds_py-0.10.4-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:36ff30385fb9fb3ac23a28bffdd4a230a5229ed5b15704b708b7c84bfb7fce51"}, + {file = "rpds_py-0.10.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db0589e0bf41ff6ce284ab045ca89f27be1adf19e7bce26c2e7de6739a70c18b"}, + {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c330cb125983c5d380fef4a4155248a276297c86d64625fdaf500157e1981c"}, + {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d230fddc60caced271cc038e43e6fb8f4dd6b2dbaa44ac9763f2d76d05b0365a"}, + {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a9e864ec051a58fdb6bb2e6da03942adb20273897bc70067aee283e62bbac4d"}, + {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e41d5b334e8de4bc3f38843f31b2afa9a0c472ebf73119d3fd55cde08974bdf"}, + {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bb3f3cb6072c73e6ec1f865d8b80419b599f1597acf33f63fbf02252aab5a03"}, + {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:576d48e1e45c211e99fc02655ade65c32a75d3e383ccfd98ce59cece133ed02c"}, + {file = "rpds_py-0.10.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b28b9668a22ca2cfca4433441ba9acb2899624a323787a509a3dc5fbfa79c49d"}, + {file = "rpds_py-0.10.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ddbd113a37307638f94be5ae232a325155fd24dbfae2c56455da8724b471e7be"}, + {file = "rpds_py-0.10.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd0ad98c7d72b0e4cbfe89cdfa12cd07d2fd6ed22864341cdce12b318a383442"}, + {file = "rpds_py-0.10.4-cp312-none-win32.whl", hash = "sha256:2a97406d5e08b7095428f01dac0d3c091dc072351151945a167e7968d2755559"}, + {file = "rpds_py-0.10.4-cp312-none-win_amd64.whl", hash = "sha256:aab24b9bbaa3d49e666e9309556591aa00748bd24ea74257a405f7fed9e8b10d"}, + {file = "rpds_py-0.10.4-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:6c5ca3eb817fb54bfd066740b64a2b31536eb8fe0b183dc35b09a7bd628ed680"}, + {file = "rpds_py-0.10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd37ab9a24021821b715478357af1cf369d5a42ac7405e83e5822be00732f463"}, + {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2573ec23ad3a59dd2bc622befac845695972f3f2d08dc1a4405d017d20a6c225"}, + {file = 
"rpds_py-0.10.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:362faeae52dc6ccc50c0b6a01fa2ec0830bb61c292033f3749a46040b876f4ba"}, + {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40f6e53461b19ddbb3354fe5bcf3d50d4333604ae4bf25b478333d83ca68002c"}, + {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6090ba604ea06b525a231450ae5d343917a393cbf50423900dea968daf61d16f"}, + {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e29dac59df890972f73c511948072897f512974714a803fe793635b80ff8c7"}, + {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f82abb5c5b83dc30e96be99ce76239a030b62a73a13c64410e429660a5602bfd"}, + {file = "rpds_py-0.10.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a3628815fd170a64624001bfb4e28946fd515bd672e68a1902d9e0290186eaf3"}, + {file = "rpds_py-0.10.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d37f27ad80f742ef82796af3fe091888864958ad0bc8bab03da1830fa00c6004"}, + {file = "rpds_py-0.10.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:255a23bded80605e9f3997753e3a4b89c9aec9efb07ec036b1ca81440efcc1a9"}, + {file = "rpds_py-0.10.4-cp38-none-win32.whl", hash = "sha256:049098dabfe705e9638c55a3321137a821399c50940041a6fcce267a22c70db2"}, + {file = "rpds_py-0.10.4-cp38-none-win_amd64.whl", hash = "sha256:aa45cc71bf23a3181b8aa62466b5a2b7b7fb90fdc01df67ca433cd4fce7ec94d"}, + {file = "rpds_py-0.10.4-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:3507c459767cf24c11e9520e2a37c89674266abe8e65453e5cb66398aa47ee7b"}, + {file = "rpds_py-0.10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2603e084054351cc65097da326570102c4c5bd07426ba8471ceaefdb0b642cc9"}, + {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0f1d336786cb62613c72c00578c98e5bb8cd57b49c5bae5d4ab906ca7872f98"}, + {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf032367f921201deaecf221d4cc895ea84b3decf50a9c73ee106f961885a0ad"}, + {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f050ceffd8c730c1619a16bbf0b9cd037dcdb94b54710928ba38c7bde67e4a4"}, + {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8709eb4ab477c533b7d0a76cd3065d7d95c9e25e6b9f6e27caeeb8c63e8799c9"}, + {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc20dadb102140dff63529e08ce6f9745dbd36e673ebb2b1c4a63e134bca81c2"}, + {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd7da2adc721ccf19ac7ec86cae3a4fcaba03d9c477d5bd64ded6e9bb817bf3f"}, + {file = "rpds_py-0.10.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e5dba1c11e089b526379e74f6c636202e4c5bad9a48c7416502b8a5b0d026c91"}, + {file = "rpds_py-0.10.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ffd539d213c1ea2989ab92a5b9371ae7159c8c03cf2bcb9f2f594752f755ecd3"}, + {file = "rpds_py-0.10.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e791e3d13b14d0a7921804d0efe4d7bd15508bbcf8cb7a0c1ee1a27319a5f033"}, + {file = "rpds_py-0.10.4-cp39-none-win32.whl", hash = "sha256:2f2ac8bb01f705c5caaa7fe77ffd9b03f92f1b5061b94228f6ea5eaa0fca68ad"}, + {file = "rpds_py-0.10.4-cp39-none-win_amd64.whl", hash = "sha256:7c7ca791bedda059e5195cf7c6b77384657a51429357cdd23e64ac1d4973d6dc"}, + {file = 
"rpds_py-0.10.4-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:9c7e7bd1fa1f535af71dfcd3700fc83a6dc261a1204f8f5327d8ffe82e52905d"}, + {file = "rpds_py-0.10.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7089d8bfa8064b28b2e39f5af7bf12d42f61caed884e35b9b4ea9e6fb1175077"}, + {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1f191befea279cb9669b57be97ab1785781c8bab805900e95742ebfaa9cbf1d"}, + {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98c0aecf661c175ce9cb17347fc51a5c98c3e9189ca57e8fcd9348dae18541db"}, + {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d81359911c3bb31c899c6a5c23b403bdc0279215e5b3bc0d2a692489fed38632"}, + {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83da147124499fe41ed86edf34b4e81e951b3fe28edcc46288aac24e8a5c8484"}, + {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49db6c0a0e6626c2b97f5e7f8f7074da21cbd8ec73340c25e839a2457c007efa"}, + {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:125776d5db15162fdd9135372bef7fe4fb7c5f5810cf25898eb74a06a0816aec"}, + {file = "rpds_py-0.10.4-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:32819b662e3b4c26355a4403ea2f60c0a00db45b640fe722dd12db3d2ef807fb"}, + {file = "rpds_py-0.10.4-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3bd38b80491ef9686f719c1ad3d24d14fbd0e069988fdd4e7d1a6ffcdd7f4a13"}, + {file = "rpds_py-0.10.4-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2e79eeeff8394284b09577f36316d410525e0cf0133abb3de10660e704d3d38e"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3e37f1f134037601eb4b1f46854194f0cc082435dac2ee3de11e51529f7831f2"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:ba3246c60303eab3d0e562addf25a983d60bddc36f4d1edc2510f056d19df255"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9123ba0f3f98ff79780eebca9984a2b525f88563844b740f94cffb9099701230"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d98802b78093c7083cc51f83da41a5be5a57d406798c9f69424bd75f8ae0812a"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:58bae860d1d116e6b4e1aad0cdc48a187d5893994f56d26db0c5534df7a47afd"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd7e62e7d5bcfa38a62d8397fba6d0428b970ab7954c2197501cd1624f7f0bbb"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83f5228459b84fa6279e4126a53abfdd73cd9cc183947ee5084153880f65d7"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4bcb1abecd998a72ad4e36a0fca93577fd0c059a6aacc44f16247031b98f6ff4"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:9e7b3ad9f53ea9e085b3d27286dd13f8290969c0a153f8a52c8b5c46002c374b"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:cbec8e43cace64e63398155dc585dc479a89fef1e57ead06c22d3441e1bd09c3"}, + {file = "rpds_py-0.10.4-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:ad21c60fc880204798f320387164dcacc25818a7b4ec2a0bf6b6c1d57b007d23"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:6baea8a4f6f01e69e75cfdef3edd4a4d1c4b56238febbdf123ce96d09fbff010"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:94876c21512535955a960f42a155213315e6ab06a4ce8ce372341a2a1b143eeb"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cb55454a20d1b935f9eaab52e6ceab624a2efd8b52927c7ae7a43e02828dbe0"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13cbd79ccedc6b39c279af31ebfb0aec0467ad5d14641ddb15738bf6e4146157"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00a88003db3cc953f8656b59fc9af9d0637a1fb93c235814007988f8c153b2f2"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0f7f77a77c37159c9f417b8dd847f67a29e98c6acb52ee98fc6b91efbd1b2b6"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70563a1596d2e0660ca2cebb738443437fc0e38597e7cbb276de0a7363924a52"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3ece9aa6d07e18c966f14b4352a4c6f40249f6174d3d2c694c1062e19c6adbb"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d5ad7b1a1f6964d19b1a8acfc14bf7864f39587b3e25c16ca04f6cd1815026b3"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:60018626e637528a1fa64bb3a2b3e46ab7bf672052316d61c3629814d5e65052"}, + {file = "rpds_py-0.10.4-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ae8a32ab77a84cc870bbfb60645851ca0f7d58fd251085ad67464b1445d632ca"}, + {file = "rpds_py-0.10.4.tar.gz", hash = "sha256:18d5ff7fbd305a1d564273e9eb22de83ae3cd9cd6329fddc8f12f6428a711a6a"}, ] [[package]] @@ -2348,17 +2209,17 @@ typing-extensions = ">=3.7.4" [[package]] name = "urllib3" -version = "1.26.16" +version = "1.26.17" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, + {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"}, + {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] @@ -2384,24 +2245,24 @@ test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess [[package]] name = "wcwidth" -version = "0.2.6" +version = "0.2.8" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, - {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, + {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, + {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, ] [[package]] name = "websocket-client" -version = "1.6.3" +version = "1.6.4" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.3.tar.gz", hash = "sha256:3aad25d31284266bcfcfd1fd8a743f63282305a364b8d0948a43bd606acc652f"}, - {file = "websocket_client-1.6.3-py3-none-any.whl", hash = "sha256:6cfc30d051ebabb73a5fa246efdcc14c8fbebbd0330f8984ac3bb6d9edd2ad03"}, + {file = "websocket-client-1.6.4.tar.gz", hash = "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df"}, + {file = "websocket_client-1.6.4-py3-none-any.whl", hash = "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24"}, ] [package.extras] @@ -2513,5 +2374,5 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" -python-versions = "^3.8" -content-hash = "2d87d987bc2dbc1dbebe7ada682b5f2f0a475e51147f9045db752d07c057eb24" +python-versions = "^3.9" +content-hash = "e681986285dd53f1bbbbe532fdaae987ea20b77191c16c9703ddd6aa78f1e67f" diff --git a/pyproject.toml b/pyproject.toml index 8f6505dc..c1c9bab0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,10 +20,10 @@ homepage = "https://github.com/project-codeflare/codeflare-sdk" keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] [tool.poetry.dependencies] -python = "^3.8" +python = "^3.9" openshift-client = "1.0.18" rich = "^12.5" -ray = {version = "2.5.0", extras = ["default"]} +ray = {version = "2.7.0", extras = ["default"]} kubernetes = ">= 25.3.0, < 27" codeflare-torchx = "0.6.0.dev1" cryptography = "40.0.2" From 7f4710ed523243485971a5b69ea031973769ebac Mon Sep 17 00:00:00 2001 From: Kevin Date: Tue, 10 Oct 2023 13:30:17 
-0400 Subject: [PATCH 030/496] update ray version in default template and code references Signed-off-by: Kevin --- custom-nb-image/Dockerfile | 3 +- .../additional-demos/hf_interactive.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- .../notebook-ex-outputs/0_basic_ray.ipynb | 2 +- .../notebook-ex-outputs/gptfttest.yaml | 4 +- .../notebook-ex-outputs/instascaletest.yaml | 4 +- .../notebook-ex-outputs/interactivetest.yaml | 4 +- .../notebook-ex-outputs/jobtest.yaml | 4 +- .../notebook-ex-outputs/raytest.yaml | 4 +- .../preview_nbs/0_basic_ray.ipynb | 2 +- poetry.lock | 192 +++++++++++++++++- pyproject.toml | 2 +- src/codeflare_sdk/cluster/config.py | 2 +- .../templates/base-template.yaml | 6 +- tests/test-case-bad.yaml | 4 +- tests/test-case-prio.yaml | 6 +- tests/test-case.yaml | 6 +- tests/unit_test.py | 2 +- 18 files changed, 214 insertions(+), 37 deletions(-) diff --git a/custom-nb-image/Dockerfile b/custom-nb-image/Dockerfile index e5393f8b..e4bfcb26 100644 --- a/custom-nb-image/Dockerfile +++ b/custom-nb-image/Dockerfile @@ -12,8 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM quay.io/opendatahub/notebooks:jupyter-minimal-ubi8-python-3.8-4c8f26e -# Install: torch (v1.12), ray (v2.1.0) and others +FROM quay.io/opendatahub/notebooks:jupyter-minimal-ubi9-python-3.9-2023a_20230302 COPY requirements.txt requirements.txt diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 32e7be41..a6780a8f 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -96,7 +96,7 @@ " min_memory=16, \n", " max_memory=16, \n", " num_gpus=4,\n", - " image=\"quay.io/project-codeflare/ray:2.5.0-py38-cu116\",\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=True, machine_types=[\"m5.xlarge\", \"p3.8xlarge\"]))" ] }, diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index dfe06733..d37b7d7d 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -66,7 +66,7 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:2.5.0-py38-cu116\", #current default\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\", #current default\n", " instascale=False\n", "))" ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index be4e8a5e..18e5a840 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -74,7 +74,7 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:2.5.0-py38-cu116\", #current default\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\", #current default\n", " instascale=False\n", "))" ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml index 427491dc..436abe05 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml @@ -81,7 +81,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: 
quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -147,7 +147,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml index 58ccc1ae..5f952649 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml @@ -81,7 +81,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -147,7 +147,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index 83e74e93..c80684e1 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -81,7 +81,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -147,7 +147,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index ef397681..aa85803e 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index 96d9b8af..c58b963a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: 
quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index dfe06733..d37b7d7d 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -66,7 +66,7 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:2.5.0-py38-cu116\", #current default\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\", #current default\n", " instascale=False\n", "))" ] diff --git a/poetry.lock b/poetry.lock index 353c743c..a02a4261 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. [[package]] name = "aiohttp" version = "3.8.6" description = "Async http client/server framework (asyncio)" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -112,6 +113,7 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aiohttp-cors" version = "0.7.0" description = "CORS support for aiohttp" +category = "main" optional = false python-versions = "*" files = [ @@ -126,6 +128,7 @@ aiohttp = ">=1.1" name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -140,6 +143,7 @@ frozenlist = ">=1.1.0" name = "ansicon" version = "1.89.0" description = "Python wrapper for loading Jason Hood's ANSICON" +category = "main" optional = false python-versions = "*" files = [ @@ -151,6 +155,7 @@ files = [ name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -162,6 +167,7 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -180,6 +186,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -214,6 +221,7 @@ typecheck = ["mypy"] name = "blessed" version = "1.20.0" description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." +category = "main" optional = false python-versions = ">=2.7" files = [ @@ -230,6 +238,7 @@ wcwidth = ">=0.1.4" name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -241,6 +250,7 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -252,6 +262,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -316,6 +327,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -415,6 +427,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -429,6 +442,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "codeflare-torchx" version = "0.6.0.dev1" description = "TorchX SDK and Components" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -457,6 +471,7 @@ ray = ["ray (>=1.12.1)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -468,6 +483,7 @@ files = [ name = "colorful" version = "0.5.5" description = "Terminal string styling done right, in Python." +category = "main" optional = false python-versions = "*" files = [ @@ -482,6 +498,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "commonmark" version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" +category = "main" optional = false python-versions = "*" files = [ @@ -496,6 +513,7 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -568,6 +586,7 @@ toml = ["tomli"] name = "cryptography" version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -609,6 +628,7 @@ tox = ["tox"] name = "distlib" version = "0.3.7" description = "Distribution utilities" +category = "main" optional = false python-versions = "*" files = [ @@ -620,6 +640,7 @@ files = [ name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -641,6 +662,7 @@ ssh = ["paramiko (>=2.4.3)"] name = "docstring-parser" version = "0.8.1" description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -651,6 +673,7 @@ files = [ name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -665,6 +688,7 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" +category = "main" optional = false python-versions = "*" files = [ @@ -679,6 +703,7 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "filelock" version = "3.12.4" description = "A platform independent file lock." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -695,6 +720,7 @@ typing = ["typing-extensions (>=4.7.1)"] name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -765,6 +791,7 @@ files = [ name = "fsspec" version = "2023.9.2" description = "File-system specification" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -800,6 +827,7 @@ tqdm = ["tqdm"] name = "google-api-core" version = "2.12.0" description = "Google API client core library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -822,6 +850,7 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-auth" version = "2.23.3" description = "Google Authentication Library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -845,6 +874,7 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "googleapis-common-protos" version = "1.60.0" description = "Common protobufs used in Google APIs" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -862,6 +892,7 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "gpustat" version = "1.1.1" description = "An utility to monitor NVIDIA GPU status and usage" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -881,6 +912,7 @@ test = ["mockito (>=1.2.1)", "pytest (>=5.4.1)", "pytest-runner"] name = "grpcio" version = "1.59.0" description = "HTTP/2-based RPC framework" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -947,6 +979,7 @@ protobuf = ["grpcio-tools (>=1.59.0)"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -958,6 +991,7 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -973,10 +1007,30 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +[[package]] +name = "importlib-resources" +version = "6.1.0" +description = "Read resources from Python packages" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = "sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, + {file = "importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] + [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -988,6 
+1042,7 @@ files = [ name = "jinxed" version = "1.2.0" description = "Jinxed Terminal Library" +category = "main" optional = false python-versions = "*" files = [ @@ -1002,6 +1057,7 @@ ansicon = {version = "*", markers = "platform_system == \"Windows\""} name = "jsonschema" version = "4.19.1" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1011,7 +1067,9 @@ files = [ [package.dependencies] attrs = ">=22.2.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} jsonschema-specifications = ">=2023.03.6" +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} referencing = ">=0.28.4" rpds-py = ">=0.7.1" @@ -1023,6 +1081,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1031,12 +1090,14 @@ files = [ ] [package.dependencies] +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} referencing = ">=0.28.0" [[package]] name = "kubernetes" version = "26.1.0" description = "Kubernetes python client" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1054,7 +1115,7 @@ requests-oauthlib = "*" setuptools = ">=21.0.0" six = ">=1.9.0" urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.0 || >=0.43.0" [package.extras] adal = ["adal (>=1.0.2)"] @@ -1063,6 +1124,7 @@ adal = ["adal (>=1.0.2)"] name = "mako" version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1082,6 +1144,7 @@ testing = ["pytest"] name = "markdown" version = "3.5" description = "Python implementation of John Gruber's Markdown." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1100,6 +1163,7 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1159,6 +1223,7 @@ files = [ name = "msgpack" version = "1.0.7" description = "MessagePack serializer" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1224,6 +1289,7 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1307,6 +1373,7 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
+category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1314,10 +1381,49 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = 
"numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + [[package]] name = "numpy" version = "1.25.2" description = "Fundamental package for array computing in Python" +category = "main" optional = false python-versions = ">=3.9" files = [ @@ -1352,6 +1458,7 @@ files = [ name = "nvidia-ml-py" version = "12.535.108" description = "Python Bindings for the NVIDIA Management Library" +category = "main" optional = false python-versions = "*" files = [ @@ -1363,6 +1470,7 @@ files = [ name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1379,6 +1487,7 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "opencensus" version = "0.11.3" description = "A stats collection and distributed tracing framework" +category = "main" optional = false python-versions = "*" files = [ @@ -1394,6 +1503,7 @@ opencensus-context = ">=0.1.3" name = "opencensus-context" version = "0.1.3" description = "OpenCensus Runtime Context" +category = "main" optional = false python-versions = "*" files = [ @@ -1405,6 +1515,7 @@ files = [ name = "openshift-client" version = "1.0.18" description = "OpenShift python client" +category = "main" optional = false python-versions = "*" files = [ @@ -1421,6 +1532,7 @@ six = "*" name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1432,6 +1544,7 @@ files = [ name = "paramiko" version = "3.3.1" description = "SSH2 protocol library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1453,6 +1566,7 @@ invoke = ["invoke (>=2.0)"] name = "pdoc3" version = "0.10.0" description = "Auto-generate API documentation for Python projects." +category = "dev" optional = false python-versions = ">= 3.6" files = [ @@ -1464,10 +1578,23 @@ files = [ mako = "*" markdown = ">=3.0" +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." 
+category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] + [[package]] name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1483,6 +1610,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1498,6 +1626,7 @@ testing = ["pytest", "pytest-benchmark"] name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1512,6 +1641,7 @@ twisted = ["twisted"] name = "protobuf" version = "4.24.4" description = "" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1534,6 +1664,7 @@ files = [ name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1560,6 +1691,7 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "py-spy" version = "0.3.14" description = "Sampling profiler for Python programs" +category = "main" optional = false python-versions = "*" files = [ @@ -1576,6 +1708,7 @@ files = [ name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1587,6 +1720,7 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1601,6 +1735,7 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1612,6 +1747,7 @@ files = [ name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1664,6 +1800,7 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1678,6 +1815,7 @@ plugins = ["importlib-metadata"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1704,6 +1842,7 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyre-extensions" version = "0.0.30" description = "Type system extensions for use with the pyre type checker" +category = "main" optional = false python-versions = "*" files = [ @@ -1719,6 +1858,7 @@ typing-inspect = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1741,6 +1881,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1758,6 +1899,7 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -1772,6 +1914,7 @@ six = ">=1.5" name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "main" optional = false python-versions = "*" files = [ @@ -1795,6 +1938,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1803,6 +1947,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1810,8 +1955,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = 
"sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1828,6 +1980,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1835,6 +1988,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = 
"PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1844,6 +1998,7 @@ files = [ name = "ray" version = "2.7.0" description = "Ray provides a simple, universal API for building distributed applications." +category = "main" optional = false python-versions = "*" files = [ @@ -1883,12 +2038,15 @@ filelock = "*" frozenlist = "*" gpustat = {version = ">=1.0.0", optional = true, markers = "extra == \"default\""} grpcio = [ - {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\" and extra == \"default\""}, - {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""}, + {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\""}, + {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\""}, ] jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" -numpy = {version = ">=1.19.3", markers = "python_version >= \"3.9\""} +numpy = [ + {version = ">=1.16", markers = "python_version < \"3.9\""}, + {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, +] opencensus = {version = "*", optional = true, markers = "extra == \"default\""} packaging = "*" prometheus-client = {version = ">=0.7.1", optional = true, markers = "extra == \"default\""} @@ -1918,6 +2076,7 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. name = "referencing" version = "0.30.2" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1933,6 +2092,7 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1954,6 +2114,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1972,6 +2133,7 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "12.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" optional = false python-versions = ">=3.6.3,<4.0.0" files = [ @@ -1982,6 +2144,7 @@ files = [ [package.dependencies] commonmark = ">=0.9.0,<0.10.0" pygments = ">=2.6.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] @@ -1990,6 +2153,7 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] name = "rpds-py" version = "0.10.4" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2098,6 +2262,7 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" +category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -2112,6 +2277,7 @@ pyasn1 = ">=0.1.3" name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2128,6 +2294,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2139,6 +2306,7 @@ files = [ name = "smart-open" version = "6.4.0" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2160,6 +2328,7 @@ webhdfs = ["requests"] name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2174,6 +2343,7 @@ widechars = ["wcwidth"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2185,6 +2355,7 @@ files = [ name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2196,6 +2367,7 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." +category = "main" optional = false python-versions = "*" files = [ @@ -2211,6 +2383,7 @@ typing-extensions = ">=3.7.4" name = "urllib3" version = "1.26.17" description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -2227,6 +2400,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "virtualenv" version = "20.21.0" description = "Virtual Python Environment builder" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2247,6 +2421,7 @@ test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess name = "wcwidth" version = "0.2.8" description = "Measures the displayed width of unicode strings in a terminal" +category = "main" optional = false python-versions = "*" files = [ @@ -2258,6 +2433,7 @@ files = [ name = "websocket-client" version = "1.6.4" description = "WebSocket client for Python with low level API options" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2274,6 +2450,7 @@ test = ["websockets"] name = "yarl" version = "1.9.2" description = "Yet another URL library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2361,6 +2538,7 @@ multidict = ">=4.0" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2374,5 +2552,5 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = "e681986285dd53f1bbbbe532fdaae987ea20b77191c16c9703ddd6aa78f1e67f" +python-versions = "^3.8" +content-hash = "089204b324e02b65ee7dfcec621a4f41df3787ea4ac61a6a752ac4bb10f982bf" diff --git a/pyproject.toml b/pyproject.toml index c1c9bab0..531dfd84 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,7 @@ homepage = "https://github.com/project-codeflare/codeflare-sdk" keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] [tool.poetry.dependencies] -python = "^3.9" +python = "^3.8" openshift-client = "1.0.18" rich = "^12.5" ray = {version = "2.7.0", extras = ["default"]} diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index bde3f4ca..30875a98 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -47,7 +47,7 @@ class ClusterConfiguration: template: str = f"{dir}/templates/base-template.yaml" instascale: bool = False envs: dict = field(default_factory=dict) - image: str = "quay.io/project-codeflare/ray:2.5.0-py38-cu116" + image: str = "quay.io/project-codeflare/ray:latest-py39-cu118" local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index c2dddb6d..d2f3385e 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -49,7 +49,7 @@ spec: # - kubernetes spec: # The version of Ray you are using. Make sure all Ray containers are running this version of Ray. - rayVersion: '2.5.0' + rayVersion: '2.7.0' # If enableInTreeAutoscaling is true, the autoscaler sidecar will be added to the Ray head pod. # Ray autoscaler integration is supported only for Ray versions >= 1.11.0 # Ray autoscaler integration is Beta with KubeRay >= 0.3.0 and Ray >= 2.0.0. 
@@ -162,7 +162,7 @@ spec: - sh - -c - cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf "authorityKeyIdentifier=keyid,issuer\nbasicConstraints=CA:FALSE\nsubjectAltName = @alt_names\n[alt_names]\nDNS.1 = 127.0.0.1\nDNS.2 = localhost\nDNS.3 = ${FQ_RAY_IP}\nDNS.4 = $(awk 'END{print $1}' /etc/hosts)\nDNS.5 = rayclient-deployment-name-$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).server-name">./domain.ext && cp /home/ray/workspace/ca/* . && openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext - image: rayproject/ray:2.5.0 + image: rayproject/ray:2.7.0 name: create-cert # securityContext: # runAsUser: 1000 @@ -227,7 +227,7 @@ spec: image: busybox:1.28 command: ['sh', '-c', "until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done"] - name: create-cert - image: rayproject/ray:2.5.0 + image: rayproject/ray:2.7.0 command: - sh - -c diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index 358f756e..cdc4f6ac 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -73,7 +73,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -130,7 +130,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index aadfad7d..6e7c8687 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -81,7 +81,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -110,7 +110,7 @@ spec: imagePullSecrets: - name: unit-test-pull-secret priorityClassName: default - rayVersion: 2.5.0 + rayVersion: 2.7.0 workerGroupSpecs: - groupName: small-group-prio-test-cluster maxReplicas: 2 @@ -149,7 +149,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index adaf03db..08d36055 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -80,7 +80,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -108,7 +108,7 @@ spec: nvidia.com/gpu: 0 imagePullSecrets: - name: unit-test-pull-secret - rayVersion: 2.5.0 + rayVersion: 2.7.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster maxReplicas: 2 @@ -147,7 +147,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:2.5.0-py38-cu116 + image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: diff --git 
a/tests/unit_test.py b/tests/unit_test.py index 78925226..9b78e9e2 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -231,7 +231,7 @@ def test_config_creation(): assert config.min_cpus == 3 and config.max_cpus == 4 assert config.min_memory == 5 and config.max_memory == 6 assert config.num_gpus == 7 - assert config.image == "quay.io/project-codeflare/ray:2.5.0-py38-cu116" + assert config.image == "quay.io/project-codeflare/ray:latest-py39-cu118" assert config.template == f"{parent}/src/codeflare_sdk/templates/base-template.yaml" assert config.instascale assert config.machine_types == ["cpu.small", "gpu.large"] From 1c786d31ef0143aab6ee8ece352a1b50351d5226 Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Wed, 11 Oct 2023 18:36:27 +0000 Subject: [PATCH 031/496] Changes in docs for release: v0.9.0 --- docs/cluster/cluster.html | 107 ++++++++++++++++---- docs/cluster/config.html | 27 ++++- docs/cluster/model.html | 23 ++++- docs/utils/generate_yaml.html | 184 ++++++++++++++++++++++++---------- 4 files changed, 263 insertions(+), 78 deletions(-) diff --git a/docs/cluster/cluster.html b/docs/cluster/cluster.html index 21950889..3b396221 100644 --- a/docs/cluster/cluster.html +++ b/docs/cluster/cluster.html @@ -141,6 +141,9 @@

Module codeflare_sdk.cluster.cluster

name = self.config.name namespace = self.config.namespace + head_cpus = self.config.head_cpus + head_memory = self.config.head_memory + head_gpus = self.config.head_gpus min_cpu = self.config.min_cpus max_cpu = self.config.max_cpus min_memory = self.config.min_memory @@ -158,6 +161,9 @@

Module codeflare_sdk.cluster.cluster

return generate_appwrapper( name=name, namespace=namespace, + head_cpus=head_cpus, + head_memory=head_memory, + head_gpus=head_gpus, min_cpu=min_cpu, max_cpu=max_cpu, min_memory=min_memory, @@ -290,7 +296,7 @@

Module codeflare_sdk.cluster.cluster

else: return False - def wait_ready(self, timeout: Optional[int] = None): + def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True): """ Waits for requested cluster to be ready, up to an optional timeout (s). Checks every five seconds. @@ -300,19 +306,32 @@

Module codeflare_sdk.cluster.cluster

dashboard_ready = False status = None time = 0 - while not ready or not dashboard_ready: + while not ready: status, ready = self.status(print_to_console=False) - dashboard_ready = self.is_dashboard_ready() if status == CodeFlareClusterStatus.UNKNOWN: print( "WARNING: Current cluster status is unknown, have you run cluster.up yet?" ) - if not ready or not dashboard_ready: + if not ready: + if timeout and time >= timeout: + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for cluster to be ready" + ) + sleep(5) + time += 5 + print("Requested cluster is up and running!") + + while dashboard_check and not dashboard_ready: + dashboard_ready = self.is_dashboard_ready() + if not dashboard_ready: if timeout and time >= timeout: - raise TimeoutError(f"wait() timed out after waiting {timeout}s") + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for dashboard to be ready" + ) sleep(5) time += 5 - print("Requested cluster and dashboard are up and running!") + if dashboard_ready: + print("Dashboard is ready!") def details(self, print_to_console: bool = True) -> RayCluster: cluster = _copy_to_ray(self) @@ -640,6 +659,15 @@

Module codeflare_sdk.cluster.cluster

worker_gpu=0, # hard to detect currently how many gpus, can override it with what the user asked for namespace=rc["metadata"]["namespace"], dashboard=ray_route, + head_cpus=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["cpu"], + head_mem=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["memory"], + head_gpu=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["nvidia.com/gpu"], ) @@ -670,6 +698,9 @@

Module codeflare_sdk.cluster.cluster

worker_gpu=cluster.config.num_gpus, namespace=cluster.config.namespace, dashboard=cluster.cluster_dashboard_uri(), + head_cpus=cluster.config.head_cpus, + head_mem=cluster.config.head_memory, + head_gpu=cluster.config.head_gpus, ) if ray.status == CodeFlareClusterStatus.READY: ray.status = RayClusterStatus.READY @@ -879,6 +910,9 @@

Classes

name = self.config.name namespace = self.config.namespace + head_cpus = self.config.head_cpus + head_memory = self.config.head_memory + head_gpus = self.config.head_gpus min_cpu = self.config.min_cpus max_cpu = self.config.max_cpus min_memory = self.config.min_memory @@ -896,6 +930,9 @@

Classes

return generate_appwrapper( name=name, namespace=namespace, + head_cpus=head_cpus, + head_memory=head_memory, + head_gpus=head_gpus, min_cpu=min_cpu, max_cpu=max_cpu, min_memory=min_memory, @@ -1028,7 +1065,7 @@

Classes

else: return False - def wait_ready(self, timeout: Optional[int] = None): + def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True): """ Waits for requested cluster to be ready, up to an optional timeout (s). Checks every five seconds. @@ -1038,19 +1075,32 @@

Classes

dashboard_ready = False status = None time = 0 - while not ready or not dashboard_ready: + while not ready: status, ready = self.status(print_to_console=False) - dashboard_ready = self.is_dashboard_ready() if status == CodeFlareClusterStatus.UNKNOWN: print( "WARNING: Current cluster status is unknown, have you run cluster.up yet?" ) - if not ready or not dashboard_ready: + if not ready: + if timeout and time >= timeout: + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for cluster to be ready" + ) + sleep(5) + time += 5 + print("Requested cluster is up and running!") + + while dashboard_check and not dashboard_ready: + dashboard_ready = self.is_dashboard_ready() + if not dashboard_ready: if timeout and time >= timeout: - raise TimeoutError(f"wait() timed out after waiting {timeout}s") + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for dashboard to be ready" + ) sleep(5) time += 5 - print("Requested cluster and dashboard are up and running!") + if dashboard_ready: + print("Dashboard is ready!") def details(self, print_to_console: bool = True) -> RayCluster: cluster = _copy_to_ray(self) @@ -1267,6 +1317,9 @@

Methods

name = self.config.name namespace = self.config.namespace + head_cpus = self.config.head_cpus + head_memory = self.config.head_memory + head_gpus = self.config.head_gpus min_cpu = self.config.min_cpus max_cpu = self.config.max_cpus min_memory = self.config.min_memory @@ -1284,6 +1337,9 @@

Methods

return generate_appwrapper( name=name, namespace=namespace, + head_cpus=head_cpus, + head_memory=head_memory, + head_gpus=head_gpus, min_cpu=min_cpu, max_cpu=max_cpu, min_memory=min_memory, @@ -1653,7 +1709,7 @@

Methods

-def wait_ready(self, timeout: Optional[int] = None)
+def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True)

Waits for requested cluster to be ready, up to an optional timeout (s). @@ -1662,7 +1718,7 @@

Methods

Expand source code
-def wait_ready(self, timeout: Optional[int] = None):
+def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True):
     """
     Waits for requested cluster to be ready, up to an optional timeout (s).
     Checks every five seconds.
@@ -1672,19 +1728,32 @@ 

Methods

     dashboard_ready = False
     status = None
     time = 0
-    while not ready or not dashboard_ready:
+    while not ready:
         status, ready = self.status(print_to_console=False)
-        dashboard_ready = self.is_dashboard_ready()
         if status == CodeFlareClusterStatus.UNKNOWN:
             print(
                 "WARNING: Current cluster status is unknown, have you run cluster.up yet?"
             )
-        if not ready or not dashboard_ready:
+        if not ready:
+            if timeout and time >= timeout:
+                raise TimeoutError(
+                    f"wait() timed out after waiting {timeout}s for cluster to be ready"
+                )
+            sleep(5)
+            time += 5
+    print("Requested cluster is up and running!")
+
+    while dashboard_check and not dashboard_ready:
+        dashboard_ready = self.is_dashboard_ready()
+        if not dashboard_ready:
             if timeout and time >= timeout:
-                raise TimeoutError(f"wait() timed out after waiting {timeout}s")
+                raise TimeoutError(
+                    f"wait() timed out after waiting {timeout}s for dashboard to be ready"
+                )
             sleep(5)
             time += 5
-    print("Requested cluster and dashboard are up and running!")
+    if dashboard_ready:
+        print("Dashboard is ready!")
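
The hunk above documents the reworked Cluster.wait_ready(): readiness of the Ray cluster and of its dashboard are now polled as two separate phases, and the dashboard probe can be switched off entirely. A minimal usage sketch follows; the cluster name, namespace and timeout are illustrative placeholders, not values taken from this patch.

from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration

# Placeholder configuration: adjust name and namespace for your environment.
cluster = Cluster(ClusterConfiguration(name="demo", namespace="default"))
cluster.up()

# Wait up to 10 minutes for the RayCluster itself; pass dashboard_check=False
# to skip the HTTP probe of the dashboard route (useful when the route is not
# reachable anonymously from where this code runs).
cluster.wait_ready(timeout=600, dashboard_check=False)
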
diff --git a/docs/cluster/config.html b/docs/cluster/config.html index 0575c01c..2edf0a3c 100644 --- a/docs/cluster/config.html +++ b/docs/cluster/config.html @@ -66,6 +66,9 @@

Module codeflare_sdk.cluster.config

name: str namespace: str = None head_info: list = field(default_factory=list) + head_cpus: int = 2 + head_memory: int = 8 + head_gpus: int = 0 machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] min_cpus: int = 1 max_cpus: int = 1 @@ -76,7 +79,7 @@

Module codeflare_sdk.cluster.config

template: str = f"{dir}/templates/base-template.yaml" instascale: bool = False envs: dict = field(default_factory=dict) - image: str = "quay.io/project-codeflare/ray:2.5.0-py38-cu116" + image: str = "quay.io/project-codeflare/ray:latest-py39-cu118" local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None
@@ -93,7 +96,7 @@

Classes

class ClusterConfiguration -(name: str, namespace: str = None, head_info: list = <factory>, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, envs: dict = <factory>, image: str = 'quay.io/project-codeflare/ray:2.5.0-py38-cu116', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None) +(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, envs: dict = <factory>, image: str = 'quay.io/project-codeflare/ray:latest-py39-cu118', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None)

This dataclass is used to specify resource requirements and other details, and @@ -111,6 +114,9 @@

Classes

name: str namespace: str = None head_info: list = field(default_factory=list) + head_cpus: int = 2 + head_memory: int = 8 + head_gpus: int = 0 machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] min_cpus: int = 1 max_cpus: int = 1 @@ -121,7 +127,7 @@

Classes

template: str = f"{dir}/templates/base-template.yaml" instascale: bool = False envs: dict = field(default_factory=dict) - image: str = "quay.io/project-codeflare/ray:2.5.0-py38-cu116" + image: str = "quay.io/project-codeflare/ray:latest-py39-cu118" local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None @@ -136,10 +142,22 @@

Class variables

+
var head_cpus : int
+
+
+
+
var head_gpus : int
+
+
+
var head_info : list
+
var head_memory : int
+
+
+
var image : str
@@ -219,7 +237,10 @@

  • dispatch_priority
  • envs
  • +
  • head_cpus
  • +
  • head_gpus
  • head_info
  • +
  • head_memory
  • image
  • image_pull_secrets
  • instascale
  • diff --git a/docs/cluster/model.html b/docs/cluster/model.html index 7d911255..3832494e 100644 --- a/docs/cluster/model.html +++ b/docs/cluster/model.html @@ -102,6 +102,9 @@

    Module codeflare_sdk.cluster.model

    name: str status: RayClusterStatus + head_cpus: int + head_mem: str + head_gpu: int workers: int worker_mem_min: str worker_mem_max: str @@ -287,7 +290,7 @@

    Class variables

    class RayCluster -(name: str, status: RayClusterStatus, workers: int, worker_mem_min: str, worker_mem_max: str, worker_cpu: int, worker_gpu: int, namespace: str, dashboard: str) +(name: str, status: RayClusterStatus, head_cpus: int, head_mem: str, head_gpu: int, workers: int, worker_mem_min: str, worker_mem_max: str, worker_cpu: int, worker_gpu: int, namespace: str, dashboard: str)

    For storing information about a Ray cluster.

    @@ -302,6 +305,9 @@

    Class variables

    name: str status: RayClusterStatus + head_cpus: int + head_mem: str + head_gpu: int workers: int worker_mem_min: str worker_mem_max: str @@ -316,6 +322,18 @@

    Class variables

    +
    var head_cpus : int
    +
    +
    +
    +
    var head_gpu : int
    +
    +
    +
    +
    var head_mem : str
    +
    +
    +
    var name : str
    @@ -447,6 +465,9 @@

    RayCluster

    • dashboard
    • +
    • head_cpus
    • +
    • head_gpu
    • +
    • head_mem
    • name
    • namespace
    • status
    • diff --git a/docs/utils/generate_yaml.html b/docs/utils/generate_yaml.html index 6d94e4c7..791898a3 100644 --- a/docs/utils/generate_yaml.html +++ b/docs/utils/generate_yaml.html @@ -138,35 +138,51 @@

      Module codeflare_sdk.utils.generate_yaml

      def update_custompodresources( - item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers + item, + min_cpu, + max_cpu, + min_memory, + max_memory, + gpu, + workers, + head_cpus, + head_memory, + head_gpus, ): if "custompodresources" in item.keys(): custompodresources = item.get("custompodresources") for i in range(len(custompodresources)): + resource = custompodresources[i] if i == 0: # Leave head node resources as template default - continue - resource = custompodresources[i] - for k, v in resource.items(): - if k == "replicas" and i == 1: - resource[k] = workers - if k == "requests" or k == "limits": - for spec, _ in v.items(): - if spec == "cpu": - if k == "limits": - resource[k][spec] = max_cpu - else: - resource[k][spec] = min_cpu - if spec == "memory": - if k == "limits": - resource[k][spec] = str(max_memory) + "G" - else: - resource[k][spec] = str(min_memory) + "G" - if spec == "nvidia.com/gpu": - if i == 0: - resource[k][spec] = 0 - else: - resource[k][spec] = gpu + resource["requests"]["cpu"] = head_cpus + resource["limits"]["cpu"] = head_cpus + resource["requests"]["memory"] = str(head_memory) + "G" + resource["limits"]["memory"] = str(head_memory) + "G" + resource["requests"]["nvidia.com/gpu"] = head_gpus + resource["limits"]["nvidia.com/gpu"] = head_gpus + + else: + for k, v in resource.items(): + if k == "replicas" and i == 1: + resource[k] = workers + if k == "requests" or k == "limits": + for spec, _ in v.items(): + if spec == "cpu": + if k == "limits": + resource[k][spec] = max_cpu + else: + resource[k][spec] = min_cpu + if spec == "memory": + if k == "limits": + resource[k][spec] = str(max_memory) + "G" + else: + resource[k][spec] = str(min_memory) + "G" + if spec == "nvidia.com/gpu": + if i == 0: + resource[k][spec] = 0 + else: + resource[k][spec] = gpu else: sys.exit("Error: malformed template") @@ -236,11 +252,15 @@

      Module codeflare_sdk.utils.generate_yaml

      instascale, env, image_pull_secrets, + head_cpus, + head_memory, + head_gpus, ): if "generictemplate" in item.keys(): head = item.get("generictemplate").get("spec").get("headGroupSpec") - worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] + head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) + worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] # Head counts as first worker worker["replicas"] = workers worker["minReplicas"] = workers @@ -256,7 +276,9 @@

      Module codeflare_sdk.utils.generate_yaml

      update_env(spec, env) if comp == head: # TODO: Eventually add head node configuration outside of template - continue + update_resources( + spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus + ) else: update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) @@ -381,6 +403,9 @@

      Module codeflare_sdk.utils.generate_yaml

      def generate_appwrapper( name: str, namespace: str, + head_cpus: int, + head_memory: int, + head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, @@ -406,7 +431,16 @@

      Module codeflare_sdk.utils.generate_yaml

      update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) update_custompodresources( - item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers + item, + min_cpu, + max_cpu, + min_memory, + max_memory, + gpu, + workers, + head_cpus, + head_memory, + head_gpus, ) update_nodes( item, @@ -421,6 +455,9 @@

      Module codeflare_sdk.utils.generate_yaml

      instascale, env, image_pull_secrets, + head_cpus, + head_memory, + head_gpus, ) update_dashboard_route(route_item, cluster_name, namespace) if local_interactive: @@ -577,7 +614,7 @@

      Functions

    -def generate_appwrapper(name: str, namespace: str, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int) +def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int)
    @@ -588,6 +625,9 @@

    Functions

    def generate_appwrapper(
         name: str,
         namespace: str,
    +    head_cpus: int,
    +    head_memory: int,
    +    head_gpus: int,
         min_cpu: int,
         max_cpu: int,
         min_memory: int,
    @@ -613,7 +653,16 @@ 

    Functions

    update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) update_custompodresources( - item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers + item, + min_cpu, + max_cpu, + min_memory, + max_memory, + gpu, + workers, + head_cpus, + head_memory, + head_gpus, ) update_nodes( item, @@ -628,6 +677,9 @@

    Functions

    instascale, env, image_pull_secrets, + head_cpus, + head_memory, + head_gpus, ) update_dashboard_route(route_item, cluster_name, namespace) if local_interactive: @@ -700,7 +752,7 @@

    Functions

    -def update_custompodresources(item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers) +def update_custompodresources(item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, head_cpus, head_memory, head_gpus)
    @@ -709,35 +761,51 @@

    Functions

    Expand source code
    def update_custompodresources(
    -    item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers
    +    item,
    +    min_cpu,
    +    max_cpu,
    +    min_memory,
    +    max_memory,
    +    gpu,
    +    workers,
    +    head_cpus,
    +    head_memory,
    +    head_gpus,
     ):
         if "custompodresources" in item.keys():
             custompodresources = item.get("custompodresources")
             for i in range(len(custompodresources)):
    +            resource = custompodresources[i]
                 if i == 0:
                     # Leave head node resources as template default
    -                continue
    -            resource = custompodresources[i]
    -            for k, v in resource.items():
    -                if k == "replicas" and i == 1:
    -                    resource[k] = workers
    -                if k == "requests" or k == "limits":
    -                    for spec, _ in v.items():
    -                        if spec == "cpu":
    -                            if k == "limits":
    -                                resource[k][spec] = max_cpu
    -                            else:
    -                                resource[k][spec] = min_cpu
    -                        if spec == "memory":
    -                            if k == "limits":
    -                                resource[k][spec] = str(max_memory) + "G"
    -                            else:
    -                                resource[k][spec] = str(min_memory) + "G"
    -                        if spec == "nvidia.com/gpu":
    -                            if i == 0:
    -                                resource[k][spec] = 0
    -                            else:
    -                                resource[k][spec] = gpu
    +                resource["requests"]["cpu"] = head_cpus
    +                resource["limits"]["cpu"] = head_cpus
    +                resource["requests"]["memory"] = str(head_memory) + "G"
    +                resource["limits"]["memory"] = str(head_memory) + "G"
    +                resource["requests"]["nvidia.com/gpu"] = head_gpus
    +                resource["limits"]["nvidia.com/gpu"] = head_gpus
    +
    +            else:
    +                for k, v in resource.items():
    +                    if k == "replicas" and i == 1:
    +                        resource[k] = workers
    +                    if k == "requests" or k == "limits":
    +                        for spec, _ in v.items():
    +                            if spec == "cpu":
    +                                if k == "limits":
    +                                    resource[k][spec] = max_cpu
    +                                else:
    +                                    resource[k][spec] = min_cpu
    +                            if spec == "memory":
    +                                if k == "limits":
    +                                    resource[k][spec] = str(max_memory) + "G"
    +                                else:
    +                                    resource[k][spec] = str(min_memory) + "G"
    +                            if spec == "nvidia.com/gpu":
    +                                if i == 0:
    +                                    resource[k][spec] = 0
    +                                else:
    +                                    resource[k][spec] = gpu
         else:
             sys.exit("Error: malformed template")
    @@ -855,7 +923,7 @@

    Functions

    -def update_nodes(item, appwrapper_name, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, image, instascale, env, image_pull_secrets) +def update_nodes(item, appwrapper_name, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, image, instascale, env, image_pull_secrets, head_cpus, head_memory, head_gpus)
    @@ -876,11 +944,15 @@

    Functions

    instascale, env, image_pull_secrets, + head_cpus, + head_memory, + head_gpus, ): if "generictemplate" in item.keys(): head = item.get("generictemplate").get("spec").get("headGroupSpec") - worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] + head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) + worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] # Head counts as first worker worker["replicas"] = workers worker["minReplicas"] = workers @@ -896,7 +968,9 @@

    Functions

    update_env(spec, env) if comp == head: # TODO: Eventually add head node configuration outside of template - continue + update_resources( + spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus + ) else: update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu)
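
Taken together, the v0.9.0 documentation changes above describe the new head-node sizing knobs (head_cpus, head_memory, head_gpus) that flow from ClusterConfiguration through generate_appwrapper() into the generated pod resources. A sketch of how a user might exercise them; every literal below is an illustrative placeholder rather than a value mandated by this patch.

from codeflare_sdk.cluster.config import ClusterConfiguration

config = ClusterConfiguration(
    name="demo",                 # placeholder name
    namespace="default",         # placeholder namespace
    head_cpus=2,                 # CPUs for the Ray head pod
    head_memory=8,               # plain integer; the template appends "G"
    head_gpus=0,                 # also sets rayStartParams["num-gpus"] on the head
    num_workers=2,
    min_cpus=1,
    max_cpus=2,
    min_memory=4,
    max_memory=4,
    num_gpus=0,
    image="quay.io/project-codeflare/ray:latest-py39-cu118",
)
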
    From 06a3a59b1acb532a4b4dd3276c6432801485605f Mon Sep 17 00:00:00 2001 From: Kevin Date: Wed, 11 Oct 2023 15:49:11 -0400 Subject: [PATCH 032/496] update py lib to 3.9 Signed-off-by: Kevin --- custom-nb-image/Dockerfile | 2 +- custom-nb-image/imagestream.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/custom-nb-image/Dockerfile b/custom-nb-image/Dockerfile index e4bfcb26..a656618e 100644 --- a/custom-nb-image/Dockerfile +++ b/custom-nb-image/Dockerfile @@ -27,5 +27,5 @@ RUN pip install codeflare-sdk==${SDK_VERSION} \ transformers==4.23.1 \ evaluate==0.3.0 -RUN chmod -R g+w /opt/app-root/lib/python3.8/site-packages && \ +RUN chmod -R g+w /opt/app-root/lib/python3.9/site-packages && \ fix-permissions /opt/app-root -P diff --git a/custom-nb-image/imagestream.yaml b/custom-nb-image/imagestream.yaml index bd17076f..833a42fd 100644 --- a/custom-nb-image/imagestream.yaml +++ b/custom-nb-image/imagestream.yaml @@ -21,7 +21,7 @@ metadata: annotations: opendatahub.io/notebook-image-name: "CodeFlare Notebook" - opendatahub.io/notebook-image-desc: "Custom Jupyter notebook image with CodeFlare SDK, Python 3.8, Ray 2.5.0 and PyTorch 1.12.1" + opendatahub.io/notebook-image-desc: "Custom Jupyter notebook image with CodeFlare SDK, Python 3.9, Ray 2.7.0 and PyTorch 1.12.1" spec: lookupPolicy: local: true From e44dd8e37dbb79c7fd62fb68bf8593f49ff20069 Mon Sep 17 00:00:00 2001 From: Kevin Postlethwait Date: Fri, 20 Oct 2023 09:41:10 -0400 Subject: [PATCH 033/496] add functions for creating ray with oauth proxy in front of the dashboard (#298) * add functions for creating ray with oauth proxy in front of the dashboard Signed-off-by: Kevin * add unit test for OAuth create Signed-off-by: Kevin * add tests for replace and generate sidecar Signed-off-by: Kevin --------- Signed-off-by: Kevin --- src/codeflare_sdk/cluster/auth.py | 4 +- src/codeflare_sdk/cluster/cluster.py | 96 +++++++-- src/codeflare_sdk/cluster/config.py | 1 + src/codeflare_sdk/job/jobs.py | 151 +++++++------ src/codeflare_sdk/utils/generate_yaml.py | 95 ++++++++- src/codeflare_sdk/utils/kube_api_helpers.py | 5 + src/codeflare_sdk/utils/openshift_oauth.py | 217 +++++++++++++++++++ tests/unit_test.py | 225 ++++++++++++++++---- 8 files changed, 667 insertions(+), 127 deletions(-) create mode 100644 src/codeflare_sdk/utils/openshift_oauth.py diff --git a/src/codeflare_sdk/cluster/auth.py b/src/codeflare_sdk/cluster/auth.py index eb739136..1015a801 100644 --- a/src/codeflare_sdk/cluster/auth.py +++ b/src/codeflare_sdk/cluster/auth.py @@ -25,6 +25,8 @@ import urllib3 from ..utils.kube_api_helpers import _kube_api_error_handling +from typing import Optional + global api_client api_client = None global config_path @@ -188,7 +190,7 @@ def config_check() -> str: return config_path -def api_config_handler() -> str: +def api_config_handler() -> Optional[client.ApiClient]: """ This function is used to load the api client if the user has logged in """ diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 5d00cdae..29c026bd 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -21,12 +21,19 @@ from time import sleep from typing import List, Optional, Tuple, Dict +import openshift as oc +from kubernetes import config from ray.job_submission import JobSubmissionClient +import urllib3 from .auth import config_check, api_config_handler from ..utils import pretty_print from ..utils.generate_yaml import generate_appwrapper from ..utils.kube_api_helpers 
import _kube_api_error_handling +from ..utils.openshift_oauth import ( + create_openshift_oauth_objects, + delete_openshift_oauth_objects, +) from .config import ClusterConfiguration from .model import ( AppWrapper, @@ -40,6 +47,8 @@ import os import requests +from kubernetes import config + class Cluster: """ @@ -61,6 +70,39 @@ def __init__(self, config: ClusterConfiguration): self.config = config self.app_wrapper_yaml = self.create_app_wrapper() self.app_wrapper_name = self.app_wrapper_yaml.split(".")[0] + self._client = None + + @property + def _client_headers(self): + k8_client = api_config_handler() or client.ApiClient() + return { + "Authorization": k8_client.configuration.get_api_key_with_prefix( + "authorization" + ) + } + + @property + def _client_verify_tls(self): + return not self.config.openshift_oauth + + @property + def client(self): + if self._client: + return self._client + if self.config.openshift_oauth: + print( + api_config_handler().configuration.get_api_key_with_prefix( + "authorization" + ) + ) + self._client = JobSubmissionClient( + self.cluster_dashboard_uri(), + headers=self._client_headers, + verify=self._client_verify_tls, + ) + else: + self._client = JobSubmissionClient(self.cluster_dashboard_uri()) + return self._client def evaluate_dispatch_priority(self): priority_class = self.config.dispatch_priority @@ -147,6 +189,7 @@ def create_app_wrapper(self): image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, + openshift_oauth=self.config.openshift_oauth, ) # creates a new cluster with the provided or default spec @@ -156,6 +199,11 @@ def up(self): the MCAD queue. """ namespace = self.config.namespace + if self.config.openshift_oauth: + create_openshift_oauth_objects( + cluster_name=self.config.name, namespace=namespace + ) + try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -190,6 +238,11 @@ def down(self): except Exception as e: # pragma: no cover return _kube_api_error_handling(e) + if self.config.openshift_oauth: + delete_openshift_oauth_objects( + cluster_name=self.config.name, namespace=namespace + ) + def status( self, print_to_console: bool = True ) -> Tuple[CodeFlareClusterStatus, bool]: @@ -258,7 +311,16 @@ def status( return status, ready def is_dashboard_ready(self) -> bool: - response = requests.get(self.cluster_dashboard_uri(), timeout=5) + try: + response = requests.get( + self.cluster_dashboard_uri(), + headers=self._client_headers, + timeout=5, + verify=self._client_verify_tls, + ) + except requests.exceptions.SSLError: + # SSL exception occurs when oauth ingress has been created but cluster is not up + return False if response.status_code == 200: return True else: @@ -330,7 +392,13 @@ def cluster_dashboard_uri(self) -> str: return _kube_api_error_handling(e) for route in routes["items"]: - if route["metadata"]["name"] == f"ray-dashboard-{self.config.name}": + if route["metadata"][ + "name" + ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ + "name" + ].startswith( + f"{self.config.name}-ingress" + ): protocol = "https" if route["spec"].get("tls") else "http" return f"{protocol}://{route['spec']['host']}" return "Dashboard route not available yet, have you run cluster.up()?" @@ -339,30 +407,24 @@ def list_jobs(self) -> List: """ This method accesses the head ray node in your cluster and lists the running jobs. 
""" - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.list_jobs() + return self.client.list_jobs() def job_status(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the job status for the provided job id. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.get_job_status(job_id) + return self.client.get_job_status(job_id) def job_logs(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the logs for the provided job id. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.get_job_logs(job_id) + return self.client.get_job_logs(job_id) def torchx_config( self, working_dir: str = None, requirements: str = None ) -> Dict[str, str]: - dashboard_address = f"{self.cluster_dashboard_uri().lstrip('http://')}" + dashboard_address = urllib3.util.parse_url(self.cluster_dashboard_uri()).host to_return = { "cluster_name": self.config.name, "dashboard_address": dashboard_address, @@ -591,7 +653,7 @@ def _get_app_wrappers( def _map_to_ray_cluster(rc) -> Optional[RayCluster]: - if "status" in rc and "state" in rc["status"]: + if "state" in rc["status"]: status = RayClusterStatus(rc["status"]["state"].lower()) else: status = RayClusterStatus.UNKNOWN @@ -606,7 +668,13 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: ) ray_route = None for route in routes["items"]: - if route["metadata"]["name"] == f"ray-dashboard-{rc['metadata']['name']}": + if route["metadata"][ + "name" + ] == f"ray-dashboard-{rc['metadata']['name']}" or route["metadata"][ + "name" + ].startswith( + f"{rc['metadata']['name']}-ingress" + ): protocol = "https" if route["spec"].get("tls") else "http" ray_route = f"{protocol}://{route['spec']['host']}" diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 30875a98..fe83e9e5 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -51,3 +51,4 @@ class ClusterConfiguration: local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None + openshift_oauth: bool = False # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator diff --git a/src/codeflare_sdk/job/jobs.py b/src/codeflare_sdk/job/jobs.py index b9bb9cdc..27f15283 100644 --- a/src/codeflare_sdk/job/jobs.py +++ b/src/codeflare_sdk/job/jobs.py @@ -18,15 +18,19 @@ from pathlib import Path from torchx.components.dist import ddp -from torchx.runner import get_runner +from torchx.runner import get_runner, Runner +from torchx.schedulers.ray_scheduler import RayScheduler from torchx.specs import AppHandle, parse_app_handle, AppDryRunInfo +from ray.job_submission import JobSubmissionClient + +import openshift as oc + if TYPE_CHECKING: from ..cluster.cluster import Cluster from ..cluster.cluster import get_current_namespace all_jobs: List["Job"] = [] -torchx_runner = get_runner() class JobDefinition(metaclass=abc.ABCMeta): @@ -92,30 +96,37 @@ def __init__( def _dry_run(self, cluster: "Cluster"): j = f"{cluster.config.num_workers}x{max(cluster.config.num_gpus, 1)}" # # of proc. 
= # of gpus - return torchx_runner.dryrun( - app=ddp( - *self.script_args, - script=self.script, - m=self.m, - name=self.name, - h=self.h, - cpu=self.cpu if self.cpu is not None else cluster.config.max_cpus, - gpu=self.gpu if self.gpu is not None else cluster.config.num_gpus, - memMB=self.memMB - if self.memMB is not None - else cluster.config.max_memory * 1024, - j=self.j if self.j is not None else j, - env=self.env, - max_retries=self.max_retries, - rdzv_port=self.rdzv_port, - rdzv_backend=self.rdzv_backend - if self.rdzv_backend is not None - else "static", - mounts=self.mounts, + runner = get_runner(ray_client=cluster.client) + runner._scheduler_instances["ray"] = RayScheduler( + session_name=runner._name, ray_client=cluster.client + ) + return ( + runner.dryrun( + app=ddp( + *self.script_args, + script=self.script, + m=self.m, + name=self.name, + h=self.h, + cpu=self.cpu if self.cpu is not None else cluster.config.max_cpus, + gpu=self.gpu if self.gpu is not None else cluster.config.num_gpus, + memMB=self.memMB + if self.memMB is not None + else cluster.config.max_memory * 1024, + j=self.j if self.j is not None else j, + env=self.env, + max_retries=self.max_retries, + rdzv_port=self.rdzv_port, + rdzv_backend=self.rdzv_backend + if self.rdzv_backend is not None + else "static", + mounts=self.mounts, + ), + scheduler=cluster.torchx_scheduler, + cfg=cluster.torchx_config(**self.scheduler_args), + workspace=self.workspace, ), - scheduler=cluster.torchx_scheduler, - cfg=cluster.torchx_config(**self.scheduler_args), - workspace=self.workspace, + runner, ) def _missing_spec(self, spec: str): @@ -125,41 +136,47 @@ def _dry_run_no_cluster(self): if self.scheduler_args is not None: if self.scheduler_args.get("namespace") is None: self.scheduler_args["namespace"] = get_current_namespace() - return torchx_runner.dryrun( - app=ddp( - *self.script_args, - script=self.script, - m=self.m, - name=self.name if self.name is not None else self._missing_spec("name"), - h=self.h, - cpu=self.cpu - if self.cpu is not None - else self._missing_spec("cpu (# cpus per worker)"), - gpu=self.gpu - if self.gpu is not None - else self._missing_spec("gpu (# gpus per worker)"), - memMB=self.memMB - if self.memMB is not None - else self._missing_spec("memMB (memory in MB)"), - j=self.j - if self.j is not None - else self._missing_spec( - "j (`workers`x`procs`)" - ), # # of proc. = # of gpus, - env=self.env, # should this still exist? - max_retries=self.max_retries, - rdzv_port=self.rdzv_port, # should this still exist? - rdzv_backend=self.rdzv_backend - if self.rdzv_backend is not None - else "c10d", - mounts=self.mounts, - image=self.image - if self.image is not None - else self._missing_spec("image"), + runner = get_runner() + return ( + runner.dryrun( + app=ddp( + *self.script_args, + script=self.script, + m=self.m, + name=self.name + if self.name is not None + else self._missing_spec("name"), + h=self.h, + cpu=self.cpu + if self.cpu is not None + else self._missing_spec("cpu (# cpus per worker)"), + gpu=self.gpu + if self.gpu is not None + else self._missing_spec("gpu (# gpus per worker)"), + memMB=self.memMB + if self.memMB is not None + else self._missing_spec("memMB (memory in MB)"), + j=self.j + if self.j is not None + else self._missing_spec( + "j (`workers`x`procs`)" + ), # # of proc. = # of gpus, + env=self.env, # should this still exist? + max_retries=self.max_retries, + rdzv_port=self.rdzv_port, # should this still exist? 
+ rdzv_backend=self.rdzv_backend + if self.rdzv_backend is not None + else "c10d", + mounts=self.mounts, + image=self.image + if self.image is not None + else self._missing_spec("image"), + ), + scheduler="kubernetes_mcad", + cfg=self.scheduler_args, + workspace="", ), - scheduler="kubernetes_mcad", - cfg=self.scheduler_args, - workspace="", + runner, ) def submit(self, cluster: "Cluster" = None) -> "Job": @@ -171,18 +188,20 @@ def __init__(self, job_definition: "DDPJobDefinition", cluster: "Cluster" = None self.job_definition = job_definition self.cluster = cluster if self.cluster: - self._app_handle = torchx_runner.schedule(job_definition._dry_run(cluster)) + definition, runner = job_definition._dry_run(cluster) + self._app_handle = runner.schedule(definition) + self._runner = runner else: - self._app_handle = torchx_runner.schedule( - job_definition._dry_run_no_cluster() - ) + definition, runner = job_definition._dry_run_no_cluster() + self._app_handle = runner.schedule(definition) + self._runner = runner all_jobs.append(self) def status(self) -> str: - return torchx_runner.status(self._app_handle) + return self._runner.status(self._app_handle) def logs(self) -> str: - return "".join(torchx_runner.log_lines(self._app_handle, None)) + return "".join(self._runner.log_lines(self._app_handle, None)) def cancel(self): - torchx_runner.cancel(self._app_handle) + self._runner.cancel(self._app_handle) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 95e1c5ec..4757f537 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -24,6 +24,13 @@ from kubernetes import client, config from .kube_api_helpers import _kube_api_error_handling from ..cluster.auth import api_config_handler, config_check +from os import urandom +from base64 import b64encode +from urllib3.util import parse_url + +from kubernetes import client, config + +from .kube_api_helpers import _get_api_host def read_template(template): @@ -46,13 +53,17 @@ def gen_names(name): def update_dashboard_route(route_item, cluster_name, namespace): metadata = route_item.get("generictemplate", {}).get("metadata") - metadata["name"] = f"ray-dashboard-{cluster_name}" + metadata["name"] = gen_dashboard_route_name(cluster_name) metadata["namespace"] = namespace metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" spec = route_item.get("generictemplate", {}).get("spec") spec["to"]["name"] = f"{cluster_name}-head-svc" +def gen_dashboard_route_name(cluster_name): + return f"ray-dashboard-{cluster_name}" + + # ToDo: refactor the update_x_route() functions def update_rayclient_route(route_item, cluster_name, namespace): metadata = route_item.get("generictemplate", {}).get("metadata") @@ -369,6 +380,83 @@ def write_user_appwrapper(user_yaml, output_file_name): print(f"Written to: {output_file_name}") +def enable_openshift_oauth(user_yaml, cluster_name, namespace): + config_check() + k8_client = api_config_handler() or client.ApiClient() + tls_mount_location = "/etc/tls/private" + oauth_port = 8443 + oauth_sa_name = f"{cluster_name}-oauth-proxy" + tls_secret_name = f"{cluster_name}-proxy-tls-secret" + tls_volume_name = "proxy-tls-secret" + port_name = "oauth-proxy" + host = _get_api_host(k8_client) + host = host.replace( + "api.", f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps." 
+ ) + oauth_sidecar = _create_oauth_sidecar_object( + namespace, + tls_mount_location, + oauth_port, + oauth_sa_name, + tls_volume_name, + port_name, + ) + tls_secret_volume = client.V1Volume( + name=tls_volume_name, + secret=client.V1SecretVolumeSource(secret_name=tls_secret_name), + ) + # allows for setting value of Cluster object when initializing object from an existing AppWrapper on cluster + user_yaml["metadata"]["annotations"] = user_yaml["metadata"].get("annotations", {}) + user_yaml["metadata"]["annotations"][ + "codeflare-sdk-use-oauth" + ] = "true" # if the user gets an + ray_headgroup_pod = user_yaml["spec"]["resources"]["GenericItems"][0][ + "generictemplate" + ]["spec"]["headGroupSpec"]["template"]["spec"] + user_yaml["spec"]["resources"]["GenericItems"].pop(1) + ray_headgroup_pod["serviceAccount"] = oauth_sa_name + ray_headgroup_pod["volumes"] = ray_headgroup_pod.get("volumes", []) + + # we use a generic api client here so that the serialization function doesn't need to be mocked for unit tests + ray_headgroup_pod["volumes"].append( + client.ApiClient().sanitize_for_serialization(tls_secret_volume) + ) + ray_headgroup_pod["containers"].append( + client.ApiClient().sanitize_for_serialization(oauth_sidecar) + ) + + +def _create_oauth_sidecar_object( + namespace: str, + tls_mount_location: str, + oauth_port: int, + oauth_sa_name: str, + tls_volume_name: str, + port_name: str, +) -> client.V1Container: + return client.V1Container( + args=[ + f"--https-address=:{oauth_port}", + "--provider=openshift", + f"--openshift-service-account={oauth_sa_name}", + "--upstream=http://localhost:8265", + f"--tls-cert={tls_mount_location}/tls.crt", + f"--tls-key={tls_mount_location}/tls.key", + f"--cookie-secret={b64encode(urandom(64)).decode('utf-8')}", # create random string for encrypting cookie + f'--openshift-delegate-urls={{"/":{{"resource":"pods","namespace":"{namespace}","verb":"get"}}}}', + ], + image="registry.redhat.io/openshift4/ose-oauth-proxy@sha256:1ea6a01bf3e63cdcf125c6064cbd4a4a270deaf0f157b3eabb78f60556840366", + name="oauth-proxy", + ports=[client.V1ContainerPort(container_port=oauth_port, name=port_name)], + resources=client.V1ResourceRequirements(limits=None, requests=None), + volume_mounts=[ + client.V1VolumeMount( + mount_path=tls_mount_location, name=tls_volume_name, read_only=True + ) + ], + ) + + def generate_appwrapper( name: str, namespace: str, @@ -390,6 +478,7 @@ def generate_appwrapper( image_pull_secrets: list, dispatch_priority: str, priority_val: int, + openshift_oauth: bool, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -433,6 +522,10 @@ def generate_appwrapper( enable_local_interactive(resources, cluster_name, namespace) else: disable_raycluster_tls(resources["resources"]) + + if openshift_oauth: + enable_openshift_oauth(user_yaml, cluster_name, namespace) + outfile = appwrapper_name + ".yaml" write_user_appwrapper(user_yaml, outfile) return outfile diff --git a/src/codeflare_sdk/utils/kube_api_helpers.py b/src/codeflare_sdk/utils/kube_api_helpers.py index 58358a05..8f8180b9 100644 --- a/src/codeflare_sdk/utils/kube_api_helpers.py +++ b/src/codeflare_sdk/utils/kube_api_helpers.py @@ -19,6 +19,7 @@ import executing from kubernetes import client, config +from urllib3.util import parse_url # private methods @@ -42,3 +43,7 @@ def _kube_api_error_handling(e: Exception): # pragma: no cover elif e.reason == "Conflict": raise FileExistsError(exists_msg) raise e + + +def _get_api_host(api_client: client.ApiClient): # pragma: 
no cover + return parse_url(api_client.configuration.host).host diff --git a/src/codeflare_sdk/utils/openshift_oauth.py b/src/codeflare_sdk/utils/openshift_oauth.py new file mode 100644 index 00000000..5c3fc55a --- /dev/null +++ b/src/codeflare_sdk/utils/openshift_oauth.py @@ -0,0 +1,217 @@ +from urllib3.util import parse_url +from .generate_yaml import gen_dashboard_route_name +from .kube_api_helpers import _get_api_host +from base64 import b64decode + +from ..cluster.auth import config_check, api_config_handler + +from kubernetes import client + + +def create_openshift_oauth_objects(cluster_name, namespace): + config_check() + api_client = api_config_handler() or client.ApiClient() + oauth_port = 8443 + oauth_sa_name = f"{cluster_name}-oauth-proxy" + tls_secret_name = _gen_tls_secret_name(cluster_name) + service_name = f"{cluster_name}-oauth" + port_name = "oauth-proxy" + host = _get_api_host(api_client) + + # replace "^api" with the expected host + host = f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps" + host.lstrip( + "api" + ) + + _create_or_replace_oauth_sa(namespace, oauth_sa_name, host) + _create_or_replace_oauth_service_obj( + cluster_name, namespace, oauth_port, tls_secret_name, service_name, port_name + ) + _create_or_replace_oauth_ingress_object( + cluster_name, namespace, service_name, port_name, host + ) + _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name) + + +def _create_or_replace_oauth_sa(namespace, oauth_sa_name, host): + api_client = api_config_handler() + oauth_sa = client.V1ServiceAccount( + api_version="v1", + kind="ServiceAccount", + metadata=client.V1ObjectMeta( + name=oauth_sa_name, + namespace=namespace, + annotations={ + "serviceaccounts.openshift.io/oauth-redirecturi.first": f"https://{host}" + }, + ), + ) + try: + client.CoreV1Api(api_client).create_namespaced_service_account( + namespace=namespace, body=oauth_sa + ) + except client.ApiException as e: + if e.reason == "Conflict": + client.CoreV1Api(api_client).replace_namespaced_service_account( + namespace=namespace, + body=oauth_sa, + name=oauth_sa_name, + ) + else: + raise e + + +def _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name): + api_client = api_config_handler() + oauth_crb = client.V1ClusterRoleBinding( + api_version="rbac.authorization.k8s.io/v1", + kind="ClusterRoleBinding", + metadata=client.V1ObjectMeta(name=f"{cluster_name}-rb"), + role_ref=client.V1RoleRef( + api_group="rbac.authorization.k8s.io", + kind="ClusterRole", + name="system:auth-delegator", + ), + subjects=[ + client.V1Subject( + kind="ServiceAccount", name=oauth_sa_name, namespace=namespace + ) + ], + ) + try: + client.RbacAuthorizationV1Api(api_client).create_cluster_role_binding( + body=oauth_crb + ) + except client.ApiException as e: + if e.reason == "Conflict": + client.RbacAuthorizationV1Api(api_client).replace_cluster_role_binding( + body=oauth_crb, name=f"{cluster_name}-rb" + ) + else: + raise e + + +def _gen_tls_secret_name(cluster_name): + return f"{cluster_name}-proxy-tls-secret" + + +def delete_openshift_oauth_objects(cluster_name, namespace): + # NOTE: it might be worth adding error handling here, but shouldn't be necessary because cluster.down(...) 
checks + # for an existing cluster before calling this => the objects should never be deleted twice + api_client = api_config_handler() + oauth_sa_name = f"{cluster_name}-oauth-proxy" + service_name = f"{cluster_name}-oauth" + client.CoreV1Api(api_client).delete_namespaced_service_account( + name=oauth_sa_name, namespace=namespace + ) + client.CoreV1Api(api_client).delete_namespaced_service( + name=service_name, namespace=namespace + ) + client.NetworkingV1Api(api_client).delete_namespaced_ingress( + name=f"{cluster_name}-ingress", namespace=namespace + ) + client.RbacAuthorizationV1Api(api_client).delete_cluster_role_binding( + name=f"{cluster_name}-rb" + ) + + +def _create_or_replace_oauth_service_obj( + cluster_name: str, + namespace: str, + oauth_port: int, + tls_secret_name: str, + service_name: str, + port_name: str, +) -> client.V1Service: + api_client = api_config_handler() + oauth_service = client.V1Service( + api_version="v1", + kind="Service", + metadata=client.V1ObjectMeta( + annotations={ + "service.beta.openshift.io/serving-cert-secret-name": tls_secret_name + }, + name=service_name, + namespace=namespace, + ), + spec=client.V1ServiceSpec( + ports=[ + client.V1ServicePort( + name=port_name, + protocol="TCP", + port=443, + target_port=oauth_port, + ) + ], + selector={ + "app.kubernetes.io/created-by": "kuberay-operator", + "app.kubernetes.io/name": "kuberay", + "ray.io/cluster": cluster_name, + "ray.io/identifier": f"{cluster_name}-head", + "ray.io/node-type": "head", + }, + ), + ) + try: + client.CoreV1Api(api_client).create_namespaced_service( + namespace=namespace, body=oauth_service + ) + except client.ApiException as e: + if e.reason == "Conflict": + client.CoreV1Api(api_client).replace_namespaced_service( + namespace=namespace, body=oauth_service, name=service_name + ) + else: + raise e + + +def _create_or_replace_oauth_ingress_object( + cluster_name: str, + namespace: str, + service_name: str, + port_name: str, + host: str, +) -> client.V1Ingress: + api_client = api_config_handler() + ingress = client.V1Ingress( + api_version="networking.k8s.io/v1", + kind="Ingress", + metadata=client.V1ObjectMeta( + annotations={"route.openshift.io/termination": "passthrough"}, + name=f"{cluster_name}-ingress", + namespace=namespace, + ), + spec=client.V1IngressSpec( + rules=[ + client.V1IngressRule( + host=host, + http=client.V1HTTPIngressRuleValue( + paths=[ + client.V1HTTPIngressPath( + backend=client.V1IngressBackend( + service=client.V1IngressServiceBackend( + name=service_name, + port=client.V1ServiceBackendPort( + name=port_name + ), + ) + ), + path_type="ImplementationSpecific", + ) + ] + ), + ) + ] + ), + ) + try: + client.NetworkingV1Api(api_client).create_namespaced_ingress( + namespace=namespace, body=ingress + ) + except client.ApiException as e: + if e.reason == "Conflict": + client.NetworkingV1Api(api_client).replace_namespaced_ingress( + namespace=namespace, body=ingress, name=f"{cluster_name}-ingress" + ) + else: + raise e diff --git a/tests/unit_test.py b/tests/unit_test.py index 9b78e9e2..f2c86f1f 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+# TODO: replace all instances of torchx_runner + from pathlib import Path import sys import filecmp @@ -38,8 +40,8 @@ Authentication, KubeConfigFileAuthentication, config_check, - api_config_handler, ) +from codeflare_sdk.utils.openshift_oauth import create_openshift_oauth_objects from codeflare_sdk.utils.pretty_print import ( print_no_resources_found, print_app_wrappers_status, @@ -58,7 +60,6 @@ Job, DDPJobDefinition, DDPJob, - torchx_runner, ) from codeflare_sdk.utils.generate_cert import ( generate_ca_cert, @@ -74,6 +75,8 @@ createDDPJob_with_cluster, ) +import codeflare_sdk.utils.kube_api_helpers + import openshift from openshift.selector import Selector import ray @@ -83,7 +86,9 @@ from torchx.schedulers.kubernetes_mcad_scheduler import KubernetesMCADJob import pytest import yaml - +from unittest.mock import MagicMock +from pytest_mock import MockerFixture +from ray.job_submission import JobSubmissionClient # For mocking openshift client results fake_res = openshift.Result("fake") @@ -1835,7 +1840,7 @@ def test_DDPJobDefinition_creation(): assert ddp.scheduler_args == {"requirements": "test"} -def test_DDPJobDefinition_dry_run(mocker): +def test_DDPJobDefinition_dry_run(mocker: MockerFixture): """ Test that the dry run method returns the correct type: AppDryRunInfo, that the attributes of the returned object are of the correct type, @@ -1846,9 +1851,10 @@ def test_DDPJobDefinition_dry_run(mocker): "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", return_value="", ) + mocker.patch.object(Cluster, "client") ddp = createTestDDP() cluster = createClusterWithConfig() - ddp_job = ddp._dry_run(cluster) + ddp_job, _ = ddp._dry_run(cluster) assert type(ddp_job) == AppDryRunInfo assert ddp_job._fmt is not None assert type(ddp_job.request) == RayJob @@ -1884,7 +1890,7 @@ def test_DDPJobDefinition_dry_run_no_cluster(mocker): ddp = createTestDDP() ddp.image = "fake-image" - ddp_job = ddp._dry_run_no_cluster() + ddp_job, _ = ddp._dry_run_no_cluster() assert type(ddp_job) == AppDryRunInfo assert ddp_job._fmt is not None assert type(ddp_job.request) == KubernetesMCADJob @@ -1915,6 +1921,7 @@ def test_DDPJobDefinition_dry_run_no_resource_args(mocker): Test that the dry run correctly gets resources from the cluster object when the job definition does not specify resources. 
""" + mocker.patch.object(Cluster, "client") mocker.patch( "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", return_value="", @@ -1932,7 +1939,7 @@ def test_DDPJobDefinition_dry_run_no_resource_args(mocker): rdzv_port=29500, scheduler_args={"requirements": "test"}, ) - ddp_job = ddp._dry_run(cluster) + ddp_job, _ = ddp._dry_run(cluster) assert ddp_job._app.roles[0].resource.cpu == cluster.config.max_cpus assert ddp_job._app.roles[0].resource.gpu == cluster.config.num_gpus @@ -1998,25 +2005,24 @@ def test_DDPJobDefinition_dry_run_no_cluster_no_resource_args(mocker): assert str(e) == "Job definition missing arg: j (`workers`x`procs`)" -def test_DDPJobDefinition_submit(mocker): +def test_DDPJobDefinition_submit(mocker: MockerFixture): """ Tests that the submit method returns the correct type: DDPJob And that the attributes of the returned object are of the correct type """ - mocker.patch( - "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", - return_value="fake-dashboard-uri", - ) + mock_schedule = MagicMock() + mocker.patch.object(Runner, "schedule", mock_schedule) + mock_schedule.return_value = "fake-dashboard-url" + mocker.patch.object(Cluster, "client") ddp_def = createTestDDP() cluster = createClusterWithConfig() mocker.patch( "codeflare_sdk.job.jobs.get_current_namespace", side_effect="opendatahub", ) - mocker.patch( - "codeflare_sdk.job.jobs.torchx_runner.schedule", - return_value="fake-dashboard-url", - ) # a fake app_handle + mocker.patch.object( + Cluster, "cluster_dashboard_uri", return_value="fake-dashboard-url" + ) ddp_job = ddp_def.submit(cluster) assert type(ddp_job) == DDPJob assert type(ddp_job.job_definition) == DDPJobDefinition @@ -2033,24 +2039,23 @@ def test_DDPJobDefinition_submit(mocker): assert ddp_job._app_handle == "fake-dashboard-url" -def test_DDPJob_creation(mocker): - mocker.patch( - "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", - return_value="fake-dashboard-uri", +def test_DDPJob_creation(mocker: MockerFixture): + mocker.patch.object(Cluster, "client") + mock_schedule = MagicMock() + mocker.patch.object(Runner, "schedule", mock_schedule) + mocker.patch.object( + Cluster, "cluster_dashboard_uri", return_value="fake-dashboard-url" ) ddp_def = createTestDDP() cluster = createClusterWithConfig() - mocker.patch( - "codeflare_sdk.job.jobs.torchx_runner.schedule", - return_value="fake-dashboard-url", - ) # a fake app_handle + mock_schedule.return_value = "fake-dashboard-url" ddp_job = createDDPJob_with_cluster(ddp_def, cluster) assert type(ddp_job) == DDPJob assert type(ddp_job.job_definition) == DDPJobDefinition assert type(ddp_job.cluster) == Cluster assert type(ddp_job._app_handle) == str assert ddp_job._app_handle == "fake-dashboard-url" - _, args, kwargs = torchx_runner.schedule.mock_calls[0] + _, args, kwargs = mock_schedule.mock_calls[0] assert type(args[0]) == AppDryRunInfo job_info = args[0] assert type(job_info.request) == RayJob @@ -2059,24 +2064,23 @@ def test_DDPJob_creation(mocker): assert type(job_info._scheduler) == type(str()) -def test_DDPJob_creation_no_cluster(mocker): +def test_DDPJob_creation_no_cluster(mocker: MockerFixture): ddp_def = createTestDDP() ddp_def.image = "fake-image" mocker.patch( "codeflare_sdk.job.jobs.get_current_namespace", side_effect="opendatahub", ) - mocker.patch( - "codeflare_sdk.job.jobs.torchx_runner.schedule", - return_value="fake-app-handle", - ) # a fake app_handle + mock_schedule = MagicMock() + mocker.patch.object(Runner, "schedule", mock_schedule) + 
mock_schedule.return_value = "fake-app-handle" ddp_job = createDDPJob_no_cluster(ddp_def, None) assert type(ddp_job) == DDPJob assert type(ddp_job.job_definition) == DDPJobDefinition assert ddp_job.cluster == None assert type(ddp_job._app_handle) == str assert ddp_job._app_handle == "fake-app-handle" - _, args, kwargs = torchx_runner.schedule.mock_calls[0] + _, args, kwargs = mock_schedule.mock_calls[0] assert type(args[0]) == AppDryRunInfo job_info = args[0] assert type(job_info.request) == KubernetesMCADJob @@ -2085,31 +2089,31 @@ def test_DDPJob_creation_no_cluster(mocker): assert type(job_info._scheduler) == type(str()) -def test_DDPJob_status(mocker): +def test_DDPJob_status(mocker: MockerFixture): # Setup the neccesary mock patches + mock_status = MagicMock() + mocker.patch.object(Runner, "status", mock_status) test_DDPJob_creation(mocker) ddp_def = createTestDDP() cluster = createClusterWithConfig() ddp_job = createDDPJob_with_cluster(ddp_def, cluster) - mocker.patch( - "codeflare_sdk.job.jobs.torchx_runner.status", return_value="fake-status" - ) + mock_status.return_value = "fake-status" assert ddp_job.status() == "fake-status" - _, args, kwargs = torchx_runner.status.mock_calls[0] + _, args, kwargs = mock_status.mock_calls[0] assert args[0] == "fake-dashboard-url" -def test_DDPJob_logs(mocker): +def test_DDPJob_logs(mocker: MockerFixture): + mock_log = MagicMock() + mocker.patch.object(Runner, "log_lines", mock_log) # Setup the neccesary mock patches test_DDPJob_creation(mocker) ddp_def = createTestDDP() cluster = createClusterWithConfig() ddp_job = createDDPJob_with_cluster(ddp_def, cluster) - mocker.patch( - "codeflare_sdk.job.jobs.torchx_runner.log_lines", return_value="fake-logs" - ) + mock_log.return_value = "fake-logs" assert ddp_job.logs() == "fake-logs" - _, args, kwargs = torchx_runner.log_lines.mock_calls[0] + _, args, kwargs = mock_log.mock_calls[0] assert args[0] == "fake-dashboard-url" @@ -2117,7 +2121,9 @@ def arg_check_side_effect(*args): assert args[0] == "fake-app-handle" -def test_DDPJob_cancel(mocker): +def test_DDPJob_cancel(mocker: MockerFixture): + mock_cancel = MagicMock() + mocker.patch.object(Runner, "cancel", mock_cancel) # Setup the neccesary mock patches test_DDPJob_creation_no_cluster(mocker) ddp_def = createTestDDP() @@ -2127,9 +2133,7 @@ def test_DDPJob_cancel(mocker): "openshift.get_project_name", return_value="opendatahub", ) - mocker.patch( - "codeflare_sdk.job.jobs.torchx_runner.cancel", side_effect=arg_check_side_effect - ) + mock_cancel.side_effect = arg_check_side_effect ddp_job.cancel() @@ -2292,6 +2296,137 @@ def test_export_env(): ) +def test_create_openshift_oauth(mocker: MockerFixture): + create_namespaced_service_account = MagicMock() + create_cluster_role_binding = MagicMock() + create_namespaced_service = MagicMock() + create_namespaced_ingress = MagicMock() + mocker.patch.object( + client.CoreV1Api, + "create_namespaced_service_account", + create_namespaced_service_account, + ) + mocker.patch.object( + client.RbacAuthorizationV1Api, + "create_cluster_role_binding", + create_cluster_role_binding, + ) + mocker.patch.object( + client.CoreV1Api, "create_namespaced_service", create_namespaced_service + ) + mocker.patch.object( + client.NetworkingV1Api, "create_namespaced_ingress", create_namespaced_ingress + ) + mocker.patch( + "codeflare_sdk.utils.openshift_oauth._get_api_host", return_value="foo.com" + ) + create_openshift_oauth_objects("foo", "bar") + create_ns_sa_args = create_namespaced_service_account.call_args + create_crb_args = 
create_cluster_role_binding.call_args + create_ns_serv_args = create_namespaced_service.call_args + create_ns_ingress_args = create_namespaced_ingress.call_args + assert ( + create_ns_sa_args.kwargs["namespace"] == create_ns_serv_args.kwargs["namespace"] + ) + assert ( + create_ns_serv_args.kwargs["namespace"] + == create_ns_ingress_args.kwargs["namespace"] + ) + assert isinstance(create_ns_sa_args.kwargs["body"], client.V1ServiceAccount) + assert isinstance(create_crb_args.kwargs["body"], client.V1ClusterRoleBinding) + assert isinstance(create_ns_serv_args.kwargs["body"], client.V1Service) + assert isinstance(create_ns_ingress_args.kwargs["body"], client.V1Ingress) + assert ( + create_ns_serv_args.kwargs["body"].spec.ports[0].name + == create_ns_ingress_args.kwargs["body"] + .spec.rules[0] + .http.paths[0] + .backend.service.port.name + ) + + +def test_replace_openshift_oauth(mocker: MockerFixture): + # not_found_exception = client.ApiException(reason="Conflict") + create_namespaced_service_account = MagicMock( + side_effect=client.ApiException(reason="Conflict") + ) + create_cluster_role_binding = MagicMock( + side_effect=client.ApiException(reason="Conflict") + ) + create_namespaced_service = MagicMock( + side_effect=client.ApiException(reason="Conflict") + ) + create_namespaced_ingress = MagicMock( + side_effect=client.ApiException(reason="Conflict") + ) + mocker.patch.object( + client.CoreV1Api, + "create_namespaced_service_account", + create_namespaced_service_account, + ) + mocker.patch.object( + client.RbacAuthorizationV1Api, + "create_cluster_role_binding", + create_cluster_role_binding, + ) + mocker.patch.object( + client.CoreV1Api, "create_namespaced_service", create_namespaced_service + ) + mocker.patch.object( + client.NetworkingV1Api, "create_namespaced_ingress", create_namespaced_ingress + ) + mocker.patch( + "codeflare_sdk.utils.openshift_oauth._get_api_host", return_value="foo.com" + ) + replace_namespaced_service_account = MagicMock() + replace_cluster_role_binding = MagicMock() + replace_namespaced_service = MagicMock() + replace_namespaced_ingress = MagicMock() + mocker.patch.object( + client.CoreV1Api, + "replace_namespaced_service_account", + replace_namespaced_service_account, + ) + mocker.patch.object( + client.RbacAuthorizationV1Api, + "replace_cluster_role_binding", + replace_cluster_role_binding, + ) + mocker.patch.object( + client.CoreV1Api, "replace_namespaced_service", replace_namespaced_service + ) + mocker.patch.object( + client.NetworkingV1Api, "replace_namespaced_ingress", replace_namespaced_ingress + ) + create_openshift_oauth_objects("foo", "bar") + replace_namespaced_service_account.assert_called_once() + replace_cluster_role_binding.assert_called_once() + replace_namespaced_service.assert_called_once() + replace_namespaced_ingress.assert_called_once() + + +def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): + mocker.patch( + "codeflare_sdk.utils.generate_yaml._get_api_host", return_value="foo.com" + ) + mocker.patch( + "codeflare_sdk.cluster.cluster.get_current_namespace", + return_value="opendatahub", + ) + write_user_appwrapper = MagicMock() + mocker.patch( + "codeflare_sdk.utils.generate_yaml.write_user_appwrapper", write_user_appwrapper + ) + Cluster(ClusterConfiguration("test_cluster", openshift_oauth=True)) + user_yaml = write_user_appwrapper.call_args.args[0] + assert any( + container["name"] == "oauth-proxy" + for container in user_yaml["spec"]["resources"]["GenericItems"][0][ + "generictemplate" + 
]["spec"]["headGroupSpec"]["template"]["spec"]["containers"] + ) + + # Make sure to always keep this function last def test_cleanup(): os.remove("unit-test-cluster.yaml") From 2441f4fa3fb3b6a78fd69ac3512fe89c9e8f465b Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Fri, 20 Oct 2023 15:17:42 +0000 Subject: [PATCH 034/496] Changes in docs for release: v0.10.0 --- docs/cluster/auth.html | 8 +- docs/cluster/cluster.html | 250 ++++++++++++++++++---- docs/cluster/config.html | 13 +- docs/job/jobs.html | 300 ++++++++++++++------------ docs/utils/generate_yaml.html | 174 +++++++++++++++- docs/utils/index.html | 5 + docs/utils/kube_api_helpers.html | 7 +- docs/utils/openshift_oauth.html | 347 +++++++++++++++++++++++++++++++ 8 files changed, 922 insertions(+), 182 deletions(-) create mode 100644 docs/utils/openshift_oauth.html diff --git a/docs/cluster/auth.html b/docs/cluster/auth.html index 0389fbf7..f1d8ad00 100644 --- a/docs/cluster/auth.html +++ b/docs/cluster/auth.html @@ -58,6 +58,8 @@

    Module codeflare_sdk.cluster.auth

    import urllib3 from ..utils.kube_api_helpers import _kube_api_error_handling +from typing import Optional + global api_client api_client = None global config_path @@ -221,7 +223,7 @@

    Module codeflare_sdk.cluster.auth

    return config_path -def api_config_handler() -> str: +def api_config_handler() -> Optional[client.ApiClient]: """ This function is used to load the api client if the user has logged in """ @@ -239,7 +241,7 @@

    Module codeflare_sdk.cluster.auth

    Functions

    -def api_config_handler() ‑> str +def api_config_handler() ‑> Optional[kubernetes.client.api_client.ApiClient]

    This function is used to load the api client if the user has logged in
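Because the handler now returns Optional[client.ApiClient] (None when no explicit login client was registered), callers in this patch consistently fall back to a default ApiClient. A short sketch of that calling convention, assuming a kube config has already been loaded whenever the handler returns None:

    from kubernetes import client

    from codeflare_sdk.cluster.auth import api_config_handler, config_check

    def get_authorization_header() -> dict:
        # Make sure some configuration (login or local kube config) is loaded.
        config_check()
        # Fall back to the default ApiClient when no login client exists.
        k8_client = api_config_handler() or client.ApiClient()
        # Same header lookup that Cluster._client_headers performs.
        return {
            "Authorization": k8_client.configuration.get_api_key_with_prefix(
                "authorization"
            )
        }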

    @@ -247,7 +249,7 @@

    Functions

    Expand source code -
    def api_config_handler() -> str:
    +
    def api_config_handler() -> Optional[client.ApiClient]:
         """
         This function is used to load the api client if the user has logged in
         """
    diff --git a/docs/cluster/cluster.html b/docs/cluster/cluster.html
    index 3b396221..7b391787 100644
    --- a/docs/cluster/cluster.html
    +++ b/docs/cluster/cluster.html
    @@ -53,12 +53,19 @@ 

    Module codeflare_sdk.cluster.cluster

    from time import sleep from typing import List, Optional, Tuple, Dict +import openshift as oc +from kubernetes import config from ray.job_submission import JobSubmissionClient +import urllib3 from .auth import config_check, api_config_handler from ..utils import pretty_print from ..utils.generate_yaml import generate_appwrapper from ..utils.kube_api_helpers import _kube_api_error_handling +from ..utils.openshift_oauth import ( + create_openshift_oauth_objects, + delete_openshift_oauth_objects, +) from .config import ClusterConfiguration from .model import ( AppWrapper, @@ -72,6 +79,8 @@

    Module codeflare_sdk.cluster.cluster

    import os import requests +from kubernetes import config + class Cluster: """ @@ -93,6 +102,39 @@

    Module codeflare_sdk.cluster.cluster

    self.config = config self.app_wrapper_yaml = self.create_app_wrapper() self.app_wrapper_name = self.app_wrapper_yaml.split(".")[0] + self._client = None + + @property + def _client_headers(self): + k8_client = api_config_handler() or client.ApiClient() + return { + "Authorization": k8_client.configuration.get_api_key_with_prefix( + "authorization" + ) + } + + @property + def _client_verify_tls(self): + return not self.config.openshift_oauth + + @property + def client(self): + if self._client: + return self._client + if self.config.openshift_oauth: + print( + api_config_handler().configuration.get_api_key_with_prefix( + "authorization" + ) + ) + self._client = JobSubmissionClient( + self.cluster_dashboard_uri(), + headers=self._client_headers, + verify=self._client_verify_tls, + ) + else: + self._client = JobSubmissionClient(self.cluster_dashboard_uri()) + return self._client def evaluate_dispatch_priority(self): priority_class = self.config.dispatch_priority @@ -179,6 +221,7 @@

    Module codeflare_sdk.cluster.cluster

    image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, + openshift_oauth=self.config.openshift_oauth, ) # creates a new cluster with the provided or default spec @@ -188,6 +231,11 @@

    Module codeflare_sdk.cluster.cluster

    the MCAD queue. """ namespace = self.config.namespace + if self.config.openshift_oauth: + create_openshift_oauth_objects( + cluster_name=self.config.name, namespace=namespace + ) + try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -222,6 +270,11 @@

    Module codeflare_sdk.cluster.cluster

    except Exception as e: # pragma: no cover return _kube_api_error_handling(e) + if self.config.openshift_oauth: + delete_openshift_oauth_objects( + cluster_name=self.config.name, namespace=namespace + ) + def status( self, print_to_console: bool = True ) -> Tuple[CodeFlareClusterStatus, bool]: @@ -290,7 +343,16 @@

    Module codeflare_sdk.cluster.cluster

    return status, ready def is_dashboard_ready(self) -> bool: - response = requests.get(self.cluster_dashboard_uri(), timeout=5) + try: + response = requests.get( + self.cluster_dashboard_uri(), + headers=self._client_headers, + timeout=5, + verify=self._client_verify_tls, + ) + except requests.exceptions.SSLError: + # SSL exception occurs when oauth ingress has been created but cluster is not up + return False if response.status_code == 200: return True else: @@ -362,7 +424,13 @@

    Module codeflare_sdk.cluster.cluster

    return _kube_api_error_handling(e) for route in routes["items"]: - if route["metadata"]["name"] == f"ray-dashboard-{self.config.name}": + if route["metadata"][ + "name" + ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ + "name" + ].startswith( + f"{self.config.name}-ingress" + ): protocol = "https" if route["spec"].get("tls") else "http" return f"{protocol}://{route['spec']['host']}" return "Dashboard route not available yet, have you run cluster.up()?" @@ -371,30 +439,24 @@

    Module codeflare_sdk.cluster.cluster

    """ This method accesses the head ray node in your cluster and lists the running jobs. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.list_jobs() + return self.client.list_jobs() def job_status(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the job status for the provided job id. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.get_job_status(job_id) + return self.client.get_job_status(job_id) def job_logs(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the logs for the provided job id. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.get_job_logs(job_id) + return self.client.get_job_logs(job_id) def torchx_config( self, working_dir: str = None, requirements: str = None ) -> Dict[str, str]: - dashboard_address = f"{self.cluster_dashboard_uri().lstrip('http://')}" + dashboard_address = urllib3.util.parse_url(self.cluster_dashboard_uri()).host to_return = { "cluster_name": self.config.name, "dashboard_address": dashboard_address, @@ -623,7 +685,7 @@

    Module codeflare_sdk.cluster.cluster

    def _map_to_ray_cluster(rc) -> Optional[RayCluster]: - if "status" in rc and "state" in rc["status"]: + if "state" in rc["status"]: status = RayClusterStatus(rc["status"]["state"].lower()) else: status = RayClusterStatus.UNKNOWN @@ -638,7 +700,13 @@

    Module codeflare_sdk.cluster.cluster

    ) ray_route = None for route in routes["items"]: - if route["metadata"]["name"] == f"ray-dashboard-{rc['metadata']['name']}": + if route["metadata"][ + "name" + ] == f"ray-dashboard-{rc['metadata']['name']}" or route["metadata"][ + "name" + ].startswith( + f"{rc['metadata']['name']}-ingress" + ): protocol = "https" if route["spec"].get("tls") else "http" ray_route = f"{protocol}://{route['spec']['host']}" @@ -862,6 +930,39 @@

    Classes

    self.config = config self.app_wrapper_yaml = self.create_app_wrapper() self.app_wrapper_name = self.app_wrapper_yaml.split(".")[0] + self._client = None + + @property + def _client_headers(self): + k8_client = api_config_handler() or client.ApiClient() + return { + "Authorization": k8_client.configuration.get_api_key_with_prefix( + "authorization" + ) + } + + @property + def _client_verify_tls(self): + return not self.config.openshift_oauth + + @property + def client(self): + if self._client: + return self._client + if self.config.openshift_oauth: + print( + api_config_handler().configuration.get_api_key_with_prefix( + "authorization" + ) + ) + self._client = JobSubmissionClient( + self.cluster_dashboard_uri(), + headers=self._client_headers, + verify=self._client_verify_tls, + ) + else: + self._client = JobSubmissionClient(self.cluster_dashboard_uri()) + return self._client def evaluate_dispatch_priority(self): priority_class = self.config.dispatch_priority @@ -948,6 +1049,7 @@

    Classes

    image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, + openshift_oauth=self.config.openshift_oauth, ) # creates a new cluster with the provided or default spec @@ -957,6 +1059,11 @@

    Classes

    the MCAD queue. """ namespace = self.config.namespace + if self.config.openshift_oauth: + create_openshift_oauth_objects( + cluster_name=self.config.name, namespace=namespace + ) + try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -991,6 +1098,11 @@

    Classes

    except Exception as e: # pragma: no cover return _kube_api_error_handling(e) + if self.config.openshift_oauth: + delete_openshift_oauth_objects( + cluster_name=self.config.name, namespace=namespace + ) + def status( self, print_to_console: bool = True ) -> Tuple[CodeFlareClusterStatus, bool]: @@ -1059,7 +1171,16 @@

    Classes

    return status, ready def is_dashboard_ready(self) -> bool: - response = requests.get(self.cluster_dashboard_uri(), timeout=5) + try: + response = requests.get( + self.cluster_dashboard_uri(), + headers=self._client_headers, + timeout=5, + verify=self._client_verify_tls, + ) + except requests.exceptions.SSLError: + # SSL exception occurs when oauth ingress has been created but cluster is not up + return False if response.status_code == 200: return True else: @@ -1131,7 +1252,13 @@

    Classes

    return _kube_api_error_handling(e) for route in routes["items"]: - if route["metadata"]["name"] == f"ray-dashboard-{self.config.name}": + if route["metadata"][ + "name" + ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ + "name" + ].startswith( + f"{self.config.name}-ingress" + ): protocol = "https" if route["spec"].get("tls") else "http" return f"{protocol}://{route['spec']['host']}" return "Dashboard route not available yet, have you run cluster.up()?" @@ -1140,30 +1267,24 @@

    Classes

    """ This method accesses the head ray node in your cluster and lists the running jobs. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.list_jobs() + return self.client.list_jobs() def job_status(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the job status for the provided job id. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.get_job_status(job_id) + return self.client.get_job_status(job_id) def job_logs(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the logs for the provided job id. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.get_job_logs(job_id) + return self.client.get_job_logs(job_id) def torchx_config( self, working_dir: str = None, requirements: str = None ) -> Dict[str, str]: - dashboard_address = f"{self.cluster_dashboard_uri().lstrip('http://')}" + dashboard_address = urllib3.util.parse_url(self.cluster_dashboard_uri()).host to_return = { "cluster_name": self.config.name, "dashboard_address": dashboard_address, @@ -1230,6 +1351,36 @@

    Class variables

    +

    Instance variables

    +
    +
    var client
    +
    +
    +
    + +Expand source code + +
    @property
    +def client(self):
    +    if self._client:
    +        return self._client
    +    if self.config.openshift_oauth:
    +        print(
    +            api_config_handler().configuration.get_api_key_with_prefix(
    +                "authorization"
    +            )
    +        )
    +        self._client = JobSubmissionClient(
    +            self.cluster_dashboard_uri(),
    +            headers=self._client_headers,
    +            verify=self._client_verify_tls,
    +        )
    +    else:
    +        self._client = JobSubmissionClient(self.cluster_dashboard_uri())
    +    return self._client
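The cached client property above is what routes job calls through the OAuth proxy: it attaches the bearer token and, when openshift_oauth is set, skips verification of the proxy's cluster-issued certificate. A standalone sketch of building such a client outside the Cluster object (the dashboard URL and token are placeholders):

    from ray.job_submission import JobSubmissionClient

    # Placeholders: substitute the real dashboard route and an OpenShift bearer token.
    DASHBOARD_URL = "https://ray-dashboard-raytest-default.apps.example.com"
    TOKEN = "sha256~example-token"

    ray_client = JobSubmissionClient(
        DASHBOARD_URL,
        headers={"Authorization": f"Bearer {TOKEN}"},
        # The oauth-proxy serves a cluster-issued certificate, mirroring
        # _client_verify_tls when OpenShift OAuth is enabled.
        verify=False,
    )
    print(ray_client.list_jobs())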
    +
    +
    +

    Methods

    @@ -1258,7 +1409,13 @@

    Methods

    return _kube_api_error_handling(e) for route in routes["items"]: - if route["metadata"]["name"] == f"ray-dashboard-{self.config.name}": + if route["metadata"][ + "name" + ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ + "name" + ].startswith( + f"{self.config.name}-ingress" + ): protocol = "https" if route["spec"].get("tls") else "http" return f"{protocol}://{route['spec']['host']}" return "Dashboard route not available yet, have you run cluster.up()?"
    @@ -1355,6 +1512,7 @@

    Methods

    image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, + openshift_oauth=self.config.openshift_oauth, )
    @@ -1401,7 +1559,12 @@

    Methods

    name=self.app_wrapper_name, ) except Exception as e: # pragma: no cover - return _kube_api_error_handling(e)
+        return _kube_api_error_handling(e)
+
+    if self.config.openshift_oauth:
+        delete_openshift_oauth_objects(
+            cluster_name=self.config.name, namespace=namespace
+        )
    @@ -1496,7 +1659,16 @@

    Methods

    Expand source code
    def is_dashboard_ready(self) -> bool:
    -    response = requests.get(self.cluster_dashboard_uri(), timeout=5)
    +    try:
    +        response = requests.get(
    +            self.cluster_dashboard_uri(),
    +            headers=self._client_headers,
    +            timeout=5,
    +            verify=self._client_verify_tls,
    +        )
    +    except requests.exceptions.SSLError:
    +        # SSL exception occurs when oauth ingress has been created but cluster is not up
    +        return False
         if response.status_code == 200:
             return True
         else:
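A standalone version of this readiness probe is useful when waiting for the route to appear; it mirrors the method above, and the SSLError branch covers the window where the OAuth ingress exists before the Ray head is serving (the URL is a placeholder):

    from typing import Optional

    import requests

    def dashboard_ready(url: str, headers: Optional[dict] = None) -> bool:
        try:
            # verify=False mirrors _client_verify_tls for the self-signed proxy cert.
            response = requests.get(url, headers=headers, timeout=5, verify=False)
        except requests.exceptions.SSLError:
            # The ingress can exist before the dashboard behind it is up.
            return False
        return response.status_code == 200

    print(dashboard_ready("https://ray-dashboard-raytest-default.apps.example.com"))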
    @@ -1516,9 +1688,7 @@ 

    Methods

    """ This method accesses the head ray node in your cluster and returns the logs for the provided job id. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.get_job_logs(job_id)
    + return self.client.get_job_logs(job_id)
    @@ -1534,9 +1704,7 @@

    Methods

    """ This method accesses the head ray node in your cluster and returns the job status for the provided job id. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.get_job_status(job_id)
    + return self.client.get_job_status(job_id)
    @@ -1552,9 +1720,7 @@

    Methods

    """ This method accesses the head ray node in your cluster and lists the running jobs. """ - dashboard_route = self.cluster_dashboard_uri() - client = JobSubmissionClient(dashboard_route) - return client.list_jobs()
    + return self.client.list_jobs()
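From the user's side, job inspection is unchanged; all three wrappers now reuse the lazily created cluster.client. A usage sketch, assuming you are logged in and the cluster below exists (names and the submission id are illustrative):

    from codeflare_sdk.cluster.cluster import Cluster
    from codeflare_sdk.cluster.config import ClusterConfiguration

    cluster = Cluster(ClusterConfiguration(name="raytest", namespace="default"))

    for job in cluster.list_jobs():          # single shared JobSubmissionClient
        print(job)

    print(cluster.job_status("raysubmit_example"))   # status for one submission id
    print(cluster.job_logs("raysubmit_example"))     # logs captured by Ray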
    @@ -1664,7 +1830,7 @@

    Methods

    def torchx_config(
         self, working_dir: str = None, requirements: str = None
     ) -> Dict[str, str]:
    -    dashboard_address = f"{self.cluster_dashboard_uri().lstrip('http://')}"
    +    dashboard_address = urllib3.util.parse_url(self.cluster_dashboard_uri()).host
         to_return = {
             "cluster_name": self.config.name,
             "dashboard_address": dashboard_address,
    @@ -1692,6 +1858,11 @@ 

    Methods

    the MCAD queue. """ namespace = self.config.namespace + if self.config.openshift_oauth: + create_openshift_oauth_objects( + cluster_name=self.config.name, namespace=namespace + ) + try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -1785,6 +1956,7 @@

    Index

  • Cluster

      +
    • client
    • cluster_dashboard_uri
    • cluster_uri
    • create_app_wrapper
    • diff --git a/docs/cluster/config.html b/docs/cluster/config.html index 2edf0a3c..28830390 100644 --- a/docs/cluster/config.html +++ b/docs/cluster/config.html @@ -82,7 +82,8 @@

      Module codeflare_sdk.cluster.config

      image: str = "quay.io/project-codeflare/ray:latest-py39-cu118" local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) - dispatch_priority: str = None
  • + dispatch_priority: str = None + openshift_oauth: bool = False # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator
    @@ -96,7 +97,7 @@

    Classes

    class ClusterConfiguration -(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, envs: dict = <factory>, image: str = 'quay.io/project-codeflare/ray:latest-py39-cu118', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None) +(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, envs: dict = <factory>, image: str = 'quay.io/project-codeflare/ray:latest-py39-cu118', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False)

    This dataclass is used to specify resource requirements and other details, and @@ -130,7 +131,8 @@
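The only user-facing addition here is the openshift_oauth flag. A hedged configuration sketch (name, namespace and sizing are illustrative; as the NOTE above says, the submitting user needs permission to create the system:auth-delegator ClusterRoleBinding):

    from codeflare_sdk.cluster.cluster import Cluster
    from codeflare_sdk.cluster.config import ClusterConfiguration

    cluster = Cluster(
        ClusterConfiguration(
            name="raytest",
            namespace="default",
            num_workers=2,
            min_cpus=1,
            max_cpus=1,
            min_memory=2,
            max_memory=2,
            num_gpus=0,
            # Adds the oauth-proxy sidecar and has Cluster.up() create the
            # ServiceAccount, Service, Ingress and ClusterRoleBinding for it.
            openshift_oauth=True,
        )
    )

    cluster.up()    # provisions the OAuth objects alongside the AppWrapper
    cluster.down()  # removes them again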

    Classes

    image: str = "quay.io/project-codeflare/ray:latest-py39-cu118" local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) - dispatch_priority: str = None + dispatch_priority: str = None + openshift_oauth: bool = False # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator

    Class variables

    @@ -210,6 +212,10 @@

    Class variables

    +
    var openshift_oauth : bool
    +
    +
    +
    var template : str
    @@ -254,6 +260,7 @@

    namespace
  • num_gpus
  • num_workers
  • +
  • openshift_oauth
  • template
  • diff --git a/docs/job/jobs.html b/docs/job/jobs.html index 96ea4744..266da13e 100644 --- a/docs/job/jobs.html +++ b/docs/job/jobs.html @@ -46,15 +46,19 @@

    Module codeflare_sdk.job.jobs

    from pathlib import Path from torchx.components.dist import ddp -from torchx.runner import get_runner +from torchx.runner import get_runner, Runner +from torchx.schedulers.ray_scheduler import RayScheduler from torchx.specs import AppHandle, parse_app_handle, AppDryRunInfo +from ray.job_submission import JobSubmissionClient + +import openshift as oc + if TYPE_CHECKING: from ..cluster.cluster import Cluster from ..cluster.cluster import get_current_namespace all_jobs: List["Job"] = [] -torchx_runner = get_runner() class JobDefinition(metaclass=abc.ABCMeta): @@ -120,30 +124,37 @@

    Module codeflare_sdk.job.jobs

    def _dry_run(self, cluster: "Cluster"): j = f"{cluster.config.num_workers}x{max(cluster.config.num_gpus, 1)}" # # of proc. = # of gpus - return torchx_runner.dryrun( - app=ddp( - *self.script_args, - script=self.script, - m=self.m, - name=self.name, - h=self.h, - cpu=self.cpu if self.cpu is not None else cluster.config.max_cpus, - gpu=self.gpu if self.gpu is not None else cluster.config.num_gpus, - memMB=self.memMB - if self.memMB is not None - else cluster.config.max_memory * 1024, - j=self.j if self.j is not None else j, - env=self.env, - max_retries=self.max_retries, - rdzv_port=self.rdzv_port, - rdzv_backend=self.rdzv_backend - if self.rdzv_backend is not None - else "static", - mounts=self.mounts, + runner = get_runner(ray_client=cluster.client) + runner._scheduler_instances["ray"] = RayScheduler( + session_name=runner._name, ray_client=cluster.client + ) + return ( + runner.dryrun( + app=ddp( + *self.script_args, + script=self.script, + m=self.m, + name=self.name, + h=self.h, + cpu=self.cpu if self.cpu is not None else cluster.config.max_cpus, + gpu=self.gpu if self.gpu is not None else cluster.config.num_gpus, + memMB=self.memMB + if self.memMB is not None + else cluster.config.max_memory * 1024, + j=self.j if self.j is not None else j, + env=self.env, + max_retries=self.max_retries, + rdzv_port=self.rdzv_port, + rdzv_backend=self.rdzv_backend + if self.rdzv_backend is not None + else "static", + mounts=self.mounts, + ), + scheduler=cluster.torchx_scheduler, + cfg=cluster.torchx_config(**self.scheduler_args), + workspace=self.workspace, ), - scheduler=cluster.torchx_scheduler, - cfg=cluster.torchx_config(**self.scheduler_args), - workspace=self.workspace, + runner, ) def _missing_spec(self, spec: str): @@ -153,41 +164,47 @@

    Module codeflare_sdk.job.jobs

    if self.scheduler_args is not None: if self.scheduler_args.get("namespace") is None: self.scheduler_args["namespace"] = get_current_namespace() - return torchx_runner.dryrun( - app=ddp( - *self.script_args, - script=self.script, - m=self.m, - name=self.name if self.name is not None else self._missing_spec("name"), - h=self.h, - cpu=self.cpu - if self.cpu is not None - else self._missing_spec("cpu (# cpus per worker)"), - gpu=self.gpu - if self.gpu is not None - else self._missing_spec("gpu (# gpus per worker)"), - memMB=self.memMB - if self.memMB is not None - else self._missing_spec("memMB (memory in MB)"), - j=self.j - if self.j is not None - else self._missing_spec( - "j (`workers`x`procs`)" - ), # # of proc. = # of gpus, - env=self.env, # should this still exist? - max_retries=self.max_retries, - rdzv_port=self.rdzv_port, # should this still exist? - rdzv_backend=self.rdzv_backend - if self.rdzv_backend is not None - else "c10d", - mounts=self.mounts, - image=self.image - if self.image is not None - else self._missing_spec("image"), + runner = get_runner() + return ( + runner.dryrun( + app=ddp( + *self.script_args, + script=self.script, + m=self.m, + name=self.name + if self.name is not None + else self._missing_spec("name"), + h=self.h, + cpu=self.cpu + if self.cpu is not None + else self._missing_spec("cpu (# cpus per worker)"), + gpu=self.gpu + if self.gpu is not None + else self._missing_spec("gpu (# gpus per worker)"), + memMB=self.memMB + if self.memMB is not None + else self._missing_spec("memMB (memory in MB)"), + j=self.j + if self.j is not None + else self._missing_spec( + "j (`workers`x`procs`)" + ), # # of proc. = # of gpus, + env=self.env, # should this still exist? + max_retries=self.max_retries, + rdzv_port=self.rdzv_port, # should this still exist? + rdzv_backend=self.rdzv_backend + if self.rdzv_backend is not None + else "c10d", + mounts=self.mounts, + image=self.image + if self.image is not None + else self._missing_spec("image"), + ), + scheduler="kubernetes_mcad", + cfg=self.scheduler_args, + workspace="", ), - scheduler="kubernetes_mcad", - cfg=self.scheduler_args, - workspace="", + runner, ) def submit(self, cluster: "Cluster" = None) -> "Job": @@ -199,21 +216,23 @@

    Module codeflare_sdk.job.jobs

    self.job_definition = job_definition self.cluster = cluster if self.cluster: - self._app_handle = torchx_runner.schedule(job_definition._dry_run(cluster)) + definition, runner = job_definition._dry_run(cluster) + self._app_handle = runner.schedule(definition) + self._runner = runner else: - self._app_handle = torchx_runner.schedule( - job_definition._dry_run_no_cluster() - ) + definition, runner = job_definition._dry_run_no_cluster() + self._app_handle = runner.schedule(definition) + self._runner = runner all_jobs.append(self) def status(self) -> str: - return torchx_runner.status(self._app_handle) + return self._runner.status(self._app_handle) def logs(self) -> str: - return "".join(torchx_runner.log_lines(self._app_handle, None)) + return "".join(self._runner.log_lines(self._app_handle, None)) def cancel(self): - torchx_runner.cancel(self._app_handle) + self._runner.cancel(self._app_handle)
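Because each Job now keeps the Runner returned by _dry_run as self._runner, the submit/status/logs/cancel flow stays on the job object. A sketch of the intended flow, with the script name and scheduler arguments as placeholders:

    from codeflare_sdk.cluster.cluster import Cluster
    from codeflare_sdk.cluster.config import ClusterConfiguration
    from codeflare_sdk.job.jobs import DDPJobDefinition

    cluster = Cluster(ClusterConfiguration(name="raytest", namespace="default", num_workers=2))
    cluster.up()

    job_def = DDPJobDefinition(
        name="mnist",
        script="train.py",                                    # placeholder script
        scheduler_args={"requirements": "requirements.txt"},  # placeholder args
    )

    job = job_def.submit(cluster)   # schedules through the Runner built in _dry_run
    print(job.status())             # proxied to self._runner.status(...)
    print(job.logs())               # proxied to self._runner.log_lines(...)
    job.cancel()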
    @@ -240,21 +259,23 @@

    Classes

    self.job_definition = job_definition self.cluster = cluster if self.cluster: - self._app_handle = torchx_runner.schedule(job_definition._dry_run(cluster)) + definition, runner = job_definition._dry_run(cluster) + self._app_handle = runner.schedule(definition) + self._runner = runner else: - self._app_handle = torchx_runner.schedule( - job_definition._dry_run_no_cluster() - ) + definition, runner = job_definition._dry_run_no_cluster() + self._app_handle = runner.schedule(definition) + self._runner = runner all_jobs.append(self) def status(self) -> str: - return torchx_runner.status(self._app_handle) + return self._runner.status(self._app_handle) def logs(self) -> str: - return "".join(torchx_runner.log_lines(self._app_handle, None)) + return "".join(self._runner.log_lines(self._app_handle, None)) def cancel(self): - torchx_runner.cancel(self._app_handle) + self._runner.cancel(self._app_handle)

    Ancestors

      @@ -272,7 +293,7 @@

      Methods

      Expand source code
      def cancel(self):
      -    torchx_runner.cancel(self._app_handle)
      + self._runner.cancel(self._app_handle)
      @@ -285,7 +306,7 @@

      Methods

      Expand source code
      def logs(self) -> str:
      -    return "".join(torchx_runner.log_lines(self._app_handle, None))
      + return "".join(self._runner.log_lines(self._app_handle, None))
      @@ -298,7 +319,7 @@

      Methods

      Expand source code
      def status(self) -> str:
      -    return torchx_runner.status(self._app_handle)
      + return self._runner.status(self._app_handle)
    @@ -360,30 +381,37 @@

    Methods

    def _dry_run(self, cluster: "Cluster"): j = f"{cluster.config.num_workers}x{max(cluster.config.num_gpus, 1)}" # # of proc. = # of gpus - return torchx_runner.dryrun( - app=ddp( - *self.script_args, - script=self.script, - m=self.m, - name=self.name, - h=self.h, - cpu=self.cpu if self.cpu is not None else cluster.config.max_cpus, - gpu=self.gpu if self.gpu is not None else cluster.config.num_gpus, - memMB=self.memMB - if self.memMB is not None - else cluster.config.max_memory * 1024, - j=self.j if self.j is not None else j, - env=self.env, - max_retries=self.max_retries, - rdzv_port=self.rdzv_port, - rdzv_backend=self.rdzv_backend - if self.rdzv_backend is not None - else "static", - mounts=self.mounts, + runner = get_runner(ray_client=cluster.client) + runner._scheduler_instances["ray"] = RayScheduler( + session_name=runner._name, ray_client=cluster.client + ) + return ( + runner.dryrun( + app=ddp( + *self.script_args, + script=self.script, + m=self.m, + name=self.name, + h=self.h, + cpu=self.cpu if self.cpu is not None else cluster.config.max_cpus, + gpu=self.gpu if self.gpu is not None else cluster.config.num_gpus, + memMB=self.memMB + if self.memMB is not None + else cluster.config.max_memory * 1024, + j=self.j if self.j is not None else j, + env=self.env, + max_retries=self.max_retries, + rdzv_port=self.rdzv_port, + rdzv_backend=self.rdzv_backend + if self.rdzv_backend is not None + else "static", + mounts=self.mounts, + ), + scheduler=cluster.torchx_scheduler, + cfg=cluster.torchx_config(**self.scheduler_args), + workspace=self.workspace, ), - scheduler=cluster.torchx_scheduler, - cfg=cluster.torchx_config(**self.scheduler_args), - workspace=self.workspace, + runner, ) def _missing_spec(self, spec: str): @@ -393,41 +421,47 @@

    Methods

    if self.scheduler_args is not None: if self.scheduler_args.get("namespace") is None: self.scheduler_args["namespace"] = get_current_namespace() - return torchx_runner.dryrun( - app=ddp( - *self.script_args, - script=self.script, - m=self.m, - name=self.name if self.name is not None else self._missing_spec("name"), - h=self.h, - cpu=self.cpu - if self.cpu is not None - else self._missing_spec("cpu (# cpus per worker)"), - gpu=self.gpu - if self.gpu is not None - else self._missing_spec("gpu (# gpus per worker)"), - memMB=self.memMB - if self.memMB is not None - else self._missing_spec("memMB (memory in MB)"), - j=self.j - if self.j is not None - else self._missing_spec( - "j (`workers`x`procs`)" - ), # # of proc. = # of gpus, - env=self.env, # should this still exist? - max_retries=self.max_retries, - rdzv_port=self.rdzv_port, # should this still exist? - rdzv_backend=self.rdzv_backend - if self.rdzv_backend is not None - else "c10d", - mounts=self.mounts, - image=self.image - if self.image is not None - else self._missing_spec("image"), + runner = get_runner() + return ( + runner.dryrun( + app=ddp( + *self.script_args, + script=self.script, + m=self.m, + name=self.name + if self.name is not None + else self._missing_spec("name"), + h=self.h, + cpu=self.cpu + if self.cpu is not None + else self._missing_spec("cpu (# cpus per worker)"), + gpu=self.gpu + if self.gpu is not None + else self._missing_spec("gpu (# gpus per worker)"), + memMB=self.memMB + if self.memMB is not None + else self._missing_spec("memMB (memory in MB)"), + j=self.j + if self.j is not None + else self._missing_spec( + "j (`workers`x`procs`)" + ), # # of proc. = # of gpus, + env=self.env, # should this still exist? + max_retries=self.max_retries, + rdzv_port=self.rdzv_port, # should this still exist? + rdzv_backend=self.rdzv_backend + if self.rdzv_backend is not None + else "c10d", + mounts=self.mounts, + image=self.image + if self.image is not None + else self._missing_spec("image"), + ), + scheduler="kubernetes_mcad", + cfg=self.scheduler_args, + workspace="", ), - scheduler="kubernetes_mcad", - cfg=self.scheduler_args, - workspace="", + runner, ) def submit(self, cluster: "Cluster" = None) -> "Job": diff --git a/docs/utils/generate_yaml.html b/docs/utils/generate_yaml.html index 791898a3..2091f8b3 100644 --- a/docs/utils/generate_yaml.html +++ b/docs/utils/generate_yaml.html @@ -55,6 +55,13 @@

    Module codeflare_sdk.utils.generate_yaml

    from kubernetes import client, config from .kube_api_helpers import _kube_api_error_handling from ..cluster.auth import api_config_handler, config_check +from os import urandom +from base64 import b64encode +from urllib3.util import parse_url + +from kubernetes import client, config + +from .kube_api_helpers import _get_api_host def read_template(template): @@ -77,13 +84,17 @@

    Module codeflare_sdk.utils.generate_yaml

    def update_dashboard_route(route_item, cluster_name, namespace): metadata = route_item.get("generictemplate", {}).get("metadata") - metadata["name"] = f"ray-dashboard-{cluster_name}" + metadata["name"] = gen_dashboard_route_name(cluster_name) metadata["namespace"] = namespace metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" spec = route_item.get("generictemplate", {}).get("spec") spec["to"]["name"] = f"{cluster_name}-head-svc" +def gen_dashboard_route_name(cluster_name): + return f"ray-dashboard-{cluster_name}" + + # ToDo: refactor the update_x_route() functions def update_rayclient_route(route_item, cluster_name, namespace): metadata = route_item.get("generictemplate", {}).get("metadata") @@ -400,6 +411,83 @@

    Module codeflare_sdk.utils.generate_yaml

    print(f"Written to: {output_file_name}") +def enable_openshift_oauth(user_yaml, cluster_name, namespace): + config_check() + k8_client = api_config_handler() or client.ApiClient() + tls_mount_location = "/etc/tls/private" + oauth_port = 8443 + oauth_sa_name = f"{cluster_name}-oauth-proxy" + tls_secret_name = f"{cluster_name}-proxy-tls-secret" + tls_volume_name = "proxy-tls-secret" + port_name = "oauth-proxy" + host = _get_api_host(k8_client) + host = host.replace( + "api.", f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps." + ) + oauth_sidecar = _create_oauth_sidecar_object( + namespace, + tls_mount_location, + oauth_port, + oauth_sa_name, + tls_volume_name, + port_name, + ) + tls_secret_volume = client.V1Volume( + name=tls_volume_name, + secret=client.V1SecretVolumeSource(secret_name=tls_secret_name), + ) + # allows for setting value of Cluster object when initializing object from an existing AppWrapper on cluster + user_yaml["metadata"]["annotations"] = user_yaml["metadata"].get("annotations", {}) + user_yaml["metadata"]["annotations"][ + "codeflare-sdk-use-oauth" + ] = "true" # if the user gets an + ray_headgroup_pod = user_yaml["spec"]["resources"]["GenericItems"][0][ + "generictemplate" + ]["spec"]["headGroupSpec"]["template"]["spec"] + user_yaml["spec"]["resources"]["GenericItems"].pop(1) + ray_headgroup_pod["serviceAccount"] = oauth_sa_name + ray_headgroup_pod["volumes"] = ray_headgroup_pod.get("volumes", []) + + # we use a generic api client here so that the serialization function doesn't need to be mocked for unit tests + ray_headgroup_pod["volumes"].append( + client.ApiClient().sanitize_for_serialization(tls_secret_volume) + ) + ray_headgroup_pod["containers"].append( + client.ApiClient().sanitize_for_serialization(oauth_sidecar) + ) + + +def _create_oauth_sidecar_object( + namespace: str, + tls_mount_location: str, + oauth_port: int, + oauth_sa_name: str, + tls_volume_name: str, + port_name: str, +) -> client.V1Container: + return client.V1Container( + args=[ + f"--https-address=:{oauth_port}", + "--provider=openshift", + f"--openshift-service-account={oauth_sa_name}", + "--upstream=http://localhost:8265", + f"--tls-cert={tls_mount_location}/tls.crt", + f"--tls-key={tls_mount_location}/tls.key", + f"--cookie-secret={b64encode(urandom(64)).decode('utf-8')}", # create random string for encrypting cookie + f'--openshift-delegate-urls={{"/":{{"resource":"pods","namespace":"{namespace}","verb":"get"}}}}', + ], + image="registry.redhat.io/openshift4/ose-oauth-proxy@sha256:1ea6a01bf3e63cdcf125c6064cbd4a4a270deaf0f157b3eabb78f60556840366", + name="oauth-proxy", + ports=[client.V1ContainerPort(container_port=oauth_port, name=port_name)], + resources=client.V1ResourceRequirements(limits=None, requests=None), + volume_mounts=[ + client.V1VolumeMount( + mount_path=tls_mount_location, name=tls_volume_name, read_only=True + ) + ], + ) + + def generate_appwrapper( name: str, namespace: str, @@ -421,6 +509,7 @@

    Module codeflare_sdk.utils.generate_yaml

    image_pull_secrets: list, dispatch_priority: str, priority_val: int, + openshift_oauth: bool, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -464,6 +553,10 @@

    Module codeflare_sdk.utils.generate_yaml

    enable_local_interactive(resources, cluster_name, namespace) else: disable_raycluster_tls(resources["resources"]) + + if openshift_oauth: + enable_openshift_oauth(user_yaml, cluster_name, namespace) + outfile = appwrapper_name + ".yaml" write_user_appwrapper(user_yaml, outfile) return outfile @@ -594,6 +687,74 @@

    Functions

    ][0].get("command")[2] = command +
    +def enable_openshift_oauth(user_yaml, cluster_name, namespace) +
    +
    +
    +
    + +Expand source code + +
    def enable_openshift_oauth(user_yaml, cluster_name, namespace):
    +    config_check()
    +    k8_client = api_config_handler() or client.ApiClient()
    +    tls_mount_location = "/etc/tls/private"
    +    oauth_port = 8443
    +    oauth_sa_name = f"{cluster_name}-oauth-proxy"
    +    tls_secret_name = f"{cluster_name}-proxy-tls-secret"
    +    tls_volume_name = "proxy-tls-secret"
    +    port_name = "oauth-proxy"
    +    host = _get_api_host(k8_client)
    +    host = host.replace(
    +        "api.", f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps."
    +    )
    +    oauth_sidecar = _create_oauth_sidecar_object(
    +        namespace,
    +        tls_mount_location,
    +        oauth_port,
    +        oauth_sa_name,
    +        tls_volume_name,
    +        port_name,
    +    )
    +    tls_secret_volume = client.V1Volume(
    +        name=tls_volume_name,
    +        secret=client.V1SecretVolumeSource(secret_name=tls_secret_name),
    +    )
    +    # allows for setting value of Cluster object when initializing object from an existing AppWrapper on cluster
    +    user_yaml["metadata"]["annotations"] = user_yaml["metadata"].get("annotations", {})
    +    user_yaml["metadata"]["annotations"][
    +        "codeflare-sdk-use-oauth"
    +    ] = "true"  # if the user gets an
    +    ray_headgroup_pod = user_yaml["spec"]["resources"]["GenericItems"][0][
    +        "generictemplate"
    +    ]["spec"]["headGroupSpec"]["template"]["spec"]
    +    user_yaml["spec"]["resources"]["GenericItems"].pop(1)
    +    ray_headgroup_pod["serviceAccount"] = oauth_sa_name
    +    ray_headgroup_pod["volumes"] = ray_headgroup_pod.get("volumes", [])
    +
    +    # we use a generic api client here so that the serialization function doesn't need to be mocked for unit tests
    +    ray_headgroup_pod["volumes"].append(
    +        client.ApiClient().sanitize_for_serialization(tls_secret_volume)
    +    )
    +    ray_headgroup_pod["containers"].append(
    +        client.ApiClient().sanitize_for_serialization(oauth_sidecar)
    +    )
    +
    +
    +
    +def gen_dashboard_route_name(cluster_name) +
    +
    +
    +
    + +Expand source code + +
    def gen_dashboard_route_name(cluster_name):
    +    return f"ray-dashboard-{cluster_name}"
    +
    +
    def gen_names(name)
    @@ -614,7 +775,7 @@

    Functions

    -def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int) +def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int, openshift_oauth: bool)
    @@ -643,6 +804,7 @@

    Functions

    image_pull_secrets: list, dispatch_priority: str, priority_val: int, + openshift_oauth: bool, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -686,6 +848,10 @@

    Functions

    enable_local_interactive(resources, cluster_name, namespace) else: disable_raycluster_tls(resources["resources"]) + + if openshift_oauth: + enable_openshift_oauth(user_yaml, cluster_name, namespace) + outfile = appwrapper_name + ".yaml" write_user_appwrapper(user_yaml, outfile) return outfile @@ -821,7 +987,7 @@

    Functions

    def update_dashboard_route(route_item, cluster_name, namespace):
         metadata = route_item.get("generictemplate", {}).get("metadata")
    -    metadata["name"] = f"ray-dashboard-{cluster_name}"
    +    metadata["name"] = gen_dashboard_route_name(cluster_name)
         metadata["namespace"] = namespace
         metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc"
         spec = route_item.get("generictemplate", {}).get("spec")
    @@ -1078,6 +1244,8 @@ 

    Index

    +
    codeflare_sdk.utils.openshift_oauth
    +
    +
    +
    codeflare_sdk.utils.pretty_print

    This sub-module exists primarily to be used internally by the Cluster object @@ -70,6 +74,7 @@

    Index

  • codeflare_sdk.utils.generate_cert
  • codeflare_sdk.utils.generate_yaml
  • codeflare_sdk.utils.kube_api_helpers
  • +
  • codeflare_sdk.utils.openshift_oauth
  • codeflare_sdk.utils.pretty_print
  • diff --git a/docs/utils/kube_api_helpers.html b/docs/utils/kube_api_helpers.html index 4c2ecb78..37034ab7 100644 --- a/docs/utils/kube_api_helpers.html +++ b/docs/utils/kube_api_helpers.html @@ -50,6 +50,7 @@

    Module codeflare_sdk.utils.kube_api_helpers

    import executing from kubernetes import client, config +from urllib3.util import parse_url # private methods @@ -72,7 +73,11 @@

    Module codeflare_sdk.utils.kube_api_helpers

    raise PermissionError(perm_msg) elif e.reason == "Conflict": raise FileExistsError(exists_msg) - raise e
+    raise e
+
+
+def _get_api_host(api_client: client.ApiClient):  # pragma: no cover
+    return parse_url(api_client.configuration.host).host
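parse_url keeps only the hostname, dropping the scheme and port, which is what both this helper and the torchx_config change rely on. A quick check against a placeholder API endpoint:

    from urllib3.util import parse_url

    endpoint = "https://api.mycluster.example.com:6443"   # placeholder endpoint
    print(parse_url(endpoint).host)   # api.mycluster.example.com
    print(parse_url(endpoint).port)   # 6443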
    diff --git a/docs/utils/openshift_oauth.html b/docs/utils/openshift_oauth.html new file mode 100644 index 00000000..0fc27b6b --- /dev/null +++ b/docs/utils/openshift_oauth.html @@ -0,0 +1,347 @@ + + + + + + +codeflare_sdk.utils.openshift_oauth API documentation + + + + + + + + + + + +
    +
    +
    +

    Module codeflare_sdk.utils.openshift_oauth

    +
    +
    +
    + +Expand source code + +
    from urllib3.util import parse_url
    +from .generate_yaml import gen_dashboard_route_name
    +from .kube_api_helpers import _get_api_host
    +from base64 import b64decode
    +
    +from ..cluster.auth import config_check, api_config_handler
    +
    +from kubernetes import client
    +
    +
    +def create_openshift_oauth_objects(cluster_name, namespace):
    +    config_check()
    +    api_client = api_config_handler() or client.ApiClient()
    +    oauth_port = 8443
    +    oauth_sa_name = f"{cluster_name}-oauth-proxy"
    +    tls_secret_name = _gen_tls_secret_name(cluster_name)
    +    service_name = f"{cluster_name}-oauth"
    +    port_name = "oauth-proxy"
    +    host = _get_api_host(api_client)
    +
    +    # replace "^api" with the expected host
    +    host = f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps" + host.lstrip(
    +        "api"
    +    )
    +
    +    _create_or_replace_oauth_sa(namespace, oauth_sa_name, host)
    +    _create_or_replace_oauth_service_obj(
    +        cluster_name, namespace, oauth_port, tls_secret_name, service_name, port_name
    +    )
    +    _create_or_replace_oauth_ingress_object(
    +        cluster_name, namespace, service_name, port_name, host
    +    )
    +    _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name)
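For debugging, the module's two entry points can also be driven directly rather than through Cluster.up() and Cluster.down(); a hedged sketch, assuming you are already logged in and the named RayCluster exists:

    from codeflare_sdk.utils.openshift_oauth import (
        create_openshift_oauth_objects,
        delete_openshift_oauth_objects,
    )

    cluster_name, namespace = "raytest", "default"   # placeholders

    # Creates the ServiceAccount, oauth Service, passthrough Ingress and the
    # system:auth-delegator ClusterRoleBinding for the named cluster.
    create_openshift_oauth_objects(cluster_name, namespace)

    # Removes the same objects again (Cluster.down() does this automatically).
    delete_openshift_oauth_objects(cluster_name, namespace)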
    +
    +
    +def _create_or_replace_oauth_sa(namespace, oauth_sa_name, host):
    +    api_client = api_config_handler()
    +    oauth_sa = client.V1ServiceAccount(
    +        api_version="v1",
    +        kind="ServiceAccount",
    +        metadata=client.V1ObjectMeta(
    +            name=oauth_sa_name,
    +            namespace=namespace,
    +            annotations={
    +                "serviceaccounts.openshift.io/oauth-redirecturi.first": f"https://{host}"
    +            },
    +        ),
    +    )
    +    try:
    +        client.CoreV1Api(api_client).create_namespaced_service_account(
    +            namespace=namespace, body=oauth_sa
    +        )
    +    except client.ApiException as e:
    +        if e.reason == "Conflict":
    +            client.CoreV1Api(api_client).replace_namespaced_service_account(
    +                namespace=namespace,
    +                body=oauth_sa,
    +                name=oauth_sa_name,
    +            )
    +        else:
    +            raise e
    +
    +
    +def _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name):
    +    api_client = api_config_handler()
    +    oauth_crb = client.V1ClusterRoleBinding(
    +        api_version="rbac.authorization.k8s.io/v1",
    +        kind="ClusterRoleBinding",
    +        metadata=client.V1ObjectMeta(name=f"{cluster_name}-rb"),
    +        role_ref=client.V1RoleRef(
    +            api_group="rbac.authorization.k8s.io",
    +            kind="ClusterRole",
    +            name="system:auth-delegator",
    +        ),
    +        subjects=[
    +            client.V1Subject(
    +                kind="ServiceAccount", name=oauth_sa_name, namespace=namespace
    +            )
    +        ],
    +    )
    +    try:
    +        client.RbacAuthorizationV1Api(api_client).create_cluster_role_binding(
    +            body=oauth_crb
    +        )
    +    except client.ApiException as e:
    +        if e.reason == "Conflict":
    +            client.RbacAuthorizationV1Api(api_client).replace_cluster_role_binding(
    +                body=oauth_crb, name=f"{cluster_name}-rb"
    +            )
    +        else:
    +            raise e
    +
    +
    +def _gen_tls_secret_name(cluster_name):
    +    return f"{cluster_name}-proxy-tls-secret"
    +
    +
    +def delete_openshift_oauth_objects(cluster_name, namespace):
    +    # NOTE: it might be worth adding error handling here, but shouldn't be necessary because cluster.down(...) checks
    +    # for an existing cluster before calling this => the objects should never be deleted twice
    +    api_client = api_config_handler()
    +    oauth_sa_name = f"{cluster_name}-oauth-proxy"
    +    service_name = f"{cluster_name}-oauth"
    +    client.CoreV1Api(api_client).delete_namespaced_service_account(
    +        name=oauth_sa_name, namespace=namespace
    +    )
    +    client.CoreV1Api(api_client).delete_namespaced_service(
    +        name=service_name, namespace=namespace
    +    )
    +    client.NetworkingV1Api(api_client).delete_namespaced_ingress(
    +        name=f"{cluster_name}-ingress", namespace=namespace
    +    )
    +    client.RbacAuthorizationV1Api(api_client).delete_cluster_role_binding(
    +        name=f"{cluster_name}-rb"
    +    )
    +
    +
    +def _create_or_replace_oauth_service_obj(
    +    cluster_name: str,
    +    namespace: str,
    +    oauth_port: int,
    +    tls_secret_name: str,
    +    service_name: str,
    +    port_name: str,
    +) -> client.V1Service:
    +    api_client = api_config_handler()
    +    oauth_service = client.V1Service(
    +        api_version="v1",
    +        kind="Service",
    +        metadata=client.V1ObjectMeta(
    +            annotations={
    +                "service.beta.openshift.io/serving-cert-secret-name": tls_secret_name
    +            },
    +            name=service_name,
    +            namespace=namespace,
    +        ),
    +        spec=client.V1ServiceSpec(
    +            ports=[
    +                client.V1ServicePort(
    +                    name=port_name,
    +                    protocol="TCP",
    +                    port=443,
    +                    target_port=oauth_port,
    +                )
    +            ],
    +            selector={
    +                "app.kubernetes.io/created-by": "kuberay-operator",
    +                "app.kubernetes.io/name": "kuberay",
    +                "ray.io/cluster": cluster_name,
    +                "ray.io/identifier": f"{cluster_name}-head",
    +                "ray.io/node-type": "head",
    +            },
    +        ),
    +    )
    +    try:
    +        client.CoreV1Api(api_client).create_namespaced_service(
    +            namespace=namespace, body=oauth_service
    +        )
    +    except client.ApiException as e:
    +        if e.reason == "Conflict":
    +            client.CoreV1Api(api_client).replace_namespaced_service(
    +                namespace=namespace, body=oauth_service, name=service_name
    +            )
    +        else:
    +            raise e
    +
    +
    +def _create_or_replace_oauth_ingress_object(
    +    cluster_name: str,
    +    namespace: str,
    +    service_name: str,
    +    port_name: str,
    +    host: str,
    +) -> client.V1Ingress:
    +    api_client = api_config_handler()
    +    ingress = client.V1Ingress(
    +        api_version="networking.k8s.io/v1",
    +        kind="Ingress",
    +        metadata=client.V1ObjectMeta(
    +            annotations={"route.openshift.io/termination": "passthrough"},
    +            name=f"{cluster_name}-ingress",
    +            namespace=namespace,
    +        ),
    +        spec=client.V1IngressSpec(
    +            rules=[
    +                client.V1IngressRule(
    +                    host=host,
    +                    http=client.V1HTTPIngressRuleValue(
    +                        paths=[
    +                            client.V1HTTPIngressPath(
    +                                backend=client.V1IngressBackend(
    +                                    service=client.V1IngressServiceBackend(
    +                                        name=service_name,
    +                                        port=client.V1ServiceBackendPort(
    +                                            name=port_name
    +                                        ),
    +                                    )
    +                                ),
    +                                path_type="ImplementationSpecific",
    +                            )
    +                        ]
    +                    ),
    +                )
    +            ]
    +        ),
    +    )
    +    try:
    +        client.NetworkingV1Api(api_client).create_namespaced_ingress(
    +            namespace=namespace, body=ingress
    +        )
    +    except client.ApiException as e:
    +        if e.reason == "Conflict":
    +            client.NetworkingV1Api(api_client).replace_namespaced_ingress(
    +                namespace=namespace, body=ingress, name=f"{cluster_name}-ingress"
    +            )
    +        else:
    +            raise e
    +
    +
    +
    +
    +
    +
    +
    +
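To make the host rewrite in create_openshift_oauth_objects above concrete, here is a worked sketch. It assumes gen_dashboard_route_name(name) returns f"ray-dashboard-{name}" (consistent with the route names generated elsewhere in the SDK); the cluster name, namespace and API host are placeholders. Note that str.lstrip strips a set of characters rather than a literal prefix, which is sufficient here only because the API host starts with exactly "api.":

    # Hypothetical inputs.
    cluster_name = "raytest"
    namespace = "ns"
    api_host = "api.mycluster.example.com"          # what _get_api_host would return
    route_prefix = f"ray-dashboard-{cluster_name}"  # assumed gen_dashboard_route_name output

    # The leading "api" is stripped, leaving ".mycluster.example.com".
    host = f"{route_prefix}-{namespace}.apps" + api_host.lstrip("api")
    print(host)  # -> ray-dashboard-raytest-ns.apps.mycluster.example.com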

    Functions

    +
    +
    +def create_openshift_oauth_objects(cluster_name, namespace) +
    +
    +
    +
+
+Expand source code
+def create_openshift_oauth_objects(cluster_name, namespace):
    +    config_check()
    +    api_client = api_config_handler() or client.ApiClient()
    +    oauth_port = 8443
    +    oauth_sa_name = f"{cluster_name}-oauth-proxy"
    +    tls_secret_name = _gen_tls_secret_name(cluster_name)
    +    service_name = f"{cluster_name}-oauth"
    +    port_name = "oauth-proxy"
    +    host = _get_api_host(api_client)
    +
    +    # replace "^api" with the expected host
    +    host = f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps" + host.lstrip(
    +        "api"
    +    )
    +
    +    _create_or_replace_oauth_sa(namespace, oauth_sa_name, host)
    +    _create_or_replace_oauth_service_obj(
    +        cluster_name, namespace, oauth_port, tls_secret_name, service_name, port_name
    +    )
    +    _create_or_replace_oauth_ingress_object(
    +        cluster_name, namespace, service_name, port_name, host
    +    )
    +    _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name)
    +
    +
    +
    +def delete_openshift_oauth_objects(cluster_name, namespace) +
    +
    +
    +
+
+Expand source code
+def delete_openshift_oauth_objects(cluster_name, namespace):
    +    # NOTE: it might be worth adding error handling here, but shouldn't be necessary because cluster.down(...) checks
    +    # for an existing cluster before calling this => the objects should never be deleted twice
    +    api_client = api_config_handler()
    +    oauth_sa_name = f"{cluster_name}-oauth-proxy"
    +    service_name = f"{cluster_name}-oauth"
    +    client.CoreV1Api(api_client).delete_namespaced_service_account(
    +        name=oauth_sa_name, namespace=namespace
    +    )
    +    client.CoreV1Api(api_client).delete_namespaced_service(
    +        name=service_name, namespace=namespace
    +    )
    +    client.NetworkingV1Api(api_client).delete_namespaced_ingress(
    +        name=f"{cluster_name}-ingress", namespace=namespace
    +    )
    +    client.RbacAuthorizationV1Api(api_client).delete_cluster_role_binding(
    +        name=f"{cluster_name}-rb"
    +    )
    +
    +
    +
    +
    +
    +
    +
    + +
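Taken together, the two public functions are meant to bracket a Ray cluster's lifecycle when OAuth protection is enabled. A minimal usage sketch, assuming an already-authenticated OpenShift context; the cluster name and namespace are placeholders:

    from codeflare_sdk.utils.openshift_oauth import (
        create_openshift_oauth_objects,
        delete_openshift_oauth_objects,
    )

    # Create the ServiceAccount, Service, Ingress and ClusterRoleBinding alongside cluster.up()
    create_openshift_oauth_objects(cluster_name="raytest", namespace="ns")

    # ... use the OAuth-protected dashboard route ...

    # Remove the same objects alongside cluster.down()
    delete_openshift_oauth_objects(cluster_name="raytest", namespace="ns")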
    + + + From 830cbceaa13c380421ed42251ff208c5c1b07fdc Mon Sep 17 00:00:00 2001 From: Mustafa Eyceoz Date: Mon, 23 Oct 2023 11:38:51 -0400 Subject: [PATCH 035/496] Adding direct KubeRay compatibility to the SDK (#358) * Added component generation * Added multi-resource YAML support * Cluster.up on ray cluster object * Basic status and down for RayCluster * Finished up/down and added unit tests * Remove unused utils import * Applied review feedback * Changed naming of internal funcs * Review feedback applied, auto-select * OAuth conflict resolution --- src/codeflare_sdk/cluster/cluster.py | 222 ++++++++++++++------ src/codeflare_sdk/cluster/config.py | 1 + src/codeflare_sdk/job/jobs.py | 7 +- src/codeflare_sdk/utils/generate_yaml.py | 19 +- src/codeflare_sdk/utils/kube_api_helpers.py | 10 +- tests/test-case-no-mcad.yamls | 162 ++++++++++++++ tests/unit_test.py | 113 ++++++++-- 7 files changed, 450 insertions(+), 84 deletions(-) create mode 100644 tests/test-case-no-mcad.yamls diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 29c026bd..66475278 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -70,7 +70,7 @@ def __init__(self, config: ClusterConfiguration): self.config = config self.app_wrapper_yaml = self.create_app_wrapper() self.app_wrapper_name = self.app_wrapper_yaml.split(".")[0] - self._client = None + self._job_submission_client = None @property def _client_headers(self): @@ -86,23 +86,25 @@ def _client_verify_tls(self): return not self.config.openshift_oauth @property - def client(self): - if self._client: - return self._client + def job_client(self): + if self._job_submission_client: + return self._job_submission_client if self.config.openshift_oauth: print( api_config_handler().configuration.get_api_key_with_prefix( "authorization" ) ) - self._client = JobSubmissionClient( + self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), headers=self._client_headers, verify=self._client_verify_tls, ) else: - self._client = JobSubmissionClient(self.cluster_dashboard_uri()) - return self._client + self._job_submission_client = JobSubmissionClient( + self.cluster_dashboard_uri() + ) + return self._job_submission_client def evaluate_dispatch_priority(self): priority_class = self.config.dispatch_priority @@ -141,6 +143,10 @@ def create_app_wrapper(self): # Before attempting to create the cluster AW, let's evaluate the ClusterConfig if self.config.dispatch_priority: + if not self.config.mcad: + raise ValueError( + "Invalid Cluster Configuration, cannot have dispatch priority without MCAD" + ) priority_val = self.evaluate_dispatch_priority() if priority_val == None: raise ValueError( @@ -163,6 +169,7 @@ def create_app_wrapper(self): template = self.config.template image = self.config.image instascale = self.config.instascale + mcad = self.config.mcad instance_types = self.config.machine_types env = self.config.envs local_interactive = self.config.local_interactive @@ -183,6 +190,7 @@ def create_app_wrapper(self): template=template, image=image, instascale=instascale, + mcad=mcad, instance_types=instance_types, env=env, local_interactive=local_interactive, @@ -207,15 +215,18 @@ def up(self): try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - with open(self.app_wrapper_yaml) as f: - aw = yaml.load(f, Loader=yaml.FullLoader) - api_instance.create_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - 
plural="appwrappers", - body=aw, - ) + if self.config.mcad: + with open(self.app_wrapper_yaml) as f: + aw = yaml.load(f, Loader=yaml.FullLoader) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) + else: + self._component_resources_up(namespace, api_instance) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -228,13 +239,16 @@ def down(self): try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - api_instance.delete_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - name=self.app_wrapper_name, - ) + if self.config.mcad: + api_instance.delete_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + name=self.app_wrapper_name, + ) + else: + self._component_resources_down(namespace, api_instance) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -252,42 +266,46 @@ def status( """ ready = False status = CodeFlareClusterStatus.UNKNOWN - # check the app wrapper status - appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) - if appwrapper: - if appwrapper.status in [ - AppWrapperStatus.RUNNING, - AppWrapperStatus.COMPLETED, - AppWrapperStatus.RUNNING_HOLD_COMPLETION, - ]: - ready = False - status = CodeFlareClusterStatus.STARTING - elif appwrapper.status in [ - AppWrapperStatus.FAILED, - AppWrapperStatus.DELETED, - ]: - ready = False - status = CodeFlareClusterStatus.FAILED # should deleted be separate - return status, ready # exit early, no need to check ray status - elif appwrapper.status in [ - AppWrapperStatus.PENDING, - AppWrapperStatus.QUEUEING, - ]: - ready = False - if appwrapper.status == AppWrapperStatus.PENDING: - status = CodeFlareClusterStatus.QUEUED - else: - status = CodeFlareClusterStatus.QUEUEING - if print_to_console: - pretty_print.print_app_wrappers_status([appwrapper]) - return ( - status, - ready, - ) # no need to check the ray status since still in queue + if self.config.mcad: + # check the app wrapper status + appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) + if appwrapper: + if appwrapper.status in [ + AppWrapperStatus.RUNNING, + AppWrapperStatus.COMPLETED, + AppWrapperStatus.RUNNING_HOLD_COMPLETION, + ]: + ready = False + status = CodeFlareClusterStatus.STARTING + elif appwrapper.status in [ + AppWrapperStatus.FAILED, + AppWrapperStatus.DELETED, + ]: + ready = False + status = CodeFlareClusterStatus.FAILED # should deleted be separate + return status, ready # exit early, no need to check ray status + elif appwrapper.status in [ + AppWrapperStatus.PENDING, + AppWrapperStatus.QUEUEING, + ]: + ready = False + if appwrapper.status == AppWrapperStatus.PENDING: + status = CodeFlareClusterStatus.QUEUED + else: + status = CodeFlareClusterStatus.QUEUEING + if print_to_console: + pretty_print.print_app_wrappers_status([appwrapper]) + return ( + status, + ready, + ) # no need to check the ray status since still in queue # check the ray cluster status cluster = _ray_cluster_status(self.config.name, self.config.namespace) - if cluster and not cluster.status == RayClusterStatus.UNKNOWN: + if cluster: + if cluster.status == RayClusterStatus.UNKNOWN: + ready = False + status = CodeFlareClusterStatus.STARTING if cluster.status == RayClusterStatus.READY: ready = True status = 
CodeFlareClusterStatus.READY @@ -407,19 +425,19 @@ def list_jobs(self) -> List: """ This method accesses the head ray node in your cluster and lists the running jobs. """ - return self.client.list_jobs() + return self.job_client.list_jobs() def job_status(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the job status for the provided job id. """ - return self.client.get_job_status(job_id) + return self.job_client.get_job_status(job_id) def job_logs(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the logs for the provided job id. """ - return self.client.get_job_logs(job_id) + return self.job_client.get_job_logs(job_id) def torchx_config( self, working_dir: str = None, requirements: str = None @@ -435,7 +453,7 @@ def torchx_config( to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc): + def from_k8_cluster_object(rc, mcad=True): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -474,6 +492,7 @@ def from_k8_cluster_object(rc): 0 ]["image"], local_interactive=local_interactive, + mcad=mcad, ) return Cluster(cluster_config) @@ -484,6 +503,66 @@ def local_client_url(self): else: return "None" + def _component_resources_up( + self, namespace: str, api_instance: client.CustomObjectsApi + ): + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + for resource in yamls: + if resource["kind"] == "RayCluster": + api_instance.create_namespaced_custom_object( + group="ray.io", + version="v1alpha1", + namespace=namespace, + plural="rayclusters", + body=resource, + ) + elif resource["kind"] == "Route": + api_instance.create_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + body=resource, + ) + elif resource["kind"] == "Secret": + secret_instance = client.CoreV1Api(api_config_handler()) + secret_instance.create_namespaced_secret( + namespace=namespace, + body=resource, + ) + + def _component_resources_down( + self, namespace: str, api_instance: client.CustomObjectsApi + ): + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + for resource in yamls: + if resource["kind"] == "RayCluster": + api_instance.delete_namespaced_custom_object( + group="ray.io", + version="v1alpha1", + namespace=namespace, + plural="rayclusters", + name=self.app_wrapper_name, + ) + elif resource["kind"] == "Route": + name = resource["metadata"]["name"] + api_instance.delete_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + name=name, + ) + elif resource["kind"] == "Secret": + name = resource["metadata"]["name"] + secret_instance = client.CoreV1Api(api_config_handler()) + secret_instance.delete_namespaced_secret( + namespace=namespace, + name=name, + ) + def list_all_clusters(namespace: str, print_to_console: bool = True): """ @@ -549,13 +628,33 @@ def get_cluster(cluster_name: str, namespace: str = "default"): for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: - return Cluster.from_k8_cluster_object(rc) + mcad = _check_aw_exists(cluster_name, namespace) + return Cluster.from_k8_cluster_object(rc, mcad=mcad) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" ) # private methods +def _check_aw_exists(name: str, namespace: str) -> bool: + try: + config_check() + 
api_instance = client.CustomObjectsApi(api_config_handler()) + aws = api_instance.list_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e, print_error=False) + + for aw in aws["items"]: + if aw["metadata"]["name"] == name: + return True + return False + + def _get_ingress_domain(): try: config_check() @@ -660,6 +759,7 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) + # UPDATE THIS routes = api_instance.list_namespaced_custom_object( group="route.openshift.io", version="v1", diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index fe83e9e5..a21318ab 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -46,6 +46,7 @@ class ClusterConfiguration: num_gpus: int = 0 template: str = f"{dir}/templates/base-template.yaml" instascale: bool = False + mcad: bool = True envs: dict = field(default_factory=dict) image: str = "quay.io/project-codeflare/ray:latest-py39-cu118" local_interactive: bool = False diff --git a/src/codeflare_sdk/job/jobs.py b/src/codeflare_sdk/job/jobs.py index 27f15283..c3814971 100644 --- a/src/codeflare_sdk/job/jobs.py +++ b/src/codeflare_sdk/job/jobs.py @@ -22,9 +22,6 @@ from torchx.schedulers.ray_scheduler import RayScheduler from torchx.specs import AppHandle, parse_app_handle, AppDryRunInfo -from ray.job_submission import JobSubmissionClient - -import openshift as oc if TYPE_CHECKING: from ..cluster.cluster import Cluster @@ -96,9 +93,9 @@ def __init__( def _dry_run(self, cluster: "Cluster"): j = f"{cluster.config.num_workers}x{max(cluster.config.num_gpus, 1)}" # # of proc. 
= # of gpus - runner = get_runner(ray_client=cluster.client) + runner = get_runner(ray_client=cluster.job_client) runner._scheduler_instances["ray"] = RayScheduler( - session_name=runner._name, ray_client=cluster.client + session_name=runner._name, ray_client=cluster.job_client ) return ( runner.dryrun( diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 4757f537..a833892a 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -457,6 +457,19 @@ def _create_oauth_sidecar_object( ) +def write_components(user_yaml: dict, output_file_name: str): + components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") + open(output_file_name, "w").close() + with open(output_file_name, "a") as outfile: + for component in components: + if "generictemplate" in component: + outfile.write("---\n") + yaml.dump( + component["generictemplate"], outfile, default_flow_style=False + ) + print(f"Written to: {output_file_name}") + + def generate_appwrapper( name: str, namespace: str, @@ -472,6 +485,7 @@ def generate_appwrapper( template: str, image: str, instascale: bool, + mcad: bool, instance_types: list, env, local_interactive: bool, @@ -527,5 +541,8 @@ def generate_appwrapper( enable_openshift_oauth(user_yaml, cluster_name, namespace) outfile = appwrapper_name + ".yaml" - write_user_appwrapper(user_yaml, outfile) + if not mcad: + write_components(user_yaml, outfile) + else: + write_user_appwrapper(user_yaml, outfile) return outfile diff --git a/src/codeflare_sdk/utils/kube_api_helpers.py b/src/codeflare_sdk/utils/kube_api_helpers.py index 8f8180b9..01a93ef5 100644 --- a/src/codeflare_sdk/utils/kube_api_helpers.py +++ b/src/codeflare_sdk/utils/kube_api_helpers.py @@ -23,7 +23,9 @@ # private methods -def _kube_api_error_handling(e: Exception): # pragma: no cover +def _kube_api_error_handling( + e: Exception, print_error: bool = True +): # pragma: no cover perm_msg = ( "Action not permitted, have you put in correct/up-to-date auth credentials?" 
) @@ -32,11 +34,13 @@ def _kube_api_error_handling(e: Exception): # pragma: no cover if type(e) == config.ConfigException: raise PermissionError(perm_msg) if type(e) == executing.executing.NotOneValueFound: - print(nf_msg) + if print_error: + print(nf_msg) return if type(e) == client.ApiException: if e.reason == "Not Found": - print(nf_msg) + if print_error: + print(nf_msg) return elif e.reason == "Unauthorized" or e.reason == "Forbidden": raise PermissionError(perm_msg) diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls new file mode 100644 index 00000000..6d905566 --- /dev/null +++ b/tests/test-case-no-mcad.yamls @@ -0,0 +1,162 @@ +--- +apiVersion: ray.io/v1alpha1 +kind: RayCluster +metadata: + labels: + appwrapper.mcad.ibm.com: unit-test-cluster-ray + controller-tools.k8s.io: '1.0' + name: unit-test-cluster-ray + namespace: ns +spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + serviceType: ClusterIP + template: + spec: + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: unit-test-cluster-ray + operator: In + values: + - unit-test-cluster-ray + containers: + - env: + - name: MY_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: RAY_USE_TLS + value: '0' + - name: RAY_TLS_SERVER_CERT + value: /home/ray/workspace/tls/server.crt + - name: RAY_TLS_SERVER_KEY + value: /home/ray/workspace/tls/server.key + - name: RAY_TLS_CA_CERT + value: /home/ray/workspace/tls/ca.crt + image: quay.io/project-codeflare/ray:latest-py39-cu118 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + requests: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + imagePullSecrets: + - name: unit-test-pull-secret + rayVersion: 2.7.0 + workerGroupSpecs: + - groupName: small-group-unit-test-cluster-ray + maxReplicas: 2 + minReplicas: 2 + rayStartParams: + block: 'true' + num-gpus: '7' + replicas: 2 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: unit-test-cluster-ray + operator: In + values: + - unit-test-cluster-ray + containers: + - env: + - name: MY_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: RAY_USE_TLS + value: '0' + - name: RAY_TLS_SERVER_CERT + value: /home/ray/workspace/tls/server.crt + - name: RAY_TLS_SERVER_KEY + value: /home/ray/workspace/tls/server.key + - name: RAY_TLS_CA_CERT + value: /home/ray/workspace/tls/ca.crt + image: quay.io/project-codeflare/ray:latest-py39-cu118 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 4 + memory: 6G + nvidia.com/gpu: 7 + requests: + cpu: 3 + memory: 5G + nvidia.com/gpu: 7 + imagePullSecrets: + - name: unit-test-pull-secret + initContainers: + - command: + - sh + - -c + - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; + do 
echo waiting for myservice; sleep 2; done + image: busybox:1.28 + name: init-myservice +--- +apiVersion: route.openshift.io/v1 +kind: Route +metadata: + labels: + odh-ray-cluster-service: unit-test-cluster-ray-head-svc + name: ray-dashboard-unit-test-cluster-ray + namespace: ns +spec: + port: + targetPort: dashboard + to: + kind: Service + name: unit-test-cluster-ray-head-svc diff --git a/tests/unit_test.py b/tests/unit_test.py index f2c86f1f..9ac13159 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -34,6 +34,7 @@ get_cluster, _app_wrapper_status, _ray_cluster_status, + _get_ingress_domain, ) from codeflare_sdk.cluster.auth import ( TokenAuthentication, @@ -242,6 +243,8 @@ def test_config_creation(): assert config.machine_types == ["cpu.small", "gpu.large"] assert config.image_pull_secrets == ["unit-test-pull-secret"] assert config.dispatch_priority == None + assert config.mcad == True + assert config.local_interactive == False def test_cluster_creation(): @@ -253,6 +256,20 @@ def test_cluster_creation(): ) +def test_cluster_creation_no_mcad(): + config = createClusterConfig() + config.name = "unit-test-cluster-ray" + config.mcad = False + cluster = Cluster(config) + assert cluster.app_wrapper_yaml == "unit-test-cluster-ray.yaml" + assert cluster.app_wrapper_name == "unit-test-cluster-ray" + assert filecmp.cmp( + "unit-test-cluster-ray.yaml", + f"{parent}/tests/test-case-no-mcad.yamls", + shallow=True, + ) + + def test_cluster_creation_priority(mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( @@ -286,23 +303,49 @@ def test_default_cluster_creation(mocker): def arg_check_apply_effect(group, version, namespace, plural, body, *args): - assert group == "workload.codeflare.dev" - assert version == "v1beta1" assert namespace == "ns" - assert plural == "appwrappers" - with open("unit-test-cluster.yaml") as f: - aw = yaml.load(f, Loader=yaml.FullLoader) - assert body == aw assert args == tuple() + if plural == "appwrappers": + assert group == "workload.codeflare.dev" + assert version == "v1beta1" + with open("unit-test-cluster.yaml") as f: + aw = yaml.load(f, Loader=yaml.FullLoader) + assert body == aw + elif plural == "rayclusters": + assert group == "ray.io" + assert version == "v1alpha1" + with open("unit-test-cluster-ray.yaml") as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + for resource in yamls: + if resource["kind"] == "RayCluster": + assert body == resource + elif plural == "routes": + assert group == "route.openshift.io" + assert version == "v1" + with open("unit-test-cluster-ray.yaml") as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + for resource in yamls: + if resource["kind"] == "Route": + assert body == resource + else: + assert 1 == 0 def arg_check_del_effect(group, version, namespace, plural, name, *args): - assert group == "workload.codeflare.dev" - assert version == "v1beta1" assert namespace == "ns" - assert plural == "appwrappers" - assert name == "unit-test-cluster" assert args == tuple() + if plural == "appwrappers": + assert group == "workload.codeflare.dev" + assert version == "v1beta1" + assert name == "unit-test-cluster" + elif plural == "rayclusters": + assert group == "ray.io" + assert version == "v1alpha1" + assert name == "unit-test-cluster-ray" + elif plural == "routes": + assert group == "route.openshift.io" + assert version == "v1" + assert name == "ray-dashboard-unit-test-cluster-ray" def test_cluster_up_down(mocker): @@ -324,6 +367,47 @@ def test_cluster_up_down(mocker): 
cluster.down() +def test_cluster_up_down_no_mcad(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.create_namespaced_custom_object", + side_effect=arg_check_apply_effect, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object", + side_effect=arg_check_del_effect, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", + return_value={"items": []}, + ) + config = createClusterConfig() + config.name = "unit-test-cluster-ray" + config.mcad = False + cluster = Cluster(config) + cluster.up() + cluster.down() + + +def arg_check_list_effect(group, version, plural, name, *args): + assert group == "config.openshift.io" + assert version == "v1" + assert plural == "ingresses" + assert name == "cluster" + assert args == tuple() + return {"spec": {"domain": "test"}} + + +def test_get_ingress_domain(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + side_effect=arg_check_list_effect, + ) + domain = _get_ingress_domain() + assert domain == "test" + + def aw_status_fields(group, version, namespace, plural, *args): assert group == "workload.codeflare.dev" assert version == "v1beta1" @@ -1851,7 +1935,7 @@ def test_DDPJobDefinition_dry_run(mocker: MockerFixture): "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", return_value="", ) - mocker.patch.object(Cluster, "client") + mocker.patch.object(Cluster, "job_client") ddp = createTestDDP() cluster = createClusterWithConfig() ddp_job, _ = ddp._dry_run(cluster) @@ -1921,7 +2005,7 @@ def test_DDPJobDefinition_dry_run_no_resource_args(mocker): Test that the dry run correctly gets resources from the cluster object when the job definition does not specify resources. 
""" - mocker.patch.object(Cluster, "client") + mocker.patch.object(Cluster, "job_client") mocker.patch( "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", return_value="", @@ -2013,7 +2097,7 @@ def test_DDPJobDefinition_submit(mocker: MockerFixture): mock_schedule = MagicMock() mocker.patch.object(Runner, "schedule", mock_schedule) mock_schedule.return_value = "fake-dashboard-url" - mocker.patch.object(Cluster, "client") + mocker.patch.object(Cluster, "job_client") ddp_def = createTestDDP() cluster = createClusterWithConfig() mocker.patch( @@ -2040,7 +2124,7 @@ def test_DDPJobDefinition_submit(mocker: MockerFixture): def test_DDPJob_creation(mocker: MockerFixture): - mocker.patch.object(Cluster, "client") + mocker.patch.object(Cluster, "job_client") mock_schedule = MagicMock() mocker.patch.object(Runner, "schedule", mock_schedule) mocker.patch.object( @@ -2432,6 +2516,7 @@ def test_cleanup(): os.remove("unit-test-cluster.yaml") os.remove("prio-test-cluster.yaml") os.remove("unit-test-default-cluster.yaml") + os.remove("unit-test-cluster-ray.yaml") os.remove("test.yaml") os.remove("raytest2.yaml") os.remove("quicktest.yaml") From f57497e72e18ed77561302d667a1e005784d8050 Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Mon, 23 Oct 2023 20:08:24 +0000 Subject: [PATCH 036/496] Changes in docs for release: v0.10.1 --- docs/cluster/cluster.html | 569 +++++++++++++++++++++---------- docs/cluster/config.html | 9 +- docs/job/jobs.html | 11 +- docs/utils/generate_yaml.html | 50 ++- docs/utils/kube_api_helpers.html | 10 +- 5 files changed, 450 insertions(+), 199 deletions(-) diff --git a/docs/cluster/cluster.html b/docs/cluster/cluster.html index 7b391787..e33b9cc2 100644 --- a/docs/cluster/cluster.html +++ b/docs/cluster/cluster.html @@ -102,7 +102,7 @@

    Module codeflare_sdk.cluster.cluster

    self.config = config self.app_wrapper_yaml = self.create_app_wrapper() self.app_wrapper_name = self.app_wrapper_yaml.split(".")[0] - self._client = None + self._job_submission_client = None @property def _client_headers(self): @@ -118,23 +118,25 @@

    Module codeflare_sdk.cluster.cluster

    return not self.config.openshift_oauth @property - def client(self): - if self._client: - return self._client + def job_client(self): + if self._job_submission_client: + return self._job_submission_client if self.config.openshift_oauth: print( api_config_handler().configuration.get_api_key_with_prefix( "authorization" ) ) - self._client = JobSubmissionClient( + self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), headers=self._client_headers, verify=self._client_verify_tls, ) else: - self._client = JobSubmissionClient(self.cluster_dashboard_uri()) - return self._client + self._job_submission_client = JobSubmissionClient( + self.cluster_dashboard_uri() + ) + return self._job_submission_client def evaluate_dispatch_priority(self): priority_class = self.config.dispatch_priority @@ -173,6 +175,10 @@

    Module codeflare_sdk.cluster.cluster

    # Before attempting to create the cluster AW, let's evaluate the ClusterConfig if self.config.dispatch_priority: + if not self.config.mcad: + raise ValueError( + "Invalid Cluster Configuration, cannot have dispatch priority without MCAD" + ) priority_val = self.evaluate_dispatch_priority() if priority_val == None: raise ValueError( @@ -195,6 +201,7 @@

    Module codeflare_sdk.cluster.cluster

    template = self.config.template image = self.config.image instascale = self.config.instascale + mcad = self.config.mcad instance_types = self.config.machine_types env = self.config.envs local_interactive = self.config.local_interactive @@ -215,6 +222,7 @@

    Module codeflare_sdk.cluster.cluster

    template=template, image=image, instascale=instascale, + mcad=mcad, instance_types=instance_types, env=env, local_interactive=local_interactive, @@ -239,15 +247,18 @@

    Module codeflare_sdk.cluster.cluster

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - with open(self.app_wrapper_yaml) as f: - aw = yaml.load(f, Loader=yaml.FullLoader) - api_instance.create_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - body=aw, - ) + if self.config.mcad: + with open(self.app_wrapper_yaml) as f: + aw = yaml.load(f, Loader=yaml.FullLoader) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) + else: + self._component_resources_up(namespace, api_instance) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -260,13 +271,16 @@

    Module codeflare_sdk.cluster.cluster

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - api_instance.delete_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - name=self.app_wrapper_name, - ) + if self.config.mcad: + api_instance.delete_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + name=self.app_wrapper_name, + ) + else: + self._component_resources_down(namespace, api_instance) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -284,42 +298,46 @@

    Module codeflare_sdk.cluster.cluster

    """ ready = False status = CodeFlareClusterStatus.UNKNOWN - # check the app wrapper status - appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) - if appwrapper: - if appwrapper.status in [ - AppWrapperStatus.RUNNING, - AppWrapperStatus.COMPLETED, - AppWrapperStatus.RUNNING_HOLD_COMPLETION, - ]: - ready = False - status = CodeFlareClusterStatus.STARTING - elif appwrapper.status in [ - AppWrapperStatus.FAILED, - AppWrapperStatus.DELETED, - ]: - ready = False - status = CodeFlareClusterStatus.FAILED # should deleted be separate - return status, ready # exit early, no need to check ray status - elif appwrapper.status in [ - AppWrapperStatus.PENDING, - AppWrapperStatus.QUEUEING, - ]: - ready = False - if appwrapper.status == AppWrapperStatus.PENDING: - status = CodeFlareClusterStatus.QUEUED - else: - status = CodeFlareClusterStatus.QUEUEING - if print_to_console: - pretty_print.print_app_wrappers_status([appwrapper]) - return ( - status, - ready, - ) # no need to check the ray status since still in queue + if self.config.mcad: + # check the app wrapper status + appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) + if appwrapper: + if appwrapper.status in [ + AppWrapperStatus.RUNNING, + AppWrapperStatus.COMPLETED, + AppWrapperStatus.RUNNING_HOLD_COMPLETION, + ]: + ready = False + status = CodeFlareClusterStatus.STARTING + elif appwrapper.status in [ + AppWrapperStatus.FAILED, + AppWrapperStatus.DELETED, + ]: + ready = False + status = CodeFlareClusterStatus.FAILED # should deleted be separate + return status, ready # exit early, no need to check ray status + elif appwrapper.status in [ + AppWrapperStatus.PENDING, + AppWrapperStatus.QUEUEING, + ]: + ready = False + if appwrapper.status == AppWrapperStatus.PENDING: + status = CodeFlareClusterStatus.QUEUED + else: + status = CodeFlareClusterStatus.QUEUEING + if print_to_console: + pretty_print.print_app_wrappers_status([appwrapper]) + return ( + status, + ready, + ) # no need to check the ray status since still in queue # check the ray cluster status cluster = _ray_cluster_status(self.config.name, self.config.namespace) - if cluster and not cluster.status == RayClusterStatus.UNKNOWN: + if cluster: + if cluster.status == RayClusterStatus.UNKNOWN: + ready = False + status = CodeFlareClusterStatus.STARTING if cluster.status == RayClusterStatus.READY: ready = True status = CodeFlareClusterStatus.READY @@ -439,19 +457,19 @@

    Module codeflare_sdk.cluster.cluster

    """ This method accesses the head ray node in your cluster and lists the running jobs. """ - return self.client.list_jobs() + return self.job_client.list_jobs() def job_status(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the job status for the provided job id. """ - return self.client.get_job_status(job_id) + return self.job_client.get_job_status(job_id) def job_logs(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the logs for the provided job id. """ - return self.client.get_job_logs(job_id) + return self.job_client.get_job_logs(job_id) def torchx_config( self, working_dir: str = None, requirements: str = None @@ -467,7 +485,7 @@

    Module codeflare_sdk.cluster.cluster

    to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc): + def from_k8_cluster_object(rc, mcad=True): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -506,6 +524,7 @@

    Module codeflare_sdk.cluster.cluster

    0 ]["image"], local_interactive=local_interactive, + mcad=mcad, ) return Cluster(cluster_config) @@ -516,6 +535,66 @@

    Module codeflare_sdk.cluster.cluster

    else: return "None" + def _component_resources_up( + self, namespace: str, api_instance: client.CustomObjectsApi + ): + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + for resource in yamls: + if resource["kind"] == "RayCluster": + api_instance.create_namespaced_custom_object( + group="ray.io", + version="v1alpha1", + namespace=namespace, + plural="rayclusters", + body=resource, + ) + elif resource["kind"] == "Route": + api_instance.create_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + body=resource, + ) + elif resource["kind"] == "Secret": + secret_instance = client.CoreV1Api(api_config_handler()) + secret_instance.create_namespaced_secret( + namespace=namespace, + body=resource, + ) + + def _component_resources_down( + self, namespace: str, api_instance: client.CustomObjectsApi + ): + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + for resource in yamls: + if resource["kind"] == "RayCluster": + api_instance.delete_namespaced_custom_object( + group="ray.io", + version="v1alpha1", + namespace=namespace, + plural="rayclusters", + name=self.app_wrapper_name, + ) + elif resource["kind"] == "Route": + name = resource["metadata"]["name"] + api_instance.delete_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + name=name, + ) + elif resource["kind"] == "Secret": + name = resource["metadata"]["name"] + secret_instance = client.CoreV1Api(api_config_handler()) + secret_instance.delete_namespaced_secret( + namespace=namespace, + name=name, + ) + def list_all_clusters(namespace: str, print_to_console: bool = True): """ @@ -581,13 +660,33 @@

    Module codeflare_sdk.cluster.cluster

    for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: - return Cluster.from_k8_cluster_object(rc) + mcad = _check_aw_exists(cluster_name, namespace) + return Cluster.from_k8_cluster_object(rc, mcad=mcad) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" ) # private methods +def _check_aw_exists(name: str, namespace: str) -> bool: + try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + aws = api_instance.list_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e, print_error=False) + + for aw in aws["items"]: + if aw["metadata"]["name"] == name: + return True + return False + + def _get_ingress_domain(): try: config_check() @@ -692,6 +791,7 @@

    Module codeflare_sdk.cluster.cluster

    config_check() api_instance = client.CustomObjectsApi(api_config_handler()) + # UPDATE THIS routes = api_instance.list_namespaced_custom_object( group="route.openshift.io", version="v1", @@ -806,7 +906,8 @@

    Functions

    for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: - return Cluster.from_k8_cluster_object(rc) + mcad = _check_aw_exists(cluster_name, namespace) + return Cluster.from_k8_cluster_object(rc, mcad=mcad) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" ) @@ -930,7 +1031,7 @@

    Classes

    self.config = config self.app_wrapper_yaml = self.create_app_wrapper() self.app_wrapper_name = self.app_wrapper_yaml.split(".")[0] - self._client = None + self._job_submission_client = None @property def _client_headers(self): @@ -946,23 +1047,25 @@

    Classes

    return not self.config.openshift_oauth @property - def client(self): - if self._client: - return self._client + def job_client(self): + if self._job_submission_client: + return self._job_submission_client if self.config.openshift_oauth: print( api_config_handler().configuration.get_api_key_with_prefix( "authorization" ) ) - self._client = JobSubmissionClient( + self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), headers=self._client_headers, verify=self._client_verify_tls, ) else: - self._client = JobSubmissionClient(self.cluster_dashboard_uri()) - return self._client + self._job_submission_client = JobSubmissionClient( + self.cluster_dashboard_uri() + ) + return self._job_submission_client def evaluate_dispatch_priority(self): priority_class = self.config.dispatch_priority @@ -1001,6 +1104,10 @@

    Classes

    # Before attempting to create the cluster AW, let's evaluate the ClusterConfig if self.config.dispatch_priority: + if not self.config.mcad: + raise ValueError( + "Invalid Cluster Configuration, cannot have dispatch priority without MCAD" + ) priority_val = self.evaluate_dispatch_priority() if priority_val == None: raise ValueError( @@ -1023,6 +1130,7 @@

    Classes

    template = self.config.template image = self.config.image instascale = self.config.instascale + mcad = self.config.mcad instance_types = self.config.machine_types env = self.config.envs local_interactive = self.config.local_interactive @@ -1043,6 +1151,7 @@

    Classes

    template=template, image=image, instascale=instascale, + mcad=mcad, instance_types=instance_types, env=env, local_interactive=local_interactive, @@ -1067,15 +1176,18 @@

    Classes

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - with open(self.app_wrapper_yaml) as f: - aw = yaml.load(f, Loader=yaml.FullLoader) - api_instance.create_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - body=aw, - ) + if self.config.mcad: + with open(self.app_wrapper_yaml) as f: + aw = yaml.load(f, Loader=yaml.FullLoader) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) + else: + self._component_resources_up(namespace, api_instance) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -1088,13 +1200,16 @@

    Classes

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - api_instance.delete_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - name=self.app_wrapper_name, - ) + if self.config.mcad: + api_instance.delete_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + name=self.app_wrapper_name, + ) + else: + self._component_resources_down(namespace, api_instance) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -1112,42 +1227,46 @@

    Classes

    """ ready = False status = CodeFlareClusterStatus.UNKNOWN - # check the app wrapper status - appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) - if appwrapper: - if appwrapper.status in [ - AppWrapperStatus.RUNNING, - AppWrapperStatus.COMPLETED, - AppWrapperStatus.RUNNING_HOLD_COMPLETION, - ]: - ready = False - status = CodeFlareClusterStatus.STARTING - elif appwrapper.status in [ - AppWrapperStatus.FAILED, - AppWrapperStatus.DELETED, - ]: - ready = False - status = CodeFlareClusterStatus.FAILED # should deleted be separate - return status, ready # exit early, no need to check ray status - elif appwrapper.status in [ - AppWrapperStatus.PENDING, - AppWrapperStatus.QUEUEING, - ]: - ready = False - if appwrapper.status == AppWrapperStatus.PENDING: - status = CodeFlareClusterStatus.QUEUED - else: - status = CodeFlareClusterStatus.QUEUEING - if print_to_console: - pretty_print.print_app_wrappers_status([appwrapper]) - return ( - status, - ready, - ) # no need to check the ray status since still in queue + if self.config.mcad: + # check the app wrapper status + appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) + if appwrapper: + if appwrapper.status in [ + AppWrapperStatus.RUNNING, + AppWrapperStatus.COMPLETED, + AppWrapperStatus.RUNNING_HOLD_COMPLETION, + ]: + ready = False + status = CodeFlareClusterStatus.STARTING + elif appwrapper.status in [ + AppWrapperStatus.FAILED, + AppWrapperStatus.DELETED, + ]: + ready = False + status = CodeFlareClusterStatus.FAILED # should deleted be separate + return status, ready # exit early, no need to check ray status + elif appwrapper.status in [ + AppWrapperStatus.PENDING, + AppWrapperStatus.QUEUEING, + ]: + ready = False + if appwrapper.status == AppWrapperStatus.PENDING: + status = CodeFlareClusterStatus.QUEUED + else: + status = CodeFlareClusterStatus.QUEUEING + if print_to_console: + pretty_print.print_app_wrappers_status([appwrapper]) + return ( + status, + ready, + ) # no need to check the ray status since still in queue # check the ray cluster status cluster = _ray_cluster_status(self.config.name, self.config.namespace) - if cluster and not cluster.status == RayClusterStatus.UNKNOWN: + if cluster: + if cluster.status == RayClusterStatus.UNKNOWN: + ready = False + status = CodeFlareClusterStatus.STARTING if cluster.status == RayClusterStatus.READY: ready = True status = CodeFlareClusterStatus.READY @@ -1267,19 +1386,19 @@

    Classes

    """ This method accesses the head ray node in your cluster and lists the running jobs. """ - return self.client.list_jobs() + return self.job_client.list_jobs() def job_status(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the job status for the provided job id. """ - return self.client.get_job_status(job_id) + return self.job_client.get_job_status(job_id) def job_logs(self, job_id: str) -> str: """ This method accesses the head ray node in your cluster and returns the logs for the provided job id. """ - return self.client.get_job_logs(job_id) + return self.job_client.get_job_logs(job_id) def torchx_config( self, working_dir: str = None, requirements: str = None @@ -1295,7 +1414,7 @@

    Classes

    to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc): + def from_k8_cluster_object(rc, mcad=True): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -1334,6 +1453,7 @@

    Classes

    0 ]["image"], local_interactive=local_interactive, + mcad=mcad, ) return Cluster(cluster_config) @@ -1342,7 +1462,67 @@

    Classes

    ingress_domain = _get_ingress_domain() return f"ray://rayclient-{self.config.name}-{self.config.namespace}.{ingress_domain}" else: - return "None" + return "None" + + def _component_resources_up( + self, namespace: str, api_instance: client.CustomObjectsApi + ): + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + for resource in yamls: + if resource["kind"] == "RayCluster": + api_instance.create_namespaced_custom_object( + group="ray.io", + version="v1alpha1", + namespace=namespace, + plural="rayclusters", + body=resource, + ) + elif resource["kind"] == "Route": + api_instance.create_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + body=resource, + ) + elif resource["kind"] == "Secret": + secret_instance = client.CoreV1Api(api_config_handler()) + secret_instance.create_namespaced_secret( + namespace=namespace, + body=resource, + ) + + def _component_resources_down( + self, namespace: str, api_instance: client.CustomObjectsApi + ): + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + for resource in yamls: + if resource["kind"] == "RayCluster": + api_instance.delete_namespaced_custom_object( + group="ray.io", + version="v1alpha1", + namespace=namespace, + plural="rayclusters", + name=self.app_wrapper_name, + ) + elif resource["kind"] == "Route": + name = resource["metadata"]["name"] + api_instance.delete_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + name=name, + ) + elif resource["kind"] == "Secret": + name = resource["metadata"]["name"] + secret_instance = client.CoreV1Api(api_config_handler()) + secret_instance.delete_namespaced_secret( + namespace=namespace, + name=name, + )

    Class variables

    @@ -1353,7 +1533,7 @@

    Class variables

    Instance variables

    -
    var client
    +
    var job_client
    @@ -1361,23 +1541,25 @@

    Instance variables

    Expand source code
    @property
    -def client(self):
    -    if self._client:
    -        return self._client
    +def job_client(self):
    +    if self._job_submission_client:
    +        return self._job_submission_client
         if self.config.openshift_oauth:
             print(
                 api_config_handler().configuration.get_api_key_with_prefix(
                     "authorization"
                 )
             )
    -        self._client = JobSubmissionClient(
    +        self._job_submission_client = JobSubmissionClient(
                 self.cluster_dashboard_uri(),
                 headers=self._client_headers,
                 verify=self._client_verify_tls,
             )
         else:
    -        self._client = JobSubmissionClient(self.cluster_dashboard_uri())
    -    return self._client
+        self._job_submission_client = JobSubmissionClient(
+            self.cluster_dashboard_uri()
+        )
+    return self._job_submission_client
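The renamed job_client property lazily builds a ray.job_submission.JobSubmissionClient against the cluster's dashboard URI (adding the auth headers and TLS settings when OpenShift OAuth is enabled). A short sketch of how it might be used; the configuration values and entrypoint are illustrative only:

    from codeflare_sdk.cluster.cluster import Cluster
    from codeflare_sdk.cluster.config import ClusterConfiguration

    # Hypothetical cluster definition.
    cluster = Cluster(ClusterConfiguration(name="raytest", namespace="ns"))
    cluster.up()

    # job_client is a ray.job_submission.JobSubmissionClient bound to the dashboard.
    submission_id = cluster.job_client.submit_job(entrypoint="python train.py")
    print(cluster.job_client.get_job_status(submission_id))
    print(cluster.job_logs(submission_id))  # thin wrapper over job_client.get_job_logs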
    @@ -1464,6 +1646,10 @@

    Methods

    # Before attempting to create the cluster AW, let's evaluate the ClusterConfig if self.config.dispatch_priority: + if not self.config.mcad: + raise ValueError( + "Invalid Cluster Configuration, cannot have dispatch priority without MCAD" + ) priority_val = self.evaluate_dispatch_priority() if priority_val == None: raise ValueError( @@ -1486,6 +1672,7 @@

    Methods

    template = self.config.template image = self.config.image instascale = self.config.instascale + mcad = self.config.mcad instance_types = self.config.machine_types env = self.config.envs local_interactive = self.config.local_interactive @@ -1506,6 +1693,7 @@

    Methods

    template=template, image=image, instascale=instascale, + mcad=mcad, instance_types=instance_types, env=env, local_interactive=local_interactive, @@ -1551,13 +1739,16 @@

    Methods

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - api_instance.delete_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - name=self.app_wrapper_name, - ) + if self.config.mcad: + api_instance.delete_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + name=self.app_wrapper_name, + ) + else: + self._component_resources_down(namespace, api_instance) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -1598,7 +1789,7 @@

    Methods

    -def from_k8_cluster_object(rc) +def from_k8_cluster_object(rc, mcad=True)
    @@ -1606,7 +1797,7 @@

    Methods

Expand source code
-def from_k8_cluster_object(rc):
+def from_k8_cluster_object(rc, mcad=True):
         machine_types = (
             rc["metadata"]["labels"]["orderedinstance"].split("_")
             if "orderedinstance" in rc["metadata"]["labels"]
    @@ -1645,6 +1836,7 @@ 

    Methods

    0 ]["image"], local_interactive=local_interactive, + mcad=mcad, ) return Cluster(cluster_config)
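from_k8_cluster_object is what get_cluster uses to rebuild a Cluster object from a live RayCluster; with this change get_cluster also checks for a matching AppWrapper (_check_aw_exists) and sets mcad accordingly, so clusters created without MCAD round-trip correctly. A small sketch with placeholder names:

    from codeflare_sdk.cluster.cluster import get_cluster

    # Hypothetical, already-running cluster.
    cluster = get_cluster("unit-test-cluster-ray", namespace="ns")

    # mcad is False when no AppWrapper with the same name exists in the namespace.
    print(cluster.config.mcad)
    print(cluster.config.name, cluster.config.namespace)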
    @@ -1688,7 +1880,7 @@

    Methods

    """ This method accesses the head ray node in your cluster and returns the logs for the provided job id. """ - return self.client.get_job_logs(job_id)
    + return self.job_client.get_job_logs(job_id)
    @@ -1704,7 +1896,7 @@

    Methods

    """ This method accesses the head ray node in your cluster and returns the job status for the provided job id. """ - return self.client.get_job_status(job_id)
    + return self.job_client.get_job_status(job_id)
    @@ -1720,7 +1912,7 @@

    Methods

    """ This method accesses the head ray node in your cluster and lists the running jobs. """ - return self.client.list_jobs()
    + return self.job_client.list_jobs()
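The same job_client handle is what the TorchX-backed job API rides on (see the codeflare_sdk.job.jobs changes further down); a hedged end-to-end sketch, with the script name and requirements file as assumptions:

from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration
from codeflare_sdk.job.jobs import DDPJobDefinition

# Assumes a running cluster created elsewhere.
cluster = Cluster(ClusterConfiguration(name="mnisttest", namespace="default"))

jobdef = DDPJobDefinition(
    name="mnistjob",
    script="mnist.py",                                    # assumed training script
    scheduler_args={"requirements": "requirements.txt"},  # assumed requirements file
)
job = jobdef.submit(cluster)  # dry-runs and schedules against cluster.job_client

print(job.status())
print(job.logs())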
    @@ -1759,42 +1951,46 @@

    Methods

    """ ready = False status = CodeFlareClusterStatus.UNKNOWN - # check the app wrapper status - appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) - if appwrapper: - if appwrapper.status in [ - AppWrapperStatus.RUNNING, - AppWrapperStatus.COMPLETED, - AppWrapperStatus.RUNNING_HOLD_COMPLETION, - ]: - ready = False - status = CodeFlareClusterStatus.STARTING - elif appwrapper.status in [ - AppWrapperStatus.FAILED, - AppWrapperStatus.DELETED, - ]: - ready = False - status = CodeFlareClusterStatus.FAILED # should deleted be separate - return status, ready # exit early, no need to check ray status - elif appwrapper.status in [ - AppWrapperStatus.PENDING, - AppWrapperStatus.QUEUEING, - ]: - ready = False - if appwrapper.status == AppWrapperStatus.PENDING: - status = CodeFlareClusterStatus.QUEUED - else: - status = CodeFlareClusterStatus.QUEUEING - if print_to_console: - pretty_print.print_app_wrappers_status([appwrapper]) - return ( - status, - ready, - ) # no need to check the ray status since still in queue + if self.config.mcad: + # check the app wrapper status + appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) + if appwrapper: + if appwrapper.status in [ + AppWrapperStatus.RUNNING, + AppWrapperStatus.COMPLETED, + AppWrapperStatus.RUNNING_HOLD_COMPLETION, + ]: + ready = False + status = CodeFlareClusterStatus.STARTING + elif appwrapper.status in [ + AppWrapperStatus.FAILED, + AppWrapperStatus.DELETED, + ]: + ready = False + status = CodeFlareClusterStatus.FAILED # should deleted be separate + return status, ready # exit early, no need to check ray status + elif appwrapper.status in [ + AppWrapperStatus.PENDING, + AppWrapperStatus.QUEUEING, + ]: + ready = False + if appwrapper.status == AppWrapperStatus.PENDING: + status = CodeFlareClusterStatus.QUEUED + else: + status = CodeFlareClusterStatus.QUEUEING + if print_to_console: + pretty_print.print_app_wrappers_status([appwrapper]) + return ( + status, + ready, + ) # no need to check the ray status since still in queue # check the ray cluster status cluster = _ray_cluster_status(self.config.name, self.config.namespace) - if cluster and not cluster.status == RayClusterStatus.UNKNOWN: + if cluster: + if cluster.status == RayClusterStatus.UNKNOWN: + ready = False + status = CodeFlareClusterStatus.STARTING if cluster.status == RayClusterStatus.READY: ready = True status = CodeFlareClusterStatus.READY @@ -1866,15 +2062,18 @@

    Methods

try:
    config_check()
    api_instance = client.CustomObjectsApi(api_config_handler())
-    with open(self.app_wrapper_yaml) as f:
-        aw = yaml.load(f, Loader=yaml.FullLoader)
-        api_instance.create_namespaced_custom_object(
-            group="workload.codeflare.dev",
-            version="v1beta1",
-            namespace=namespace,
-            plural="appwrappers",
-            body=aw,
-        )
+    if self.config.mcad:
+        with open(self.app_wrapper_yaml) as f:
+            aw = yaml.load(f, Loader=yaml.FullLoader)
+            api_instance.create_namespaced_custom_object(
+                group="workload.codeflare.dev",
+                version="v1beta1",
+                namespace=namespace,
+                plural="appwrappers",
+                body=aw,
+            )
+    else:
+        self._component_resources_up(namespace, api_instance)
except Exception as e:  # pragma: no cover
    return _kube_api_error_handling(e)
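A hedged sketch of what the new branch enables from the user's side: with mcad=False the SDK writes the individual components instead of an AppWrapper, and up()/down() create and delete them directly (all names and sizes below are illustrative):

from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration

cluster = Cluster(
    ClusterConfiguration(
        name="raytest",
        namespace="default",
        num_workers=2,
        min_cpus=1,
        max_cpus=1,
        min_memory=4,
        max_memory=4,
        num_gpus=0,
        mcad=False,  # skip MCAD: manage the RayCluster and its companion resources directly
    )
)
cluster.up()          # goes through _component_resources_up(...)
cluster.wait_ready()
cluster.down()        # goes through _component_resources_down(...)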
    @@ -1956,7 +2155,6 @@

    Index

  • Cluster

      -
    • client
    • cluster_dashboard_uri
    • cluster_uri
    • create_app_wrapper
    • @@ -1965,6 +2163,7 @@

      evaluate_dispatch_priority
    • from_k8_cluster_object
    • is_dashboard_ready
    • +
    • job_client
    • job_logs
    • job_status
    • list_jobs
diff --git a/docs/cluster/config.html b/docs/cluster/config.html
index 28830390..37242b17 100644
--- a/docs/cluster/config.html
+++ b/docs/cluster/config.html
@@ -78,6 +78,7 @@

      Module codeflare_sdk.cluster.config

num_gpus: int = 0
template: str = f"{dir}/templates/base-template.yaml"
instascale: bool = False
+mcad: bool = True
envs: dict = field(default_factory=dict)
image: str = "quay.io/project-codeflare/ray:latest-py39-cu118"
local_interactive: bool = False
@@ -97,7 +98,7 @@

      Classes

class ClusterConfiguration
-(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, envs: dict = <factory>, image: str = 'quay.io/project-codeflare/ray:latest-py39-cu118', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False)
+(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = 'quay.io/project-codeflare/ray:latest-py39-cu118', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False)

      This dataclass is used to specify resource requirements and other details, and @@ -127,6 +128,7 @@

      Classes

num_gpus: int = 0
template: str = f"{dir}/templates/base-template.yaml"
instascale: bool = False
+mcad: bool = True
envs: dict = field(default_factory=dict)
image: str = "quay.io/project-codeflare/ray:latest-py39-cu118"
local_interactive: bool = False
@@ -188,6 +190,10 @@

      Class variables

+var mcad : bool
+
+
+
var min_cpus : int
      @@ -254,6 +260,7 @@

      machine_types
    • max_cpus
    • max_memory
    • +
    • mcad
    • min_cpus
    • min_memory
    • name
diff --git a/docs/job/jobs.html b/docs/job/jobs.html
index 266da13e..ea587df8 100644
--- a/docs/job/jobs.html
+++ b/docs/job/jobs.html
@@ -50,9 +50,6 @@

      Module codeflare_sdk.job.jobs

from torchx.schedulers.ray_scheduler import RayScheduler
from torchx.specs import AppHandle, parse_app_handle, AppDryRunInfo
-from ray.job_submission import JobSubmissionClient
-
-import openshift as oc

if TYPE_CHECKING:
    from ..cluster.cluster import Cluster
@@ -124,9 +121,9 @@

      Module codeflare_sdk.job.jobs

      def _dry_run(self, cluster: "Cluster"): j = f"{cluster.config.num_workers}x{max(cluster.config.num_gpus, 1)}" # # of proc. = # of gpus - runner = get_runner(ray_client=cluster.client) + runner = get_runner(ray_client=cluster.job_client) runner._scheduler_instances["ray"] = RayScheduler( - session_name=runner._name, ray_client=cluster.client + session_name=runner._name, ray_client=cluster.job_client ) return ( runner.dryrun( @@ -381,9 +378,9 @@

      Methods

      def _dry_run(self, cluster: "Cluster"): j = f"{cluster.config.num_workers}x{max(cluster.config.num_gpus, 1)}" # # of proc. = # of gpus - runner = get_runner(ray_client=cluster.client) + runner = get_runner(ray_client=cluster.job_client) runner._scheduler_instances["ray"] = RayScheduler( - session_name=runner._name, ray_client=cluster.client + session_name=runner._name, ray_client=cluster.job_client ) return ( runner.dryrun( diff --git a/docs/utils/generate_yaml.html b/docs/utils/generate_yaml.html index 2091f8b3..4c0ecd3f 100644 --- a/docs/utils/generate_yaml.html +++ b/docs/utils/generate_yaml.html @@ -488,6 +488,19 @@

      Module codeflare_sdk.utils.generate_yaml

)

+def write_components(user_yaml: dict, output_file_name: str):
+    components = user_yaml.get("spec", "resources")["resources"].get("GenericItems")
+    open(output_file_name, "w").close()
+    with open(output_file_name, "a") as outfile:
+        for component in components:
+            if "generictemplate" in component:
+                outfile.write("---\n")
+                yaml.dump(
+                    component["generictemplate"], outfile, default_flow_style=False
+                )
+    print(f"Written to: {output_file_name}")
+
+
def generate_appwrapper(
    name: str,
    namespace: str,
@@ -503,6 +516,7 @@

      Module codeflare_sdk.utils.generate_yaml

template: str,
image: str,
instascale: bool,
+mcad: bool,
instance_types: list,
env,
local_interactive: bool,
@@ -558,7 +572,10 @@

      Module codeflare_sdk.utils.generate_yaml

enable_openshift_oauth(user_yaml, cluster_name, namespace)

outfile = appwrapper_name + ".yaml"
-write_user_appwrapper(user_yaml, outfile)
+if not mcad:
+    write_components(user_yaml, outfile)
+else:
+    write_user_appwrapper(user_yaml, outfile)
return outfile
@@ -775,7 +792,7 @@

      Functions

-def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int, openshift_oauth: bool)
+def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, mcad: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int, openshift_oauth: bool)
      @@ -798,6 +815,7 @@

      Functions

template: str,
image: str,
instascale: bool,
+mcad: bool,
instance_types: list,
env,
local_interactive: bool,
@@ -853,7 +871,10 @@

      Functions

enable_openshift_oauth(user_yaml, cluster_name, namespace)

outfile = appwrapper_name + ".yaml"
-write_user_appwrapper(user_yaml, outfile)
+if not mcad:
+    write_components(user_yaml, outfile)
+else:
+    write_user_appwrapper(user_yaml, outfile)
return outfile
      @@ -1209,6 +1230,28 @@

      Functions

      limits["nvidia.com/gpu"] = gpu +
      +def write_components(user_yaml: dict, output_file_name: str) +
      +
      +
      +
      + +Expand source code + +
      def write_components(user_yaml: dict, output_file_name: str):
      +    components = user_yaml.get("spec", "resources")["resources"].get("GenericItems")
      +    open(output_file_name, "w").close()
      +    with open(output_file_name, "a") as outfile:
      +        for component in components:
      +            if "generictemplate" in component:
      +                outfile.write("---\n")
      +                yaml.dump(
      +                    component["generictemplate"], outfile, default_flow_style=False
      +                )
      +    print(f"Written to: {output_file_name}")
      +
      +
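A hedged sketch of driving the helper above by hand; the input file name is an assumption (any AppWrapper YAML previously generated by the SDK would do):

import yaml
from codeflare_sdk.utils.generate_yaml import write_components

# Split an SDK-generated AppWrapper into a plain multi-document YAML file whose
# generictemplates (the RayCluster and its companion resources) can be applied without MCAD.
with open("raytest.yaml") as f:
    user_yaml = yaml.load(f, Loader=yaml.FullLoader)

write_components(user_yaml, "raytest-components.yaml")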
      def write_user_appwrapper(user_yaml, output_file_name)
      @@ -1262,6 +1305,7 @@

      Index

    • update_priority
    • update_rayclient_route
    • update_resources
    • +
    • write_components
    • write_user_appwrapper
diff --git a/docs/utils/kube_api_helpers.html b/docs/utils/kube_api_helpers.html
index 37034ab7..4105a4c5 100644
--- a/docs/utils/kube_api_helpers.html
+++ b/docs/utils/kube_api_helpers.html
@@ -54,7 +54,9 @@

    Module codeflare_sdk.utils.kube_api_helpers

# private methods
-def _kube_api_error_handling(e: Exception):  # pragma: no cover
+def _kube_api_error_handling(
+    e: Exception, print_error: bool = True
+):  # pragma: no cover
    perm_msg = (
        "Action not permitted, have you put in correct/up-to-date auth credentials?"
    )
@@ -63,11 +65,13 @@

    Module codeflare_sdk.utils.kube_api_helpers

    if type(e) == config.ConfigException: raise PermissionError(perm_msg) if type(e) == executing.executing.NotOneValueFound: - print(nf_msg) + if print_error: + print(nf_msg) return if type(e) == client.ApiException: if e.reason == "Not Found": - print(nf_msg) + if print_error: + print(nf_msg) return elif e.reason == "Unauthorized" or e.reason == "Forbidden": raise PermissionError(perm_msg) From cc1dcd7860b789ffda8575a392cee447b4817686 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Mon, 16 Oct 2023 16:09:14 +0100 Subject: [PATCH 037/496] Update RayCluster labels to workload.codeflare.dev/appwrappers --- .../guided-demos/notebook-ex-outputs/gptfttest.yaml | 2 +- .../guided-demos/notebook-ex-outputs/instascaletest.yaml | 2 +- .../guided-demos/notebook-ex-outputs/interactivetest.yaml | 2 +- .../guided-demos/notebook-ex-outputs/jobtest.yaml | 2 +- .../guided-demos/notebook-ex-outputs/raytest.yaml | 2 +- docs/utils/generate_yaml.html | 4 ++-- src/codeflare_sdk/templates/base-template.yaml | 2 +- src/codeflare_sdk/utils/generate_yaml.py | 2 +- tests/test-case-bad.yaml | 2 +- tests/test-case-prio.yaml | 2 +- tests/test-case.yaml | 2 +- tests/unit_test.py | 8 ++++---- 12 files changed, 16 insertions(+), 16 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml index 436abe05..544c5213 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml @@ -33,7 +33,7 @@ spec: kind: RayCluster metadata: labels: - appwrapper.mcad.ibm.com: gptfttest + workload.codeflare.dev/appwrapper: gptfttest controller-tools.k8s.io: '1.0' name: gptfttest namespace: default diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml index 5f952649..e10bc3bf 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml @@ -33,7 +33,7 @@ spec: kind: RayCluster metadata: labels: - appwrapper.mcad.ibm.com: instascaletest + workload.codeflare.dev/appwrapper: instascaletest controller-tools.k8s.io: '1.0' name: instascaletest namespace: default diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index c80684e1..dd5eba28 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -33,7 +33,7 @@ spec: kind: RayCluster metadata: labels: - appwrapper.mcad.ibm.com: interactivetest + workload.codeflare.dev/appwrapper: interactivetest controller-tools.k8s.io: '1.0' name: interactivetest namespace: default diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index aa85803e..b2e7a0aa 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -31,7 +31,7 @@ spec: kind: RayCluster metadata: labels: - appwrapper.mcad.ibm.com: jobtest + workload.codeflare.dev/appwrapper: jobtest controller-tools.k8s.io: '1.0' name: jobtest namespace: default diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index c58b963a..0cbbdb7c 100644 
--- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -31,7 +31,7 @@ spec: kind: RayCluster metadata: labels: - appwrapper.mcad.ibm.com: raytest + workload.codeflare.dev/appwrapper: raytest controller-tools.k8s.io: '1.0' name: raytest namespace: default diff --git a/docs/utils/generate_yaml.html b/docs/utils/generate_yaml.html index 4c0ecd3f..fa449d8d 100644 --- a/docs/utils/generate_yaml.html +++ b/docs/utils/generate_yaml.html @@ -110,7 +110,7 @@

    Module codeflare_sdk.utils.generate_yaml

    metadata["name"] = appwrapper_name metadata["namespace"] = namespace lower_meta = item.get("generictemplate", {}).get("metadata") - lower_meta["labels"]["appwrapper.mcad.ibm.com"] = appwrapper_name + lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name lower_meta["name"] = cluster_name lower_meta["namespace"] = namespace @@ -1104,7 +1104,7 @@

    Functions

    metadata["name"] = appwrapper_name metadata["namespace"] = namespace lower_meta = item.get("generictemplate", {}).get("metadata") - lower_meta["labels"]["appwrapper.mcad.ibm.com"] = appwrapper_name + lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name lower_meta["name"] = cluster_name lower_meta["namespace"] = namespace diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index d2f3385e..cf4ec496 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -41,7 +41,7 @@ spec: kind: RayCluster metadata: labels: - appwrapper.mcad.ibm.com: "aw-kuberay" + workload.codeflare.dev/appwrapper: "aw-kuberay" controller-tools.k8s.io: "1.0" # A unique identifier for the head node and workers of this cluster. name: kuberay-cluster diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index a833892a..4e4d4492 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -79,7 +79,7 @@ def update_names(yaml, item, appwrapper_name, cluster_name, namespace): metadata["name"] = appwrapper_name metadata["namespace"] = namespace lower_meta = item.get("generictemplate", {}).get("metadata") - lower_meta["labels"]["appwrapper.mcad.ibm.com"] = appwrapper_name + lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name lower_meta["name"] = cluster_name lower_meta["namespace"] = namespace diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index cdc4f6ac..eb800e56 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -33,7 +33,7 @@ spec: kind: RayCluster metadata: labels: - appwrapper.mcad.ibm.com: unit-test-cluster + workload.codeflare.dev/appwrapper: unit-test-cluster controller-tools.k8s.io: '1.0' name: unit-test-cluster namespace: ns diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 6e7c8687..9020e6d8 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -33,7 +33,7 @@ spec: kind: RayCluster metadata: labels: - appwrapper.mcad.ibm.com: prio-test-cluster + workload.codeflare.dev/appwrapper: prio-test-cluster controller-tools.k8s.io: '1.0' name: prio-test-cluster namespace: ns diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 08d36055..05ed386c 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -32,7 +32,7 @@ spec: kind: RayCluster metadata: labels: - appwrapper.mcad.ibm.com: unit-test-cluster + workload.codeflare.dev/appwrapper: unit-test-cluster controller-tools.k8s.io: '1.0' name: unit-test-cluster namespace: ns diff --git a/tests/unit_test.py b/tests/unit_test.py index 9ac13159..8900df5b 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -743,7 +743,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "creationTimestamp": "2023-02-22T16:26:07Z", "generation": 1, "labels": { - "appwrapper.mcad.ibm.com": "quicktest", + "workload.codeflare.dev/appwrapper": "quicktest", "controller-tools.k8s.io": "1.0", "resourceName": "quicktest", "orderedinstance": "m4.xlarge_g4dn.xlarge", @@ -756,7 +756,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "f:metadata": { "f:labels": { ".": {}, - "f:appwrapper.mcad.ibm.com": {}, + "f:workload.codeflare.dev/appwrapper": {}, "f:controller-tools.k8s.io": {}, "f:resourceName": {}, }, @@ -1114,7 +1114,7 @@ def get_aw_obj(group, version, namespace, plural): "kind": "RayCluster", "metadata": { "labels": { - 
"appwrapper.mcad.ibm.com": "quicktest1", + "workload.codeflare.dev/appwrapper": "quicktest1", "controller-tools.k8s.io": "1.0", }, "name": "quicktest1", @@ -1437,7 +1437,7 @@ def get_aw_obj(group, version, namespace, plural): "kind": "RayCluster", "metadata": { "labels": { - "appwrapper.mcad.ibm.com": "quicktest2", + "workload.codeflare.dev/appwrapper": "quicktest2", "controller-tools.k8s.io": "1.0", }, "name": "quicktest2", From e985bb539daf9a85d7fbb9e00dcefa56ae174015 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Mon, 16 Oct 2023 16:42:34 +0100 Subject: [PATCH 038/496] Update unit tests to reflect label update --- tests/test-case-prio.yaml | 2 +- tests/test-case.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 9020e6d8..36cc3a40 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -33,8 +33,8 @@ spec: kind: RayCluster metadata: labels: - workload.codeflare.dev/appwrapper: prio-test-cluster controller-tools.k8s.io: '1.0' + workload.codeflare.dev/appwrapper: prio-test-cluster name: prio-test-cluster namespace: ns spec: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 05ed386c..8ab27f01 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -32,8 +32,8 @@ spec: kind: RayCluster metadata: labels: - workload.codeflare.dev/appwrapper: unit-test-cluster controller-tools.k8s.io: '1.0' + workload.codeflare.dev/appwrapper: unit-test-cluster name: unit-test-cluster namespace: ns spec: From 407aedca7546e8922c4eee176112bb978112f586 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 25 Oct 2023 16:39:58 +0100 Subject: [PATCH 039/496] Fixed label related tests and notebooks --- demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml | 2 +- .../guided-demos/notebook-ex-outputs/instascaletest.yaml | 2 +- .../guided-demos/notebook-ex-outputs/interactivetest.yaml | 2 +- demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml | 2 +- demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml | 2 +- tests/test-case-no-mcad.yamls | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml index 544c5213..436abe05 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml @@ -33,7 +33,7 @@ spec: kind: RayCluster metadata: labels: - workload.codeflare.dev/appwrapper: gptfttest + appwrapper.mcad.ibm.com: gptfttest controller-tools.k8s.io: '1.0' name: gptfttest namespace: default diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml index e10bc3bf..5f952649 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml @@ -33,7 +33,7 @@ spec: kind: RayCluster metadata: labels: - workload.codeflare.dev/appwrapper: instascaletest + appwrapper.mcad.ibm.com: instascaletest controller-tools.k8s.io: '1.0' name: instascaletest namespace: default diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index dd5eba28..c80684e1 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ 
b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -33,7 +33,7 @@ spec: kind: RayCluster metadata: labels: - workload.codeflare.dev/appwrapper: interactivetest + appwrapper.mcad.ibm.com: interactivetest controller-tools.k8s.io: '1.0' name: interactivetest namespace: default diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index b2e7a0aa..aa85803e 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -31,7 +31,7 @@ spec: kind: RayCluster metadata: labels: - workload.codeflare.dev/appwrapper: jobtest + appwrapper.mcad.ibm.com: jobtest controller-tools.k8s.io: '1.0' name: jobtest namespace: default diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index 0cbbdb7c..c58b963a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -31,7 +31,7 @@ spec: kind: RayCluster metadata: labels: - workload.codeflare.dev/appwrapper: raytest + appwrapper.mcad.ibm.com: raytest controller-tools.k8s.io: '1.0' name: raytest namespace: default diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 6d905566..b48e7cf5 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -3,8 +3,8 @@ apiVersion: ray.io/v1alpha1 kind: RayCluster metadata: labels: - appwrapper.mcad.ibm.com: unit-test-cluster-ray controller-tools.k8s.io: '1.0' + workload.codeflare.dev/appwrapper: unit-test-cluster-ray name: unit-test-cluster-ray namespace: ns spec: From c0fd0afd8c326d4815d2caa5b6e2e3eb11028a6d Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 26 Jul 2023 16:46:52 +0100 Subject: [PATCH 040/496] Added support for ingress over routes on cluster creation --- poetry.lock | 10 + src/codeflare_sdk/cluster/cluster.py | 103 +++---- src/codeflare_sdk/cluster/config.py | 2 + .../templates/base-template.yaml | 54 ++-- src/codeflare_sdk/utils/generate_yaml.py | 267 +++++++++++++++--- src/codeflare_sdk/utils/openshift_oauth.py | 4 +- tests/test-case-prio.yaml | 22 +- tests/test-case.yaml | 22 +- tests/unit_test.py | 194 ++++++++++--- tests/unit_test_support.py | 11 +- 10 files changed, 517 insertions(+), 172 deletions(-) diff --git a/poetry.lock b/poetry.lock index a02a4261..13722766 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1187,6 +1187,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 66475278..808aa007 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -28,7 +28,9 @@ from .auth import config_check, api_config_handler from ..utils import pretty_print -from ..utils.generate_yaml import generate_appwrapper +from ..utils.generate_yaml import ( + generate_appwrapper, +) from ..utils.kube_api_helpers import _kube_api_error_handling from ..utils.openshift_oauth import ( create_openshift_oauth_objects, @@ -175,6 +177,8 @@ def create_app_wrapper(self): local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority + ingress_domain = self.config.ingress_domain + ingress_options = self.config.ingress_options return generate_appwrapper( name=name, namespace=namespace, @@ -198,6 +202,8 @@ def create_app_wrapper(self): dispatch_priority=dispatch_priority, priority_val=priority_val, openshift_oauth=self.config.openshift_oauth, + ingress_domain=ingress_domain, + ingress_options=ingress_options, ) # creates a new cluster with the provided or default spec @@ -399,27 +405,22 @@ def cluster_dashboard_uri(self) -> str: """ try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - routes = api_instance.list_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=self.config.namespace, - plural="routes", - ) - except Exception as e: # pragma: no cover + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(self.config.namespace) + except Exception as e: return _kube_api_error_handling(e) - for route in routes["items"]: - if route["metadata"][ - "name" - ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ - "name" - ].startswith( - f"{self.config.name}-ingress" - ): - protocol = "https" if route["spec"].get("tls") else "http" - return f"{protocol}://{route['spec']['host']}" + for 
ingress in ingresses.items: + annotations = ingress.metadata.annotations + if ingress.metadata.name == f"ray-dashboard-{self.config.name}" or ingress.metadata.name.startswith( + f"{self.config.name}-ingress" ): + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + return f"{protocol}://{ingress.spec.rules[0].host}" return "Dashboard route not available yet, have you run cluster.up()?" + def list_jobs(self) -> List: """ @@ -498,8 +499,8 @@ def from_k8_cluster_object(rc, mcad=True): def local_client_url(self): if self.config.local_interactive == True: - ingress_domain = _get_ingress_domain() - return f"ray://rayclient-{self.config.name}-{self.config.namespace}.{ingress_domain}" + ingress_domain = _get_ingress_domain(self) + return f"ray://{ingress_domain}" else: return "None" @@ -655,16 +656,23 @@ def _check_aw_exists(name: str, namespace: str) -> bool: return False -def _get_ingress_domain(): +# Cant test this until get_current_namespace is fixed +def _get_ingress_domain(self): # pragma: no cover try: config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - except Exception as e: # pragma: no cover + api_client = client.NetworkingV1Api(api_config_handler()) + if self.config.namespace != None: + namespace = self.config.namespace + else: + namespace = get_current_namespace() + ingresses = api_client.list_namespaced_ingress(namespace) + except Exception as e: # pragma: no cover return _kube_api_error_handling(e) - return ingress["spec"]["domain"] + domain = None + for ingress in ingresses.items: + if ingress.spec.rules[0].http.paths[0].backend.service.port.number == 10001: + domain = ingress.spec.rules[0].host + return domain def _app_wrapper_status(name, namespace="default") -> Optional[AppWrapper]: @@ -756,27 +764,22 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: status = RayClusterStatus(rc["status"]["state"].lower()) else: status = RayClusterStatus.UNKNOWN - - config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - # UPDATE THIS - routes = api_instance.list_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=rc["metadata"]["namespace"], - plural="routes", - ) - ray_route = None - for route in routes["items"]: - if route["metadata"][ - "name" - ] == f"ray-dashboard-{rc['metadata']['name']}" or route["metadata"][ - "name" - ].startswith( - f"{rc['metadata']['name']}-ingress" - ): - protocol = "https" if route["spec"].get("tls") else "http" - ray_route = f"{protocol}://{route['spec']['host']}" + try: + config_check() + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(rc["metadata"]["namespace"]) + except Exception as e: + return _kube_api_error_handling(e) + ray_ingress = None + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + if ingress.metadata.name == f"ray-dashboard-{rc['metadata']['name']}" or ingress.metadata.name.startswith( + f"{rc['metadata']['name']}-ingress" ): + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + ray_ingress = f"{protocol}://{ingress.spec.rules[0].host}" return RayCluster( name=rc["metadata"]["name"], @@ -794,7 +797,6 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: ]["resources"]["limits"]["cpu"], worker_gpu=0, # hard to detect 
currently how many gpus, can override it with what the user asked for namespace=rc["metadata"]["namespace"], - dashboard=ray_route, head_cpus=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ "resources" ]["limits"]["cpu"], @@ -804,6 +806,7 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: head_gpu=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ "resources" ]["limits"]["nvidia.com/gpu"], + dashboard=ray_ingress, ) diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index a21318ab..19209764 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -53,3 +53,5 @@ class ClusterConfiguration: image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None openshift_oauth: bool = False # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator + ingress_options: dict = field(default_factory=dict) + ingress_domain: str = None diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index cf4ec496..8f309630 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -289,38 +289,50 @@ spec: emptyDir: {} - replicas: 1 generictemplate: - kind: Route - apiVersion: route.openshift.io/v1 + apiVersion: networking.k8s.io/v1 + kind: Ingress metadata: - name: ray-dashboard-deployment-name + name: ray-dashboard-raytest namespace: default - labels: - # allows me to return name of service that Ray operator creates - odh-ray-cluster-service: deployment-name-head-svc + annotations: + annotations-example:annotations-example spec: - to: - kind: Service - name: deployment-name-head-svc - port: - targetPort: dashboard + ingressClassName: nginx + rules: + - http: + paths: + - backend: + service: + name: raytest-head-svc + port: + number: 8265 + pathType: Prefix + path: / + host: ray-dashboard-raytest. - replicas: 1 generictemplate: - apiVersion: route.openshift.io/v1 - kind: Route + apiVersion: networking.k8s.io/v1 + kind: Ingress metadata: name: rayclient-deployment-name namespace: default + annotations: + annotations-example:annotations-example labels: - # allows me to return name of service that Ray operator creates odh-ray-cluster-service: deployment-name-head-svc spec: - port: - targetPort: client - tls: - termination: passthrough - to: - kind: Service - name: deployment-name-head-svc + ingressClassName: nginx + rules: + - http: + paths: + - backend: + service: + name: deployment-name-head-svc + port: + number: 10001 + path: '' + pathType: ImplementationSpecific + host: rayclient-raytest. 
- replicas: 1 generictemplate: apiVersion: v1 diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 4e4d4492..e5ca4bd6 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -50,28 +50,191 @@ def gen_names(name): else: return name, name - -def update_dashboard_route(route_item, cluster_name, namespace): - metadata = route_item.get("generictemplate", {}).get("metadata") - metadata["name"] = gen_dashboard_route_name(cluster_name) - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - spec = route_item.get("generictemplate", {}).get("spec") - spec["to"]["name"] = f"{cluster_name}-head-svc" - - def gen_dashboard_route_name(cluster_name): return f"ray-dashboard-{cluster_name}" +# Check if the ingress api cluster resource exists +def is_openshift_cluster(): + try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" + ) -# ToDo: refactor the update_x_route() functions -def update_rayclient_route(route_item, cluster_name, namespace): - metadata = route_item.get("generictemplate", {}).get("metadata") - metadata["name"] = f"rayclient-{cluster_name}" - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - spec = route_item.get("generictemplate", {}).get("spec") - spec["to"]["name"] = f"{cluster_name}-head-svc" + return True + except client.ApiException as e: # pragma: no cover + if e.status == 404 or e.status == 403: + return False + else: + print(f"Error detecting cluster type defaulting to Kubernetes: {e}") + return False + + +def update_dashboard_ingress( + ingress_item, cluster_name, namespace, ingress_options, ingress_domain +): # pragma: no cover + metadata = ingress_item.get("generictemplate", {}).get("metadata") + spec = ingress_item.get("generictemplate", {}).get("spec") + if ingress_options != {}: + for index, ingress_option in enumerate(ingress_options["ingresses"]): + if "ingressName" not in ingress_option.keys(): + raise ValueError( + f"Error: 'ingressName' is missing or empty for ingress item at index {index}" + ) + if "port" not in ingress_option.keys(): + raise ValueError( + f"Error: 'port' is missing or empty for ingress item at index {index}" + ) + elif not isinstance(ingress_option["port"], int): + raise ValueError( + f"Error: 'port' is not of type int for ingress item at index {index}" + ) + if ingress_option["port"] == 8265: + metadata["name"] = ingress_option["ingressName"] + metadata["namespace"] = namespace + if "annotations" not in ingress_option.keys(): + del metadata["annotations"] + else: + metadata["annotations"] = ingress_option["annotations"] + if "path" not in ingress_option.keys(): + del spec["rules"][0]["http"]["paths"][0]["path"] + else: + spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[ + "path" + ] + if "pathType" not in ingress_option.keys(): + spec["rules"][0]["http"]["paths"][0][ + "pathType" + ] = "ImplementationSpecific" + if "host" not in ingress_option.keys(): + del spec["rules"][0]["host"] + else: + spec["rules"][0]["host"] = ingress_option["host"] + if "ingressClassName" not in ingress_option.keys(): + del spec["ingressClassName"] + else: + spec["ingressClassName"] = ingress_option["ingressClassName"] + + spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ + "name" + ] = f"{cluster_name}-head-svc" + 
else: + metadata["name"] = f"ray-dashboard-{cluster_name}" + metadata["namespace"] = namespace + spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ + "name" + ] = f"{cluster_name}-head-svc" + if is_openshift_cluster(): + try: + config_check() + api_client = client.CustomObjectsApi(api_config_handler()) + ingress = api_client.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" + ) + del spec["ingressClassName"] + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + domain = ingress["spec"]["domain"] + elif ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + ) + else: + domain = ingress_domain + del metadata["annotations"] + spec["rules"][0]["host"] = f"ray-dashboard-{cluster_name}-{namespace}.{domain}" + + +def update_rayclient_ingress( + ingress_item, cluster_name, namespace, ingress_options, ingress_domain +): # pragma: no cover + metadata = ingress_item.get("generictemplate", {}).get("metadata") + spec = ingress_item.get("generictemplate", {}).get("spec") + if ingress_options != {}: + for index, ingress_option in enumerate(ingress_options["ingresses"]): + if "ingressName" not in ingress_option.keys(): + raise ValueError( + f"Error: 'ingressName' is missing or empty for ingress item at index {index}" + ) + if "port" not in ingress_option.keys(): + raise ValueError( + f"Error: 'port' is missing or empty for ingress item at index {index}" + ) + elif not isinstance(ingress_option["port"], int): + raise ValueError( + f"Error: 'port' is not of type int for ingress item at index {index}" + ) + if ingress_option["port"] == 10001: + metadata["name"] = ingress_option["ingressName"] + metadata["namespace"] = namespace + if "annotations" not in ingress_option.keys(): + del metadata["annotations"] + else: + metadata["annotations"] = ingress_option["annotations"] + if "path" not in ingress_option.keys(): + del spec["rules"][0]["http"]["paths"][0]["path"] + else: + spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[ + "path" + ] + if "pathType" not in ingress_option.keys(): + spec["rules"][0]["http"]["paths"][0][ + "pathType" + ] = "ImplementationSpecific" + if "host" not in ingress_option.keys(): + del spec["rules"][0]["host"] + else: + spec["rules"][0]["host"] = ingress_option["host"] + if "ingressClassName" not in ingress_option.keys(): + del spec["ingressClassName"] + else: + spec["ingressClassName"] = ingress_option["ingressClassName"] + + spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ + "name" + ] = f"{cluster_name}-head-svc" + else: + metadata["name"] = f"rayclient-{cluster_name}" + metadata["namespace"] = namespace + metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" + + spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ + "name" + ] = f"{cluster_name}-head-svc" + + if is_openshift_cluster(): + try: + config_check() + api_client = client.CustomObjectsApi(api_config_handler()) + ingress = api_client.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" + ) + ingressClassName = "openshift-default" + annotations = { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "route.openshift.io/termination": "passthrough", + } + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + domain = ingress["spec"]["domain"] + elif ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. 
For Kubernetes Clusters please specify an ingress domain" + ) + else: + domain = ingress_domain + ingressClassName = "nginx" + annotations = { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "nginx.ingress.kubernetes.io/ssl-passthrough": "true", + } + + metadata["annotations"] = annotations + spec["ingressClassName"] = ingressClassName + spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{domain}" def update_names(yaml, item, appwrapper_name, cluster_name, namespace): @@ -274,11 +437,12 @@ def update_ca_secret(ca_secret_item, cluster_name, namespace): data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) -def enable_local_interactive(resources, cluster_name, namespace): - rayclient_route_item = resources["resources"].get("GenericItems")[2] +def enable_local_interactive( + resources, cluster_name, namespace, ingress_options, ingress_domain +): + rayclient_ingress_item = resources["resources"].get("GenericItems")[2] ca_secret_item = resources["resources"].get("GenericItems")[3] item = resources["resources"].get("GenericItems")[0] - update_rayclient_route(rayclient_route_item, cluster_name, namespace) update_ca_secret(ca_secret_item, cluster_name, namespace) # update_ca_secret_volumes item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"]["volumes"][0][ @@ -300,16 +464,47 @@ def enable_local_interactive(resources, cluster_name, namespace): ][0].get("command")[2] command = command.replace("deployment-name", cluster_name) - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] + + if ingress_options != {}: + for index, ingress_option in enumerate(ingress_options["ingresses"]): + if ingress_option["port"] == 10001: + if "host" not in ingress_option.keys(): + raise ValueError( + f"Client host is not specified please include a host for the ingress item at index {index}" + ) + else: + host = ingress_option["host"] + domain_split = host.split(".", 1) + if len(domain_split) > 1: + domain = domain_split[1] + else: + raise ValueError( + f"The client ingress host is configured incorrectly please specify a host with a correct domain for the ingress item at index {index}" + ) + + else: + if is_openshift_cluster(): + # We can try get the domain through checking ingresses.config.openshift.io + try: + config_check() + api_client = client.CustomObjectsApi(api_config_handler()) + ingress = api_client.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + domain = ingress["spec"]["domain"] + elif ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. 
For Kubernetes Clusters please specify an ingress domain" + ) + else: + domain = ingress_domain + command = command.replace("server-name", domain) + update_rayclient_ingress( + rayclient_ingress_item, cluster_name, namespace, ingress_options, domain + ) item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" @@ -493,12 +688,14 @@ def generate_appwrapper( dispatch_priority: str, priority_val: int, openshift_oauth: bool, + ingress_domain: str, + ingress_options: dict, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) resources = user_yaml.get("spec", "resources") item = resources["resources"].get("GenericItems")[0] - route_item = resources["resources"].get("GenericItems")[1] + ingress_item = resources["resources"].get("GenericItems")[1] update_names(user_yaml, item, appwrapper_name, cluster_name, namespace) update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) @@ -531,9 +728,13 @@ def generate_appwrapper( head_memory, head_gpus, ) - update_dashboard_route(route_item, cluster_name, namespace) + update_dashboard_ingress( + ingress_item, cluster_name, namespace, ingress_options, ingress_domain + ) if local_interactive: - enable_local_interactive(resources, cluster_name, namespace) + enable_local_interactive( + resources, cluster_name, namespace, ingress_options, ingress_domain + ) else: disable_raycluster_tls(resources["resources"]) diff --git a/src/codeflare_sdk/utils/openshift_oauth.py b/src/codeflare_sdk/utils/openshift_oauth.py index 5c3fc55a..022e9adb 100644 --- a/src/codeflare_sdk/utils/openshift_oauth.py +++ b/src/codeflare_sdk/utils/openshift_oauth.py @@ -1,5 +1,5 @@ from urllib3.util import parse_url -from .generate_yaml import gen_dashboard_route_name +from .generate_yaml import gen_dashboard_ingress_name from .kube_api_helpers import _get_api_host from base64 import b64decode @@ -19,7 +19,7 @@ def create_openshift_oauth_objects(cluster_name, namespace): host = _get_api_host(api_client) # replace "^api" with the expected host - host = f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps" + host.lstrip( + host = f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps" + host.lstrip( "api" ) diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 36cc3a40..9278e2c0 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -180,18 +180,22 @@ spec: priorityClassName: default replicas: 1 - generictemplate: - apiVersion: route.openshift.io/v1 - kind: Route + apiVersion: networking.k8s.io/v1 + kind: Ingress metadata: - labels: - odh-ray-cluster-service: prio-test-cluster-head-svc name: ray-dashboard-prio-test-cluster namespace: ns spec: - port: - targetPort: dashboard - to: - kind: Service - name: prio-test-cluster-head-svc + rules: + - host: ray-dashboard-prio-test-cluster-ns.apps.cluster.awsroute.org + http: + paths: + - backend: + service: + name: prio-test-cluster-head-svc + port: + number: 8265 + path: / + pathType: Prefix replicas: 1 Items: [] diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 8ab27f01..d6701eb4 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -177,18 +177,22 @@ spec: name: init-myservice replicas: 1 - generictemplate: - apiVersion: route.openshift.io/v1 - kind: Route + apiVersion: networking.k8s.io/v1 + kind: Ingress metadata: - labels: - odh-ray-cluster-service: unit-test-cluster-head-svc name: ray-dashboard-unit-test-cluster namespace: ns spec: - port: - 
targetPort: dashboard - to: - kind: Service - name: unit-test-cluster-head-svc + rules: + - host: ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org + http: + paths: + - backend: + service: + name: unit-test-cluster-head-svc + port: + number: 8265 + path: / + pathType: Prefix replicas: 1 Items: [] diff --git a/tests/unit_test.py b/tests/unit_test.py index 8900df5b..0b96fe18 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -19,6 +19,7 @@ import filecmp import os import re +import uuid parent = Path(__file__).resolve().parents[1] sys.path.append(str(parent) + "/src") @@ -77,6 +78,7 @@ ) import codeflare_sdk.utils.kube_api_helpers +from codeflare_sdk.utils.generate_yaml import gen_names, is_openshift_cluster import openshift from openshift.selector import Selector @@ -247,8 +249,8 @@ def test_config_creation(): assert config.local_interactive == False -def test_cluster_creation(): - cluster = createClusterWithConfig() +def test_cluster_creation(mocker): + cluster = createClusterWithConfig(mocker) assert cluster.app_wrapper_yaml == "unit-test-cluster.yaml" assert cluster.app_wrapper_name == "unit-test-cluster" assert filecmp.cmp( @@ -279,6 +281,10 @@ def test_cluster_creation_priority(mocker): config = createClusterConfig() config.name = "prio-test-cluster" config.dispatch_priority = "default" + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) cluster = Cluster(config) assert cluster.app_wrapper_yaml == "prio-test-cluster.yaml" assert cluster.app_wrapper_name == "prio-test-cluster" @@ -292,6 +298,10 @@ def test_default_cluster_creation(mocker): "codeflare_sdk.cluster.cluster.get_current_namespace", return_value="opendatahub", ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) default_config = ClusterConfiguration( name="unit-test-default-cluster", ) @@ -302,6 +312,25 @@ def test_default_cluster_creation(mocker): assert cluster.config.namespace == "opendatahub" +def test_gen_names_with_name(mocker): + mocker.patch.object( + uuid, "uuid4", return_value=uuid.UUID("00000000-0000-0000-0000-000000000001") + ) + name = "myname" + appwrapper_name, cluster_name = gen_names(name) + assert appwrapper_name == name + assert cluster_name == name + + +def test_gen_names_without_name(mocker): + mocker.patch.object( + uuid, "uuid4", return_value=uuid.UUID("00000000-0000-0000-0000-000000000001") + ) + appwrapper_name, cluster_name = gen_names(None) + assert appwrapper_name.startswith("appwrapper-") + assert cluster_name.startswith("cluster-") + + def arg_check_apply_effect(group, version, namespace, plural, body, *args): assert namespace == "ns" assert args == tuple() @@ -350,6 +379,10 @@ def arg_check_del_effect(group, version, namespace, plural, name, *args): def test_cluster_up_down(mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) mocker.patch( "kubernetes.client.CustomObjectsApi.create_namespaced_custom_object", side_effect=arg_check_apply_effect, @@ -362,7 +395,7 @@ def test_cluster_up_down(mocker): "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", return_value={"items": []}, ) - cluster = cluster = createClusterWithConfig() + cluster = cluster = createClusterWithConfig(mocker) cluster.up() cluster.down() @@ -446,41 +479,29 @@ def 
test_rc_status(mocker): assert rc == None -def uri_retreival(group, version, namespace, plural, *args): - assert group == "route.openshift.io" - assert version == "v1" - assert namespace == "ns" - assert plural == "routes" - assert args == tuple() - return { - "items": [ - { - "metadata": {"name": "ray-dashboard-unit-test-cluster"}, - "spec": { - "host": "ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" - }, - } - ] - } - - def test_cluster_uris(mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=uri_retreival, + "codeflare_sdk.cluster.cluster._get_ingress_domain", + return_value="apps.cluster.awsroute.org", + ) + cluster = cluster = createClusterWithConfig(mocker) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval(port=8265), ) - - cluster = cluster = createClusterWithConfig() assert cluster.cluster_uri() == "ray://unit-test-cluster-head-svc.ns.svc:10001" assert ( cluster.cluster_dashboard_uri() == "http://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" ) cluster.config.name = "fake" + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + ) assert ( cluster.cluster_dashboard_uri() - == "Dashboard route not available yet, have you run cluster.up()?" + == "Dashboard ingress not available yet, have you run cluster.up()?" ) @@ -491,7 +512,7 @@ def test_local_client_url(mocker): ) mocker.patch( "codeflare_sdk.cluster.cluster._get_ingress_domain", - return_value="apps.cluster.awsroute.org", + return_value="rayclient-unit-test-cluster-localinter-ns.apps.cluster.awsroute.org", ) mocker.patch( "codeflare_sdk.cluster.cluster.Cluster.create_app_wrapper", @@ -512,14 +533,41 @@ def ray_addr(self, *args): return self._address -def test_ray_job_wrapping(mocker): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=uri_retreival, - ) - cluster = cluster = createClusterWithConfig() +def ingress_retrieval(port): + if port == 10001: + serviceName = "client" + else: + serviceName = "dashboard" + mock_ingress = client.V1Ingress( + metadata=client.V1ObjectMeta(name=f"ray-{serviceName}-unit-test-cluster"), + spec=client.V1IngressSpec( + rules=[ + client.V1IngressRule( + host=f"ray-{serviceName}-unit-test-cluster-ns.apps.cluster.awsroute.org", + http=client.V1HTTPIngressRuleValue( + paths=[ + client.V1HTTPIngressPath( + path_type="Prefix", + path="/", + backend=client.V1IngressBackend( + service=client.V1IngressServiceBackend( + name="head-svc-test", + port=client.V1ServiceBackendPort(number=port), + ) + ), + ) + ] + ), + ) + ], + ), + ) + mock_ingress_list = client.V1IngressList(items=[mock_ingress]) + return mock_ingress_list + +def test_ray_job_wrapping(mocker): + cluster = cluster = createClusterWithConfig(mocker) mocker.patch( "ray.job_submission.JobSubmissionClient._check_connection_and_version_with_url", return_value="None", @@ -528,6 +576,14 @@ def test_ray_job_wrapping(mocker): ray.job_submission.JobSubmissionClient, "list_jobs", autospec=True ) mock_res.side_effect = ray_addr + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval(8265), + ) assert 
cluster.list_jobs() == cluster.cluster_dashboard_uri() mock_res = mocker.patch.object( @@ -604,6 +660,10 @@ def test_print_appwrappers(capsys): def test_ray_details(mocker, capsys): + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) ray1 = RayCluster( name="raytest1", status=RayClusterStatus.READY, @@ -1664,6 +1724,10 @@ def get_aw_obj(group, version, namespace, plural): def test_get_cluster(mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) mocker.patch( "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", side_effect=get_ray_obj, @@ -1692,6 +1756,9 @@ def test_list_clusters(mocker, capsys): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", side_effect=get_obj_none, ) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + ) list_all_clusters("ns") captured = capsys.readouterr() assert captured.out == ( @@ -1764,6 +1831,10 @@ def test_list_queue(mocker, capsys): def test_cluster_status(mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) fake_aw = AppWrapper( "test", AppWrapperStatus.FAILED, can_run=True, job_state="unused" ) @@ -1845,6 +1916,14 @@ def test_cluster_status(mocker): def test_wait_ready(mocker, capsys): + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval(8265), + ) mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) mocker.patch("codeflare_sdk.cluster.cluster._ray_cluster_status", return_value=None) @@ -1893,9 +1972,13 @@ def test_wait_ready(mocker, capsys): ) -def test_jobdefinition_coverage(): +def test_jobdefinition_coverage(mocker): + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) abstract = JobDefinition() - cluster = createClusterWithConfig() + cluster = createClusterWithConfig(mocker) abstract._dry_run(cluster) abstract.submit(cluster) @@ -1937,8 +2020,8 @@ def test_DDPJobDefinition_dry_run(mocker: MockerFixture): ) mocker.patch.object(Cluster, "job_client") ddp = createTestDDP() - cluster = createClusterWithConfig() - ddp_job, _ = ddp._dry_run(cluster) + cluster = createClusterWithConfig(mocker) + ddp_job, _ = ddp._dry_run(mocker, cluster) assert type(ddp_job) == AppDryRunInfo assert ddp_job._fmt is not None assert type(ddp_job.request) == RayJob @@ -2006,11 +2089,15 @@ def test_DDPJobDefinition_dry_run_no_resource_args(mocker): when the job definition does not specify resources. 
""" mocker.patch.object(Cluster, "job_client") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) mocker.patch( "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", return_value="", ) - cluster = createClusterWithConfig() + cluster = createClusterWithConfig(mocker) ddp = DDPJobDefinition( script="test.py", m=None, @@ -2099,7 +2186,7 @@ def test_DDPJobDefinition_submit(mocker: MockerFixture): mock_schedule.return_value = "fake-dashboard-url" mocker.patch.object(Cluster, "job_client") ddp_def = createTestDDP() - cluster = createClusterWithConfig() + cluster = createClusterWithConfig(mocker) mocker.patch( "codeflare_sdk.job.jobs.get_current_namespace", side_effect="opendatahub", @@ -2131,9 +2218,9 @@ def test_DDPJob_creation(mocker: MockerFixture): Cluster, "cluster_dashboard_uri", return_value="fake-dashboard-url" ) ddp_def = createTestDDP() - cluster = createClusterWithConfig() + cluster = createClusterWithConfig(mocker) mock_schedule.return_value = "fake-dashboard-url" - ddp_job = createDDPJob_with_cluster(ddp_def, cluster) + ddp_job = createDDPJob_with_cluster(mocker, ddp_def, cluster) assert type(ddp_job) == DDPJob assert type(ddp_job.job_definition) == DDPJobDefinition assert type(ddp_job.cluster) == Cluster @@ -2179,8 +2266,8 @@ def test_DDPJob_status(mocker: MockerFixture): mocker.patch.object(Runner, "status", mock_status) test_DDPJob_creation(mocker) ddp_def = createTestDDP() - cluster = createClusterWithConfig() - ddp_job = createDDPJob_with_cluster(ddp_def, cluster) + cluster = createClusterWithConfig(mocker) + ddp_job = createDDPJob_with_cluster(mocker, ddp_def, cluster) mock_status.return_value = "fake-status" assert ddp_job.status() == "fake-status" _, args, kwargs = mock_status.mock_calls[0] @@ -2193,8 +2280,8 @@ def test_DDPJob_logs(mocker: MockerFixture): # Setup the neccesary mock patches test_DDPJob_creation(mocker) ddp_def = createTestDDP() - cluster = createClusterWithConfig() - ddp_job = createDDPJob_with_cluster(ddp_def, cluster) + cluster = createClusterWithConfig(mocker) + ddp_job = createDDPJob_with_cluster(mocker, ddp_def, cluster) mock_log.return_value = "fake-logs" assert ddp_job.logs() == "fake-logs" _, args, kwargs = mock_log.mock_calls[0] @@ -2337,6 +2424,21 @@ def secret_ca_retreival(secret_name, namespace): return client.models.V1Secret(data=data) +def test_is_openshift_cluster(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch.object( + client.CustomObjectsApi, + "get_cluster_custom_object", + side_effect=client.ApiException(status=404), + ) + assert is_openshift_cluster() == False + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) + assert is_openshift_cluster() == True + + def test_generate_tls_cert(mocker): """ test the function codeflare_sdk.utils.generate_ca_cert generates the correct outputs diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index a4ea056a..fea4ceaf 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -46,14 +46,21 @@ def createClusterConfig(): instascale=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], + ingress_domain="apps.cluster.awsroute.org", ) return config -def createClusterWithConfig(): +def createClusterWithConfig(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + 
"kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) cluster = Cluster(createClusterConfig()) return cluster -def createDDPJob_with_cluster(ddp_def, cluster=createClusterWithConfig()): +def createDDPJob_with_cluster(mocker, ddp_def, cluster=None): + cluster = createClusterWithConfig(mocker) return DDPJob(ddp_def, cluster) From a0f80b51759d32a9eda2fa1cb8b2e0f359b9adca Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 23 Oct 2023 18:37:24 +0100 Subject: [PATCH 041/496] Fixed unit tests and ingress related methods --- src/codeflare_sdk/cluster/cluster.py | 21 +++++++++++++-------- src/codeflare_sdk/utils/generate_yaml.py | 6 ++++-- tests/test-case-no-mcad.yamls | 22 +++++++++++++--------- tests/unit_test.py | 22 ++++++++++++++++++---- 4 files changed, 48 insertions(+), 23 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 808aa007..4ddfcfd3 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -412,15 +412,17 @@ def cluster_dashboard_uri(self) -> str: for ingress in ingresses.items: annotations = ingress.metadata.annotations - if ingress.metadata.name == f"ray-dashboard-{self.config.name}" or ingress.metadata.name.startswith( - f"{self.config.name}-ingress" ): + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{self.config.name}" + or ingress.metadata.name.startswith(f"{self.config.name}-ingress") + ): if annotations == None: protocol = "http" elif "route.openshift.io/termination" in annotations: protocol = "https" return f"{protocol}://{ingress.spec.rules[0].host}" - return "Dashboard route not available yet, have you run cluster.up()?" - + return "Dashboard ingress not available yet, have you run cluster.up()?" 
def list_jobs(self) -> List: """ @@ -665,8 +667,8 @@ def _get_ingress_domain(self): # pragma: no cover namespace = self.config.namespace else: namespace = get_current_namespace() - ingresses = api_client.list_namespaced_ingress(namespace) - except Exception as e: # pragma: no cover + ingresses = api_client.list_namespaced_ingress(namespace) + except Exception as e: # pragma: no cover return _kube_api_error_handling(e) domain = None for ingress in ingresses.items: @@ -773,8 +775,11 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: ray_ingress = None for ingress in ingresses.items: annotations = ingress.metadata.annotations - if ingress.metadata.name == f"ray-dashboard-{rc['metadata']['name']}" or ingress.metadata.name.startswith( - f"{rc['metadata']['name']}-ingress" ): + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{rc['metadata']['name']}" + or ingress.metadata.name.startswith(f"{rc['metadata']['name']}-ingress") + ): if annotations == None: protocol = "http" elif "route.openshift.io/termination" in annotations: diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index e5ca4bd6..3cf886cf 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -50,9 +50,11 @@ def gen_names(name): else: return name, name -def gen_dashboard_route_name(cluster_name): + +def gen_dashboard_ingress_name(cluster_name): return f"ray-dashboard-{cluster_name}" + # Check if the ingress api cluster resource exists def is_openshift_cluster(): try: @@ -586,7 +588,7 @@ def enable_openshift_oauth(user_yaml, cluster_name, namespace): port_name = "oauth-proxy" host = _get_api_host(k8_client) host = host.replace( - "api.", f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps." + "api.", f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps." 
) oauth_sidecar = _create_oauth_sidecar_object( namespace, diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index b48e7cf5..0416aa7b 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -147,16 +147,20 @@ spec: image: busybox:1.28 name: init-myservice --- -apiVersion: route.openshift.io/v1 -kind: Route +apiVersion: networking.k8s.io/v1 +kind: Ingress metadata: - labels: - odh-ray-cluster-service: unit-test-cluster-ray-head-svc name: ray-dashboard-unit-test-cluster-ray namespace: ns spec: - port: - targetPort: dashboard - to: - kind: Service - name: unit-test-cluster-ray-head-svc + rules: + - host: ray-dashboard-unit-test-cluster-ray-ns.apps.cluster.awsroute.org + http: + paths: + - backend: + service: + name: unit-test-cluster-ray-head-svc + port: + number: 8265 + path: / + pathType: Prefix diff --git a/tests/unit_test.py b/tests/unit_test.py index 0b96fe18..c5be392c 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -258,7 +258,11 @@ def test_cluster_creation(mocker): ) -def test_cluster_creation_no_mcad(): +def test_cluster_creation_no_mcad(mocker): + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) config = createClusterConfig() config.name = "unit-test-cluster-ray" config.mcad = False @@ -402,6 +406,10 @@ def test_cluster_up_down(mocker): def test_cluster_up_down_no_mcad(mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) mocker.patch( "kubernetes.client.CustomObjectsApi.create_namespaced_custom_object", side_effect=arg_check_apply_effect, @@ -431,14 +439,16 @@ def arg_check_list_effect(group, version, plural, name, *args): return {"spec": {"domain": "test"}} -def test_get_ingress_domain(mocker): +""" +def test_get_ingress_domain(self, mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", side_effect=arg_check_list_effect, ) - domain = _get_ingress_domain() + domain = _get_ingress_domain(self) assert domain == "test" +""" def aw_status_fields(group, version, namespace, plural, *args): @@ -2021,7 +2031,7 @@ def test_DDPJobDefinition_dry_run(mocker: MockerFixture): mocker.patch.object(Cluster, "job_client") ddp = createTestDDP() cluster = createClusterWithConfig(mocker) - ddp_job, _ = ddp._dry_run(mocker, cluster) + ddp_job, _ = ddp._dry_run(cluster) assert type(ddp_job) == AppDryRunInfo assert ddp_job._fmt is not None assert type(ddp_job.request) == RayJob @@ -2599,6 +2609,10 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): "codeflare_sdk.cluster.cluster.get_current_namespace", return_value="opendatahub", ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) write_user_appwrapper = MagicMock() mocker.patch( "codeflare_sdk.utils.generate_yaml.write_user_appwrapper", write_user_appwrapper From 387b573a1269518dc6fc6adb268851a0eaf3aa7e Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 23 Oct 2023 19:10:01 +0100 Subject: [PATCH 042/496] updated unit tests --- src/codeflare_sdk/cluster/cluster.py | 6 ++-- tests/unit_test.py | 42 ++++++++++++++++++++++++++-- 2 files changed, 42 insertions(+), 6 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py 
b/src/codeflare_sdk/cluster/cluster.py index 4ddfcfd3..a2871000 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -342,7 +342,7 @@ def is_dashboard_ready(self) -> bool: timeout=5, verify=self._client_verify_tls, ) - except requests.exceptions.SSLError: + except requests.exceptions.SSLError: # pragma no cover # SSL exception occurs when oauth ingress has been created but cluster is not up return False if response.status_code == 200: @@ -407,7 +407,7 @@ def cluster_dashboard_uri(self) -> str: config_check() api_instance = client.NetworkingV1Api(api_config_handler()) ingresses = api_instance.list_namespaced_ingress(self.config.namespace) - except Exception as e: + except Exception as e: # pragma no cover return _kube_api_error_handling(e) for ingress in ingresses.items: @@ -770,7 +770,7 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: config_check() api_instance = client.NetworkingV1Api(api_config_handler()) ingresses = api_instance.list_namespaced_ingress(rc["metadata"]["namespace"]) - except Exception as e: + except Exception as e: # pragma no cover return _kube_api_error_handling(e) ray_ingress = None for ingress in ingresses.items: diff --git a/tests/unit_test.py b/tests/unit_test.py index c5be392c..c6b57736 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -43,7 +43,10 @@ KubeConfigFileAuthentication, config_check, ) -from codeflare_sdk.utils.openshift_oauth import create_openshift_oauth_objects +from codeflare_sdk.utils.openshift_oauth import ( + create_openshift_oauth_objects, + delete_openshift_oauth_objects, +) from codeflare_sdk.utils.pretty_print import ( print_no_resources_found, print_app_wrappers_status, @@ -489,6 +492,27 @@ def test_rc_status(mocker): assert rc == None +def test_delete_openshift_oauth_objects(mocker): + mocker.patch.object(client.CoreV1Api, "delete_namespaced_service_account") + mocker.patch.object(client.CoreV1Api, "delete_namespaced_service") + mocker.patch.object(client.NetworkingV1Api, "delete_namespaced_ingress") + mocker.patch.object(client.RbacAuthorizationV1Api, "delete_cluster_role_binding") + delete_openshift_oauth_objects("test-cluster", "test-namespace") + + client.CoreV1Api.delete_namespaced_service_account.assert_called_with( + name="test-cluster-oauth-proxy", namespace="test-namespace" + ) + client.CoreV1Api.delete_namespaced_service.assert_called_with( + name="test-cluster-oauth", namespace="test-namespace" + ) + client.NetworkingV1Api.delete_namespaced_ingress.assert_called_with( + name="test-cluster-ingress", namespace="test-namespace" + ) + client.RbacAuthorizationV1Api.delete_cluster_role_binding.assert_called_with( + name="test-cluster-rb" + ) + + def test_cluster_uris(mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( @@ -496,6 +520,16 @@ def test_cluster_uris(mocker): return_value="apps.cluster.awsroute.org", ) cluster = cluster = createClusterWithConfig(mocker) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval( + port=8265, annotations={"route.openshift.io/termination": "passthrough"} + ), + ) + assert ( + cluster.cluster_dashboard_uri() + == "https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" + ) mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", return_value=ingress_retrieval(port=8265), @@ -543,13 +577,15 @@ def ray_addr(self, *args): return self._address -def ingress_retrieval(port): +def ingress_retrieval(port, 
annotations=None): if port == 10001: serviceName = "client" else: serviceName = "dashboard" mock_ingress = client.V1Ingress( - metadata=client.V1ObjectMeta(name=f"ray-{serviceName}-unit-test-cluster"), + metadata=client.V1ObjectMeta( + name=f"ray-{serviceName}-unit-test-cluster", annotations=annotations + ), spec=client.V1IngressSpec( rules=[ client.V1IngressRule( From 1e4fb19efc9c1670061e79834234dd75d24232a7 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 24 Oct 2023 12:09:07 +0100 Subject: [PATCH 043/496] Removed ability to customise ray client --- src/codeflare_sdk/utils/generate_yaml.py | 114 +++++++---------------- 1 file changed, 34 insertions(+), 80 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 3cf886cf..49e7212b 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -149,94 +149,50 @@ def update_dashboard_ingress( def update_rayclient_ingress( - ingress_item, cluster_name, namespace, ingress_options, ingress_domain + ingress_item, cluster_name, namespace, ingress_domain ): # pragma: no cover metadata = ingress_item.get("generictemplate", {}).get("metadata") spec = ingress_item.get("generictemplate", {}).get("spec") - if ingress_options != {}: - for index, ingress_option in enumerate(ingress_options["ingresses"]): - if "ingressName" not in ingress_option.keys(): - raise ValueError( - f"Error: 'ingressName' is missing or empty for ingress item at index {index}" - ) - if "port" not in ingress_option.keys(): - raise ValueError( - f"Error: 'port' is missing or empty for ingress item at index {index}" - ) - elif not isinstance(ingress_option["port"], int): - raise ValueError( - f"Error: 'port' is not of type int for ingress item at index {index}" - ) - if ingress_option["port"] == 10001: - metadata["name"] = ingress_option["ingressName"] - metadata["namespace"] = namespace - if "annotations" not in ingress_option.keys(): - del metadata["annotations"] - else: - metadata["annotations"] = ingress_option["annotations"] - if "path" not in ingress_option.keys(): - del spec["rules"][0]["http"]["paths"][0]["path"] - else: - spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[ - "path" - ] - if "pathType" not in ingress_option.keys(): - spec["rules"][0]["http"]["paths"][0][ - "pathType" - ] = "ImplementationSpecific" - if "host" not in ingress_option.keys(): - del spec["rules"][0]["host"] - else: - spec["rules"][0]["host"] = ingress_option["host"] - if "ingressClassName" not in ingress_option.keys(): - del spec["ingressClassName"] - else: - spec["ingressClassName"] = ingress_option["ingressClassName"] - - spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ - "name" - ] = f"{cluster_name}-head-svc" - else: - metadata["name"] = f"rayclient-{cluster_name}" - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" + metadata["name"] = f"rayclient-{cluster_name}" + metadata["namespace"] = namespace + metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ - "name" - ] = f"{cluster_name}-head-svc" + spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ + "name" + ] = f"{cluster_name}-head-svc" - if is_openshift_cluster(): - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - 
ingressClassName = "openshift-default" - annotations = { - "nginx.ingress.kubernetes.io/rewrite-target": "/", - "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "route.openshift.io/termination": "passthrough", - } - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + if is_openshift_cluster(): + try: + config_check() + api_client = client.CustomObjectsApi(api_config_handler()) + ingress = api_client.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" ) - else: - domain = ingress_domain - ingressClassName = "nginx" + ingressClassName = "openshift-default" annotations = { "nginx.ingress.kubernetes.io/rewrite-target": "/", "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "nginx.ingress.kubernetes.io/ssl-passthrough": "true", + "route.openshift.io/termination": "passthrough", } + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + domain = ingress["spec"]["domain"] + elif ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + ) + else: + domain = ingress_domain + ingressClassName = "nginx" + annotations = { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "nginx.ingress.kubernetes.io/ssl-passthrough": "true", + } - metadata["annotations"] = annotations - spec["ingressClassName"] = ingressClassName - spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{domain}" + metadata["annotations"] = annotations + spec["ingressClassName"] = ingressClassName + spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{domain}" def update_names(yaml, item, appwrapper_name, cluster_name, namespace): @@ -504,9 +460,7 @@ def enable_local_interactive( domain = ingress_domain command = command.replace("server-name", domain) - update_rayclient_ingress( - rayclient_ingress_item, cluster_name, namespace, ingress_options, domain - ) + update_rayclient_ingress(rayclient_ingress_item, cluster_name, namespace, domain) item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" From 428f5c4f255ddfb4384cf6f00cec1a7b97cc1738 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 25 Oct 2023 09:27:46 +0100 Subject: [PATCH 044/496] Removed ingress_options from local_interactive --- src/codeflare_sdk/utils/generate_yaml.py | 54 ++++++++---------------- 1 file changed, 17 insertions(+), 37 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 49e7212b..72fef865 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -395,9 +395,7 @@ def update_ca_secret(ca_secret_item, cluster_name, namespace): data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) -def enable_local_interactive( - resources, cluster_name, namespace, ingress_options, ingress_domain -): +def enable_local_interactive(resources, cluster_name, namespace, ingress_domain): rayclient_ingress_item = resources["resources"].get("GenericItems")[2] ca_secret_item = resources["resources"].get("GenericItems")[3] item = resources["resources"].get("GenericItems")[0] @@ -423,41 +421,23 @@ def enable_local_interactive( command = command.replace("deployment-name", cluster_name) - if ingress_options != {}: - 
for index, ingress_option in enumerate(ingress_options["ingresses"]): - if ingress_option["port"] == 10001: - if "host" not in ingress_option.keys(): - raise ValueError( - f"Client host is not specified please include a host for the ingress item at index {index}" - ) - else: - host = ingress_option["host"] - domain_split = host.split(".", 1) - if len(domain_split) > 1: - domain = domain_split[1] - else: - raise ValueError( - f"The client ingress host is configured incorrectly please specify a host with a correct domain for the ingress item at index {index}" - ) - - else: - if is_openshift_cluster(): - # We can try get the domain through checking ingresses.config.openshift.io - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + if is_openshift_cluster(): + # We can try get the domain through checking ingresses.config.openshift.io + try: + config_check() + api_client = client.CustomObjectsApi(api_config_handler()) + ingress = api_client.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" ) - else: - domain = ingress_domain + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + domain = ingress["spec"]["domain"] + elif ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + ) + else: + domain = ingress_domain command = command.replace("server-name", domain) update_rayclient_ingress(rayclient_ingress_item, cluster_name, namespace, domain) From 3aa1919e05eef2607fbc9fad6393d3c6f5a8a538 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 25 Oct 2023 10:17:40 +0100 Subject: [PATCH 045/496] fixed local_interactive --- src/codeflare_sdk/utils/generate_yaml.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 72fef865..cf9686c4 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -668,9 +668,7 @@ def generate_appwrapper( ingress_item, cluster_name, namespace, ingress_options, ingress_domain ) if local_interactive: - enable_local_interactive( - resources, cluster_name, namespace, ingress_options, ingress_domain - ) + enable_local_interactive(resources, cluster_name, namespace, ingress_domain) else: disable_raycluster_tls(resources["resources"]) From b1dad42dbeb97b6baf67347fa19014292e23071d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 29 Oct 2023 23:27:36 +0000 Subject: [PATCH 046/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index c1490035..607d3de4 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 90% - 90% + 91% + 91% From 104a732738ad5aaf5c2743e9e80501516e028bb7 Mon Sep 17 00:00:00 2001 From: "openshift-ci[bot]" Date: Sun, 29 Oct 2023 23:27:36 +0000 Subject: [PATCH 047/496] [create-pull-request] automated change --- poetry.lock | 103 ++-------------------------------------------------- 1 file changed, 4 insertions(+), 99 deletions(-) diff --git a/poetry.lock b/poetry.lock 
index 13722766..f0be160f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiohttp" version = "3.8.6" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -113,7 +112,6 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aiohttp-cors" version = "0.7.0" description = "CORS support for aiohttp" -category = "main" optional = false python-versions = "*" files = [ @@ -128,7 +126,6 @@ aiohttp = ">=1.1" name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -143,7 +140,6 @@ frozenlist = ">=1.1.0" name = "ansicon" version = "1.89.0" description = "Python wrapper for loading Jason Hood's ANSICON" -category = "main" optional = false python-versions = "*" files = [ @@ -155,7 +151,6 @@ files = [ name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -167,7 +162,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -186,7 +180,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -221,7 +214,6 @@ typecheck = ["mypy"] name = "blessed" version = "1.20.0" description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." -category = "main" optional = false python-versions = ">=2.7" files = [ @@ -238,7 +230,6 @@ wcwidth = ">=0.1.4" name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -250,7 +241,6 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -262,7 +252,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -327,7 +316,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -427,7 +415,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -442,7 +429,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "codeflare-torchx" version = "0.6.0.dev1" description = "TorchX SDK and Components" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -471,7 +457,6 @@ ray = ["ray (>=1.12.1)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -483,7 +468,6 @@ files = [ name = "colorful" version = "0.5.5" description = "Terminal string styling done right, in Python." -category = "main" optional = false python-versions = "*" files = [ @@ -498,7 +482,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "commonmark" version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" -category = "main" optional = false python-versions = "*" files = [ @@ -513,7 +496,6 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -586,7 +568,6 @@ toml = ["tomli"] name = "cryptography" version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -628,7 +609,6 @@ tox = ["tox"] name = "distlib" version = "0.3.7" description = "Distribution utilities" -category = "main" optional = false python-versions = "*" files = [ @@ -640,7 +620,6 @@ files = [ name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -662,7 +641,6 @@ ssh = ["paramiko (>=2.4.3)"] name = "docstring-parser" version = "0.8.1" description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -673,7 +651,6 @@ files = [ name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -688,7 +665,6 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "main" optional = false python-versions = "*" files = [ @@ -703,7 +679,6 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "filelock" version = "3.12.4" description = "A platform independent file lock." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -720,7 +695,6 @@ typing = ["typing-extensions (>=4.7.1)"] name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -791,7 +765,6 @@ files = [ name = "fsspec" version = "2023.9.2" description = "File-system specification" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -827,7 +800,6 @@ tqdm = ["tqdm"] name = "google-api-core" version = "2.12.0" description = "Google API client core library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -850,7 +822,6 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-auth" version = "2.23.3" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -874,7 +845,6 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "googleapis-common-protos" version = "1.60.0" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -892,7 +862,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "gpustat" version = "1.1.1" description = "An utility to monitor NVIDIA GPU status and usage" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -912,7 +881,6 @@ test = ["mockito (>=1.2.1)", "pytest (>=5.4.1)", "pytest-runner"] name = "grpcio" version = "1.59.0" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -979,7 +947,6 @@ protobuf = ["grpcio-tools (>=1.59.0)"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -991,7 +958,6 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1011,7 +977,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.1.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1030,7 +995,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1042,7 +1006,6 @@ files = [ name = "jinxed" version = "1.2.0" description = "Jinxed Terminal Library" -category = "main" optional = false python-versions = "*" files = [ @@ -1057,7 +1020,6 @@ ansicon = {version = "*", markers = "platform_system == \"Windows\""} name = "jsonschema" version = "4.19.1" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1081,7 +1043,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1097,7 +1058,6 @@ referencing = ">=0.28.0" name = "kubernetes" version = "26.1.0" description = "Kubernetes python client" -category = "main" optional = false 
python-versions = ">=3.6" files = [ @@ -1115,7 +1075,7 @@ requests-oauthlib = "*" setuptools = ">=21.0.0" six = ">=1.9.0" urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.0 || >=0.43.0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] @@ -1124,7 +1084,6 @@ adal = ["adal (>=1.0.2)"] name = "mako" version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1144,7 +1103,6 @@ testing = ["pytest"] name = "markdown" version = "3.5" description = "Python implementation of John Gruber's Markdown." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1163,7 +1121,6 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1233,7 +1190,6 @@ files = [ name = "msgpack" version = "1.0.7" description = "MessagePack serializer" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1299,7 +1255,6 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1383,7 +1338,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1395,7 +1349,6 @@ files = [ name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1433,7 +1386,6 @@ files = [ name = "numpy" version = "1.25.2" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.9" files = [ @@ -1468,7 +1420,6 @@ files = [ name = "nvidia-ml-py" version = "12.535.108" description = "Python Bindings for the NVIDIA Management Library" -category = "main" optional = false python-versions = "*" files = [ @@ -1480,7 +1431,6 @@ files = [ name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1497,7 +1447,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "opencensus" version = "0.11.3" description = "A stats collection and distributed tracing framework" -category = "main" optional = false python-versions = "*" files = [ @@ -1513,7 +1462,6 @@ opencensus-context = ">=0.1.3" name = "opencensus-context" version = "0.1.3" description = "OpenCensus Runtime Context" -category = "main" optional = false python-versions = "*" files = [ @@ -1525,7 +1473,6 @@ files = [ name = "openshift-client" version = "1.0.18" description = "OpenShift python client" -category = "main" optional = false python-versions = "*" files = [ @@ -1542,7 +1489,6 @@ six = "*" name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1554,7 +1500,6 @@ files = [ name = "paramiko" version = "3.3.1" description = "SSH2 protocol library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1576,7 +1521,6 @@ invoke = 
["invoke (>=2.0)"] name = "pdoc3" version = "0.10.0" description = "Auto-generate API documentation for Python projects." -category = "dev" optional = false python-versions = ">= 3.6" files = [ @@ -1592,7 +1536,6 @@ markdown = ">=3.0" name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1604,7 +1547,6 @@ files = [ name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1620,7 +1562,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1636,7 +1577,6 @@ testing = ["pytest", "pytest-benchmark"] name = "prometheus-client" version = "0.17.1" description = "Python client for the Prometheus monitoring system." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1651,7 +1591,6 @@ twisted = ["twisted"] name = "protobuf" version = "4.24.4" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1674,7 +1613,6 @@ files = [ name = "psutil" version = "5.9.5" description = "Cross-platform lib for process and system monitoring in Python." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1701,7 +1639,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "py-spy" version = "0.3.14" description = "Sampling profiler for Python programs" -category = "main" optional = false python-versions = "*" files = [ @@ -1718,7 +1655,6 @@ files = [ name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1730,7 +1666,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1745,7 +1680,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1757,7 +1691,6 @@ files = [ name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1810,7 +1743,6 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1825,7 +1757,6 @@ plugins = ["importlib-metadata"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1852,7 +1783,6 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyre-extensions" version = "0.0.30" description = "Type system extensions for use with the pyre type checker" -category = "main" optional = false python-versions = "*" files = [ @@ -1868,7 +1798,6 @@ typing-inspect = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1891,7 +1820,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1909,7 +1837,6 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -1924,7 +1851,6 @@ six = ">=1.5" name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" files = [ @@ -1948,7 +1874,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2008,7 +1933,6 @@ files = [ name = "ray" version = "2.7.0" description = "Ray provides a simple, universal API for building distributed applications." -category = "main" optional = false python-versions = "*" files = [ @@ -2048,8 +1972,8 @@ filelock = "*" frozenlist = "*" gpustat = {version = ">=1.0.0", optional = true, markers = "extra == \"default\""} grpcio = [ - {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\""}, - {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\""}, + {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\" and extra == \"default\""}, + {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""}, ] jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" @@ -2086,7 +2010,6 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. name = "referencing" version = "0.30.2" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2102,7 +2025,6 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2124,7 +2046,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2143,7 +2064,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "12.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" optional = false python-versions = ">=3.6.3,<4.0.0" files = [ @@ -2163,7 +2083,6 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] name = "rpds-py" version = "0.10.4" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2272,7 +2191,6 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -2287,7 +2205,6 @@ pyasn1 = ">=0.1.3" name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2304,7 +2221,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2316,7 +2232,6 @@ files = [ name = "smart-open" version = "6.4.0" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2338,7 +2253,6 @@ webhdfs = ["requests"] name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2353,7 +2267,6 @@ widechars = ["wcwidth"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2365,7 +2278,6 @@ files = [ name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2377,7 +2289,6 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." -category = "main" optional = false python-versions = "*" files = [ @@ -2393,7 +2304,6 @@ typing-extensions = ">=3.7.4" name = "urllib3" version = "1.26.17" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -2410,7 +2320,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "virtualenv" version = "20.21.0" description = "Virtual Python Environment builder" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2431,7 +2340,6 @@ test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess name = "wcwidth" version = "0.2.8" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -2443,7 +2351,6 @@ files = [ name = "websocket-client" version = "1.6.4" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2460,7 +2367,6 @@ test = ["websockets"] name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2548,7 +2454,6 @@ multidict = ">=4.0" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ From 3f856a7120839a0ac45eac5133c22bcfaf66d5c9 Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Tue, 31 Oct 2023 15:50:45 +0000 Subject: [PATCH 048/496] Changes in docs for release: v0.11.0 --- docs/cluster/cluster.html | 192 ++++++++------- docs/cluster/config.html | 20 +- docs/utils/generate_yaml.html | 414 ++++++++++++++++++++++++++------ docs/utils/openshift_oauth.html | 6 +- 4 files changed, 468 insertions(+), 164 deletions(-) diff --git a/docs/cluster/cluster.html b/docs/cluster/cluster.html index e33b9cc2..8a313dc7 100644 --- a/docs/cluster/cluster.html +++ b/docs/cluster/cluster.html @@ -60,7 +60,9 @@

    Module codeflare_sdk.cluster.cluster

    from .auth import config_check, api_config_handler from ..utils import pretty_print -from ..utils.generate_yaml import generate_appwrapper +from ..utils.generate_yaml import ( + generate_appwrapper, +) from ..utils.kube_api_helpers import _kube_api_error_handling from ..utils.openshift_oauth import ( create_openshift_oauth_objects, @@ -207,6 +209,8 @@

    local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority + ingress_domain = self.config.ingress_domain + ingress_options = self.config.ingress_options return generate_appwrapper( name=name, namespace=namespace, @@ -230,6 +234,8 @@

    dispatch_priority=dispatch_priority, priority_val=priority_val, openshift_oauth=self.config.openshift_oauth, + ingress_domain=ingress_domain, + ingress_options=ingress_options, ) # creates a new cluster with the provided or default spec @@ -368,7 +374,7 @@

    timeout=5, verify=self._client_verify_tls, ) - except requests.exceptions.SSLError: + except requests.exceptions.SSLError: # pragma no cover # SSL exception occurs when oauth ingress has been created but cluster is not up return False if response.status_code == 200: @@ -431,27 +437,24 @@

    """ try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - routes = api_instance.list_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=self.config.namespace, - plural="routes", - ) - except Exception as e: # pragma: no cover + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(self.config.namespace) + except Exception as e: # pragma no cover return _kube_api_error_handling(e) - for route in routes["items"]: - if route["metadata"][ - "name" - ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ - "name" - ].startswith( - f"{self.config.name}-ingress" + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{self.config.name}" + or ingress.metadata.name.startswith(f"{self.config.name}-ingress") ): - protocol = "https" if route["spec"].get("tls") else "http" - return f"{protocol}://{route['spec']['host']}" - return "Dashboard route not available yet, have you run cluster.up()?" + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + return f"{protocol}://{ingress.spec.rules[0].host}" + return "Dashboard ingress not available yet, have you run cluster.up()?" def list_jobs(self) -> List: """ @@ -530,8 +533,8 @@

    def local_client_url(self): if self.config.local_interactive == True: - ingress_domain = _get_ingress_domain() - return f"ray://rayclient-{self.config.name}-{self.config.namespace}.{ingress_domain}" + ingress_domain = _get_ingress_domain(self) + return f"ray://{ingress_domain}" else: return "None" @@ -687,16 +690,23 @@

    return False -def _get_ingress_domain(): +# Cant test this until get_current_namespace is fixed +def _get_ingress_domain(self): # pragma: no cover try: config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) + api_client = client.NetworkingV1Api(api_config_handler()) + if self.config.namespace != None: + namespace = self.config.namespace + else: + namespace = get_current_namespace() + ingresses = api_client.list_namespaced_ingress(namespace) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) - return ingress["spec"]["domain"] + domain = None + for ingress in ingresses.items: + if ingress.spec.rules[0].http.paths[0].backend.service.port.number == 10001: + domain = ingress.spec.rules[0].host + return domain def _app_wrapper_status(name, namespace="default") -> Optional[AppWrapper]: @@ -788,27 +798,25 @@

    status = RayClusterStatus(rc["status"]["state"].lower()) else: status = RayClusterStatus.UNKNOWN - - config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - # UPDATE THIS - routes = api_instance.list_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=rc["metadata"]["namespace"], - plural="routes", - ) - ray_route = None - for route in routes["items"]: - if route["metadata"][ - "name" - ] == f"ray-dashboard-{rc['metadata']['name']}" or route["metadata"][ - "name" - ].startswith( - f"{rc['metadata']['name']}-ingress" + try: + config_check() + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(rc["metadata"]["namespace"]) + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) + ray_ingress = None + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{rc['metadata']['name']}" + or ingress.metadata.name.startswith(f"{rc['metadata']['name']}-ingress") ): - protocol = "https" if route["spec"].get("tls") else "http" - ray_route = f"{protocol}://{route['spec']['host']}" + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + ray_ingress = f"{protocol}://{ingress.spec.rules[0].host}" return RayCluster( name=rc["metadata"]["name"], @@ -826,7 +834,6 @@

    Module codeflare_sdk.cluster.cluster

    ]["resources"]["limits"]["cpu"], worker_gpu=0, # hard to detect currently how many gpus, can override it with what the user asked for namespace=rc["metadata"]["namespace"], - dashboard=ray_route, head_cpus=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ "resources" ]["limits"]["cpu"], @@ -836,6 +843,7 @@

    Module codeflare_sdk.cluster.cluster

    head_gpu=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ "resources" ]["limits"]["nvidia.com/gpu"], + dashboard=ray_ingress, ) @@ -1136,6 +1144,8 @@

    Classes

        local_interactive = self.config.local_interactive
        image_pull_secrets = self.config.image_pull_secrets
        dispatch_priority = self.config.dispatch_priority
+        ingress_domain = self.config.ingress_domain
+        ingress_options = self.config.ingress_options
        return generate_appwrapper(
            name=name,
            namespace=namespace,
@@ -1159,6 +1169,8 @@

    Classes

            dispatch_priority=dispatch_priority,
            priority_val=priority_val,
            openshift_oauth=self.config.openshift_oauth,
+            ingress_domain=ingress_domain,
+            ingress_options=ingress_options,
        )

    # creates a new cluster with the provided or default spec
@@ -1297,7 +1309,7 @@

    Classes

                timeout=5,
                verify=self._client_verify_tls,
            )
-        except requests.exceptions.SSLError:
+        except requests.exceptions.SSLError:  # pragma no cover
            # SSL exception occurs when oauth ingress has been created but cluster is not up
            return False
        if response.status_code == 200:
@@ -1360,27 +1372,24 @@

    Classes

    """ try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - routes = api_instance.list_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=self.config.namespace, - plural="routes", - ) - except Exception as e: # pragma: no cover + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(self.config.namespace) + except Exception as e: # pragma no cover return _kube_api_error_handling(e) - for route in routes["items"]: - if route["metadata"][ - "name" - ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ - "name" - ].startswith( - f"{self.config.name}-ingress" + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{self.config.name}" + or ingress.metadata.name.startswith(f"{self.config.name}-ingress") ): - protocol = "https" if route["spec"].get("tls") else "http" - return f"{protocol}://{route['spec']['host']}" - return "Dashboard route not available yet, have you run cluster.up()?" + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + return f"{protocol}://{ingress.spec.rules[0].host}" + return "Dashboard ingress not available yet, have you run cluster.up()?" def list_jobs(self) -> List: """ @@ -1459,8 +1468,8 @@

    Classes

    def local_client_url(self):
        if self.config.local_interactive == True:
-            ingress_domain = _get_ingress_domain()
-            return f"ray://rayclient-{self.config.name}-{self.config.namespace}.{ingress_domain}"
+            ingress_domain = _get_ingress_domain(self)
+            return f"ray://{ingress_domain}"
        else:
            return "None"

@@ -1580,27 +1589,24 @@

    Methods

    """ try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - routes = api_instance.list_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=self.config.namespace, - plural="routes", - ) - except Exception as e: # pragma: no cover + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(self.config.namespace) + except Exception as e: # pragma no cover return _kube_api_error_handling(e) - for route in routes["items"]: - if route["metadata"][ - "name" - ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ - "name" - ].startswith( - f"{self.config.name}-ingress" + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{self.config.name}" + or ingress.metadata.name.startswith(f"{self.config.name}-ingress") ): - protocol = "https" if route["spec"].get("tls") else "http" - return f"{protocol}://{route['spec']['host']}" - return "Dashboard route not available yet, have you run cluster.up()?" + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + return f"{protocol}://{ingress.spec.rules[0].host}" + return "Dashboard ingress not available yet, have you run cluster.up()?"
    @@ -1678,6 +1684,8 @@

    Methods

        local_interactive = self.config.local_interactive
        image_pull_secrets = self.config.image_pull_secrets
        dispatch_priority = self.config.dispatch_priority
+        ingress_domain = self.config.ingress_domain
+        ingress_options = self.config.ingress_options
        return generate_appwrapper(
            name=name,
            namespace=namespace,
@@ -1701,6 +1709,8 @@

    Methods

            dispatch_priority=dispatch_priority,
            priority_val=priority_val,
            openshift_oauth=self.config.openshift_oauth,
+            ingress_domain=ingress_domain,
+            ingress_options=ingress_options,
        )
    @@ -1858,7 +1868,7 @@

    Methods

                timeout=5,
                verify=self._client_verify_tls,
            )
-        except requests.exceptions.SSLError:
+        except requests.exceptions.SSLError:  # pragma no cover
            # SSL exception occurs when oauth ingress has been created but cluster is not up
            return False
        if response.status_code == 200:
@@ -1926,8 +1936,8 @@

    Methods

    def local_client_url(self):
         if self.config.local_interactive == True:
    -        ingress_domain = _get_ingress_domain()
    -        return f"ray://rayclient-{self.config.name}-{self.config.namespace}.{ingress_domain}"
    +        ingress_domain = _get_ingress_domain(self)
    +        return f"ray://{ingress_domain}"
         else:
             return "None"
diff --git a/docs/cluster/config.html b/docs/cluster/config.html
index 37242b17..f9b762ba 100644
--- a/docs/cluster/config.html
+++ b/docs/cluster/config.html
@@ -84,7 +84,9 @@

    Module codeflare_sdk.cluster.config

    local_interactive: bool = False
    image_pull_secrets: list = field(default_factory=list)
    dispatch_priority: str = None
-    openshift_oauth: bool = False  # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator
+    openshift_oauth: bool = False  # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator
+    ingress_options: dict = field(default_factory=dict)
+    ingress_domain: str = None
    @@ -98,7 +100,7 @@

    Classes

class ClusterConfiguration
-(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = 'quay.io/project-codeflare/ray:latest-py39-cu118', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False)
+(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = 'quay.io/project-codeflare/ray:latest-py39-cu118', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False, ingress_options: dict = <factory>, ingress_domain: str = None)

This dataclass is used to specify resource requirements and other details, and
@@ -134,7 +136,9 @@

    Classes

    local_interactive: bool = False
    image_pull_secrets: list = field(default_factory=list)
    dispatch_priority: str = None
-    openshift_oauth: bool = False  # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator
+    openshift_oauth: bool = False  # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator
+    ingress_options: dict = field(default_factory=dict)
+    ingress_domain: str = None
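To illustrate how the two new fields are meant to be supplied, a hedged configuration sketch; every value below is a placeholder rather than a value taken from the patch, and constructing the dataclass alone has no cluster-side effect.

    # Sketch only: ingress_domain is required on plain Kubernetes, while
    # ingress_options can override the generated dashboard Ingress.
    from codeflare_sdk.cluster.config import ClusterConfiguration

    config = ClusterConfiguration(
        name="example",
        namespace="default",
        image="quay.io/project-codeflare/ray:latest-py39-cu118",
        ingress_domain="apps.example.com",  # placeholder wildcard domain
        ingress_options={},  # empty dict keeps the generated defaults
    )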

    Class variables

    @@ -170,6 +174,14 @@

    Class variables

    +
    var ingress_domain : str
    +
    +
    +
    +
    var ingress_options : dict
    +
    +
    +
    var instascale : bool
    @@ -255,6 +267,8 @@

    head_memory
  • image
  • image_pull_secrets
  • +
  • ingress_domain
  • +
  • ingress_options
  • instascale
  • local_interactive
  • machine_types
  • diff --git a/docs/utils/generate_yaml.html b/docs/utils/generate_yaml.html index fa449d8d..ba244664 100644 --- a/docs/utils/generate_yaml.html +++ b/docs/utils/generate_yaml.html @@ -82,27 +82,148 @@

    Module codeflare_sdk.utils.generate_yaml

    return name, name -def update_dashboard_route(route_item, cluster_name, namespace): - metadata = route_item.get("generictemplate", {}).get("metadata") - metadata["name"] = gen_dashboard_route_name(cluster_name) - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - spec = route_item.get("generictemplate", {}).get("spec") - spec["to"]["name"] = f"{cluster_name}-head-svc" +def gen_dashboard_ingress_name(cluster_name): + return f"ray-dashboard-{cluster_name}" -def gen_dashboard_route_name(cluster_name): - return f"ray-dashboard-{cluster_name}" +# Check if the ingress api cluster resource exists +def is_openshift_cluster(): + try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" + ) + return True + except client.ApiException as e: # pragma: no cover + if e.status == 404 or e.status == 403: + return False + else: + print(f"Error detecting cluster type defaulting to Kubernetes: {e}") + return False + + +def update_dashboard_ingress( + ingress_item, cluster_name, namespace, ingress_options, ingress_domain +): # pragma: no cover + metadata = ingress_item.get("generictemplate", {}).get("metadata") + spec = ingress_item.get("generictemplate", {}).get("spec") + if ingress_options != {}: + for index, ingress_option in enumerate(ingress_options["ingresses"]): + if "ingressName" not in ingress_option.keys(): + raise ValueError( + f"Error: 'ingressName' is missing or empty for ingress item at index {index}" + ) + if "port" not in ingress_option.keys(): + raise ValueError( + f"Error: 'port' is missing or empty for ingress item at index {index}" + ) + elif not isinstance(ingress_option["port"], int): + raise ValueError( + f"Error: 'port' is not of type int for ingress item at index {index}" + ) + if ingress_option["port"] == 8265: + metadata["name"] = ingress_option["ingressName"] + metadata["namespace"] = namespace + if "annotations" not in ingress_option.keys(): + del metadata["annotations"] + else: + metadata["annotations"] = ingress_option["annotations"] + if "path" not in ingress_option.keys(): + del spec["rules"][0]["http"]["paths"][0]["path"] + else: + spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[ + "path" + ] + if "pathType" not in ingress_option.keys(): + spec["rules"][0]["http"]["paths"][0][ + "pathType" + ] = "ImplementationSpecific" + if "host" not in ingress_option.keys(): + del spec["rules"][0]["host"] + else: + spec["rules"][0]["host"] = ingress_option["host"] + if "ingressClassName" not in ingress_option.keys(): + del spec["ingressClassName"] + else: + spec["ingressClassName"] = ingress_option["ingressClassName"] + + spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ + "name" + ] = f"{cluster_name}-head-svc" + else: + metadata["name"] = f"ray-dashboard-{cluster_name}" + metadata["namespace"] = namespace + spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ + "name" + ] = f"{cluster_name}-head-svc" + if is_openshift_cluster(): + try: + config_check() + api_client = client.CustomObjectsApi(api_config_handler()) + ingress = api_client.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" + ) + del spec["ingressClassName"] + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + domain = ingress["spec"]["domain"] + elif ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. 
For Kubernetes Clusters please specify an ingress domain" + ) + else: + domain = ingress_domain + del metadata["annotations"] + spec["rules"][0]["host"] = f"ray-dashboard-{cluster_name}-{namespace}.{domain}" -# ToDo: refactor the update_x_route() functions -def update_rayclient_route(route_item, cluster_name, namespace): - metadata = route_item.get("generictemplate", {}).get("metadata") + +def update_rayclient_ingress( + ingress_item, cluster_name, namespace, ingress_domain +): # pragma: no cover + metadata = ingress_item.get("generictemplate", {}).get("metadata") + spec = ingress_item.get("generictemplate", {}).get("spec") metadata["name"] = f"rayclient-{cluster_name}" metadata["namespace"] = namespace metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - spec = route_item.get("generictemplate", {}).get("spec") - spec["to"]["name"] = f"{cluster_name}-head-svc" + + spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ + "name" + ] = f"{cluster_name}-head-svc" + + if is_openshift_cluster(): + try: + config_check() + api_client = client.CustomObjectsApi(api_config_handler()) + ingress = api_client.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" + ) + ingressClassName = "openshift-default" + annotations = { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "route.openshift.io/termination": "passthrough", + } + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + domain = ingress["spec"]["domain"] + elif ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + ) + else: + domain = ingress_domain + ingressClassName = "nginx" + annotations = { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "nginx.ingress.kubernetes.io/ssl-passthrough": "true", + } + + metadata["annotations"] = annotations + spec["ingressClassName"] = ingressClassName + spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{domain}" def update_names(yaml, item, appwrapper_name, cluster_name, namespace): @@ -305,11 +426,10 @@

    Module codeflare_sdk.utils.generate_yaml

    data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) -def enable_local_interactive(resources, cluster_name, namespace): - rayclient_route_item = resources["resources"].get("GenericItems")[2] +def enable_local_interactive(resources, cluster_name, namespace, ingress_domain): + rayclient_ingress_item = resources["resources"].get("GenericItems")[2] ca_secret_item = resources["resources"].get("GenericItems")[3] item = resources["resources"].get("GenericItems")[0] - update_rayclient_route(rayclient_route_item, cluster_name, namespace) update_ca_secret(ca_secret_item, cluster_name, namespace) # update_ca_secret_volumes item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"]["volumes"][0][ @@ -331,16 +451,27 @@

    Module codeflare_sdk.utils.generate_yaml

    ][0].get("command")[2] command = command.replace("deployment-name", cluster_name) - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" + + if is_openshift_cluster(): + # We can try get the domain through checking ingresses.config.openshift.io + try: + config_check() + api_client = client.CustomObjectsApi(api_config_handler()) + ingress = api_client.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + domain = ingress["spec"]["domain"] + elif ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] + else: + domain = ingress_domain + command = command.replace("server-name", domain) + update_rayclient_ingress(rayclient_ingress_item, cluster_name, namespace, domain) item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" @@ -422,7 +553,7 @@

    Module codeflare_sdk.utils.generate_yaml

    port_name = "oauth-proxy" host = _get_api_host(k8_client) host = host.replace( - "api.", f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps." + "api.", f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps." ) oauth_sidecar = _create_oauth_sidecar_object( namespace, @@ -524,12 +655,14 @@

    Module codeflare_sdk.utils.generate_yaml

    dispatch_priority: str, priority_val: int, openshift_oauth: bool, + ingress_domain: str, + ingress_options: dict, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) resources = user_yaml.get("spec", "resources") item = resources["resources"].get("GenericItems")[0] - route_item = resources["resources"].get("GenericItems")[1] + ingress_item = resources["resources"].get("GenericItems")[1] update_names(user_yaml, item, appwrapper_name, cluster_name, namespace) update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) @@ -562,9 +695,11 @@

    Module codeflare_sdk.utils.generate_yaml

    head_memory, head_gpus, ) - update_dashboard_route(route_item, cluster_name, namespace) + update_dashboard_ingress( + ingress_item, cluster_name, namespace, ingress_options, ingress_domain + ) if local_interactive: - enable_local_interactive(resources, cluster_name, namespace) + enable_local_interactive(resources, cluster_name, namespace, ingress_domain) else: disable_raycluster_tls(resources["resources"]) @@ -654,7 +789,7 @@

    Functions

-def enable_local_interactive(resources, cluster_name, namespace)
+def enable_local_interactive(resources, cluster_name, namespace, ingress_domain)
    @@ -662,11 +797,10 @@

    Functions

    Expand source code -
    def enable_local_interactive(resources, cluster_name, namespace):
    -    rayclient_route_item = resources["resources"].get("GenericItems")[2]
    +
    def enable_local_interactive(resources, cluster_name, namespace, ingress_domain):
    +    rayclient_ingress_item = resources["resources"].get("GenericItems")[2]
         ca_secret_item = resources["resources"].get("GenericItems")[3]
         item = resources["resources"].get("GenericItems")[0]
    -    update_rayclient_route(rayclient_route_item, cluster_name, namespace)
         update_ca_secret(ca_secret_item, cluster_name, namespace)
         # update_ca_secret_volumes
         item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"]["volumes"][0][
    @@ -688,16 +822,27 @@ 

    Functions

    ][0].get("command")[2] command = command.replace("deployment-name", cluster_name) - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" + + if is_openshift_cluster(): + # We can try get the domain through checking ingresses.config.openshift.io + try: + config_check() + api_client = client.CustomObjectsApi(api_config_handler()) + ingress = api_client.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + domain = ingress["spec"]["domain"] + elif ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] + else: + domain = ingress_domain + command = command.replace("server-name", domain) + update_rayclient_ingress(rayclient_ingress_item, cluster_name, namespace, domain) item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" @@ -724,7 +869,7 @@

    Functions

    port_name = "oauth-proxy" host = _get_api_host(k8_client) host = host.replace( - "api.", f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps." + "api.", f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps." ) oauth_sidecar = _create_oauth_sidecar_object( namespace, @@ -759,8 +904,8 @@

    Functions

    )
    -
    -def gen_dashboard_route_name(cluster_name) +
    +def gen_dashboard_ingress_name(cluster_name)
    @@ -768,7 +913,7 @@

    Functions

    Expand source code -
    def gen_dashboard_route_name(cluster_name):
    +
    def gen_dashboard_ingress_name(cluster_name):
         return f"ray-dashboard-{cluster_name}"
    @@ -792,7 +937,7 @@

    Functions

-def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, mcad: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int, openshift_oauth: bool)
+def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, mcad: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int, openshift_oauth: bool, ingress_domain: str, ingress_options: dict)
    @@ -823,12 +968,14 @@

    Functions

    dispatch_priority: str, priority_val: int, openshift_oauth: bool, + ingress_domain: str, + ingress_options: dict, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) resources = user_yaml.get("spec", "resources") item = resources["resources"].get("GenericItems")[0] - route_item = resources["resources"].get("GenericItems")[1] + ingress_item = resources["resources"].get("GenericItems")[1] update_names(user_yaml, item, appwrapper_name, cluster_name, namespace) update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) @@ -861,9 +1008,11 @@

    Functions

    head_memory, head_gpus, ) - update_dashboard_route(route_item, cluster_name, namespace) + update_dashboard_ingress( + ingress_item, cluster_name, namespace, ingress_options, ingress_domain + ) if local_interactive: - enable_local_interactive(resources, cluster_name, namespace) + enable_local_interactive(resources, cluster_name, namespace, ingress_domain) else: disable_raycluster_tls(resources["resources"]) @@ -878,6 +1027,32 @@

    Functions

    return outfile
    +
    +def is_openshift_cluster() +
    +
    +
    +
    + +Expand source code + +
    def is_openshift_cluster():
    +    try:
    +        config_check()
    +        api_instance = client.CustomObjectsApi(api_config_handler())
    +        api_instance.get_cluster_custom_object(
    +            "config.openshift.io", "v1", "ingresses", "cluster"
    +        )
    +
    +        return True
    +    except client.ApiException as e:  # pragma: no cover
    +        if e.status == 404 or e.status == 403:
    +            return False
    +        else:
    +            print(f"Error detecting cluster type defaulting to Kubernetes: {e}")
    +            return False
    +
    +
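A standalone sketch of the same probe can be handy when experimenting outside the SDK; it assumes kubeconfig access and, like the function above, treats a 404 or 403 on the cluster-scoped OpenShift ingress config as "not OpenShift".

    # Sketch only: detect OpenShift by probing ingresses.config.openshift.io.
    from kubernetes import client, config

    def is_openshift_sketch() -> bool:
        config.load_kube_config()
        api = client.CustomObjectsApi()
        try:
            api.get_cluster_custom_object("config.openshift.io", "v1", "ingresses", "cluster")
            return True
        except client.ApiException as e:
            if e.status not in (404, 403):
                print(f"Error detecting cluster type, defaulting to Kubernetes: {e}")
            return False

    print(is_openshift_sketch())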
    def read_template(template)
    @@ -997,8 +1172,8 @@

    Functions

    sys.exit("Error: malformed template")
    -
    -def update_dashboard_route(route_item, cluster_name, namespace) +
    +def update_dashboard_ingress(ingress_item, cluster_name, namespace, ingress_options, ingress_domain)
    @@ -1006,13 +1181,79 @@

    Functions

    Expand source code -
    def update_dashboard_route(route_item, cluster_name, namespace):
    -    metadata = route_item.get("generictemplate", {}).get("metadata")
    -    metadata["name"] = gen_dashboard_route_name(cluster_name)
    -    metadata["namespace"] = namespace
    -    metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc"
    -    spec = route_item.get("generictemplate", {}).get("spec")
    -    spec["to"]["name"] = f"{cluster_name}-head-svc"
    +
    def update_dashboard_ingress(
    +    ingress_item, cluster_name, namespace, ingress_options, ingress_domain
    +):  # pragma: no cover
    +    metadata = ingress_item.get("generictemplate", {}).get("metadata")
    +    spec = ingress_item.get("generictemplate", {}).get("spec")
    +    if ingress_options != {}:
    +        for index, ingress_option in enumerate(ingress_options["ingresses"]):
    +            if "ingressName" not in ingress_option.keys():
    +                raise ValueError(
    +                    f"Error: 'ingressName' is missing or empty for ingress item at index {index}"
    +                )
    +            if "port" not in ingress_option.keys():
    +                raise ValueError(
    +                    f"Error: 'port' is missing or empty for ingress item at index {index}"
    +                )
    +            elif not isinstance(ingress_option["port"], int):
    +                raise ValueError(
    +                    f"Error: 'port' is not of type int for ingress item at index {index}"
    +                )
    +            if ingress_option["port"] == 8265:
    +                metadata["name"] = ingress_option["ingressName"]
    +                metadata["namespace"] = namespace
    +                if "annotations" not in ingress_option.keys():
    +                    del metadata["annotations"]
    +                else:
    +                    metadata["annotations"] = ingress_option["annotations"]
    +                if "path" not in ingress_option.keys():
    +                    del spec["rules"][0]["http"]["paths"][0]["path"]
    +                else:
    +                    spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[
    +                        "path"
    +                    ]
    +                if "pathType" not in ingress_option.keys():
    +                    spec["rules"][0]["http"]["paths"][0][
    +                        "pathType"
    +                    ] = "ImplementationSpecific"
    +                if "host" not in ingress_option.keys():
    +                    del spec["rules"][0]["host"]
    +                else:
    +                    spec["rules"][0]["host"] = ingress_option["host"]
    +                if "ingressClassName" not in ingress_option.keys():
    +                    del spec["ingressClassName"]
    +                else:
    +                    spec["ingressClassName"] = ingress_option["ingressClassName"]
    +
    +                spec["rules"][0]["http"]["paths"][0]["backend"]["service"][
    +                    "name"
    +                ] = f"{cluster_name}-head-svc"
    +    else:
    +        metadata["name"] = f"ray-dashboard-{cluster_name}"
    +        metadata["namespace"] = namespace
    +        spec["rules"][0]["http"]["paths"][0]["backend"]["service"][
    +            "name"
    +        ] = f"{cluster_name}-head-svc"
    +        if is_openshift_cluster():
    +            try:
    +                config_check()
    +                api_client = client.CustomObjectsApi(api_config_handler())
    +                ingress = api_client.get_cluster_custom_object(
    +                    "config.openshift.io", "v1", "ingresses", "cluster"
    +                )
    +                del spec["ingressClassName"]
    +            except Exception as e:  # pragma: no cover
    +                return _kube_api_error_handling(e)
    +            domain = ingress["spec"]["domain"]
    +        elif ingress_domain is None:
    +            raise ValueError(
    +                "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain"
    +            )
    +        else:
    +            domain = ingress_domain
    +        del metadata["annotations"]
    +        spec["rules"][0]["host"] = f"ray-dashboard-{cluster_name}-{namespace}.{domain}"
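For context, the ingress_options dictionary this function validates has roughly the following shape; the key names come from the checks above, while every value shown is a placeholder.

    # Sketch only: one entry per ingress; the entry with port 8265 (the Ray
    # dashboard port) is the one update_dashboard_ingress rewrites.
    ingress_options = {
        "ingresses": [
            {
                "ingressName": "ray-dashboard-example",  # required
                "port": 8265,                            # required, must be an int
                "annotations": {"nginx.ingress.kubernetes.io/rewrite-target": "/"},
                "path": "/",
                "pathType": "Prefix",
                "host": "ray-dashboard.example.com",
                "ingressClassName": "nginx",
            }
        ]
    }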
    @@ -1188,8 +1429,8 @@

    Functions

    spec.pop("priority")
    -
    -def update_rayclient_route(route_item, cluster_name, namespace) +
    +def update_rayclient_ingress(ingress_item, cluster_name, namespace, ingress_domain)
    @@ -1197,13 +1438,51 @@

    Functions

    Expand source code -
    def update_rayclient_route(route_item, cluster_name, namespace):
    -    metadata = route_item.get("generictemplate", {}).get("metadata")
    +
    def update_rayclient_ingress(
    +    ingress_item, cluster_name, namespace, ingress_domain
    +):  # pragma: no cover
    +    metadata = ingress_item.get("generictemplate", {}).get("metadata")
    +    spec = ingress_item.get("generictemplate", {}).get("spec")
         metadata["name"] = f"rayclient-{cluster_name}"
         metadata["namespace"] = namespace
         metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc"
    -    spec = route_item.get("generictemplate", {}).get("spec")
    -    spec["to"]["name"] = f"{cluster_name}-head-svc"
    + + spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ + "name" + ] = f"{cluster_name}-head-svc" + + if is_openshift_cluster(): + try: + config_check() + api_client = client.CustomObjectsApi(api_config_handler()) + ingress = api_client.get_cluster_custom_object( + "config.openshift.io", "v1", "ingresses", "cluster" + ) + ingressClassName = "openshift-default" + annotations = { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "route.openshift.io/termination": "passthrough", + } + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + domain = ingress["spec"]["domain"] + elif ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + ) + else: + domain = ingress_domain + ingressClassName = "nginx" + annotations = { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "nginx.ingress.kubernetes.io/ssl-passthrough": "true", + } + + metadata["annotations"] = annotations + spec["ingressClassName"] = ingressClassName + spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{domain}"
    @@ -1288,14 +1567,15 @@

    Index

  • disable_raycluster_tls
  • enable_local_interactive
  • enable_openshift_oauth
  • -
  • gen_dashboard_route_name
  • +
  • gen_dashboard_ingress_name
  • gen_names
  • generate_appwrapper
  • +
  • is_openshift_cluster
  • read_template
  • update_affinity
  • update_ca_secret
  • update_custompodresources
  • -
  • update_dashboard_route
  • +
  • update_dashboard_ingress
  • update_env
  • update_image
  • update_image_pull_secrets
  • @@ -1303,7 +1583,7 @@

    Index

  • update_names
  • update_nodes
  • update_priority
  • -
  • update_rayclient_route
  • +
  • update_rayclient_ingress
  • update_resources
  • write_components
  • write_user_appwrapper
  • diff --git a/docs/utils/openshift_oauth.html b/docs/utils/openshift_oauth.html index 0fc27b6b..e8df7482 100644 --- a/docs/utils/openshift_oauth.html +++ b/docs/utils/openshift_oauth.html @@ -27,7 +27,7 @@

    Module codeflare_sdk.utils.openshift_oauth

    Expand source code
    from urllib3.util import parse_url
    -from .generate_yaml import gen_dashboard_route_name
    +from .generate_yaml import gen_dashboard_ingress_name
     from .kube_api_helpers import _get_api_host
     from base64 import b64decode
     
    @@ -47,7 +47,7 @@ 

    Module codeflare_sdk.utils.openshift_oauth

    host = _get_api_host(api_client)
    # replace "^api" with the expected host
-    host = f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps" + host.lstrip(
+    host = f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps" + host.lstrip(
        "api"
    )
@@ -272,7 +272,7 @@
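The host rewrite above can be sanity-checked in isolation; with a made-up API host ("api.cluster.example.com" and the other names are placeholders) the expression yields the expected dashboard host.

    # Sketch only: reproduces the string rewrite with placeholder values.
    def gen_dashboard_ingress_name(cluster_name):
        return f"ray-dashboard-{cluster_name}"

    host = "api.cluster.example.com"
    cluster_name, namespace = "example", "default"
    dashboard_host = f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps" + host.lstrip("api")
    print(dashboard_host)  # ray-dashboard-example-default.apps.cluster.example.com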

    Functions

    host = _get_api_host(api_client) # replace "^api" with the expected host - host = f"{gen_dashboard_route_name(cluster_name)}-{namespace}.apps" + host.lstrip( + host = f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps" + host.lstrip( "api" ) From 9380986ba18e0fb76f4882219a3309bb6e2d923b Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Thu, 2 Nov 2023 09:49:31 +0100 Subject: [PATCH 049/496] Remove automatic triage/needs-triage label creation --- .github/workflows/auto-add-issues.yaml | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/.github/workflows/auto-add-issues.yaml b/.github/workflows/auto-add-issues.yaml index a8be31eb..868b33c5 100644 --- a/.github/workflows/auto-add-issues.yaml +++ b/.github/workflows/auto-add-issues.yaml @@ -1,20 +1,9 @@ -name: Label new issues as needs-triage and add to CodeFlare Sprint Board +name: Add new issues to CodeFlare Sprint Board on: issues: types: - opened jobs: - add_label: - name: Add needs-triage label to new issues - runs-on: ubuntu-latest - permissions: - issues: write - steps: - - uses: actions/checkout@v3 - - run: gh issue edit ${{ github.event.issue.number }} --add-label "triage/needs-triage" - env: - GH_TOKEN: ${{ github.token }} - add-to-project: name: Add issue to project runs-on: ubuntu-latest From 020ca8a2577c1e4896ba2a0c57570a385fcd8c07 Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Thu, 9 Nov 2023 10:52:29 +0000 Subject: [PATCH 050/496] refactor: remove default raycluster image --- src/codeflare_sdk/cluster/config.py | 2 +- tests/unit_test_support.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 19209764..0311d0e3 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -48,7 +48,7 @@ class ClusterConfiguration: instascale: bool = False mcad: bool = True envs: dict = field(default_factory=dict) - image: str = "quay.io/project-codeflare/ray:latest-py39-cu118" + image: str = "" local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index fea4ceaf..6d61130a 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -47,6 +47,7 @@ def createClusterConfig(): machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], ingress_domain="apps.cluster.awsroute.org", + image="quay.io/project-codeflare/ray:latest-py39-cu118" ) return config From 6d4caec4da515d2b0a6d191c7a8d10606d11727c Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Thu, 9 Nov 2023 10:53:06 +0000 Subject: [PATCH 051/496] refactor: update demo notebooks with raycluster images and note --- .../additional-demos/hf_interactive.ipynb | 5 +++- .../additional-demos/local_interactive.ipynb | 23 ++++++++++++++++++- demo-notebooks/guided-demos/0_basic_ray.ipynb | 9 +++++--- .../guided-demos/1_basic_instascale.ipynb | 6 ++++- .../guided-demos/2_basic_jobs.ipynb | 6 ++++- .../guided-demos/3_basic_interactive.ipynb | 6 ++++- demo-notebooks/guided-demos/4_gpt.ipynb | 10 ++++++++ .../notebook-ex-outputs/0_basic_ray.ipynb | 7 ++++-- .../1_basic_instascale.ipynb | 6 ++++- .../notebook-ex-outputs/2_basic_jobs.ipynb | 6 ++++- .../3_basic_interactive.ipynb | 6 ++++- .../notebook-ex-outputs/4_gpt.ipynb | 11 +++++++++ .../preview_nbs/0_basic_ray.ipynb | 7 ++++-- .../preview_nbs/1_basic_instascale.ipynb | 6 ++++- 
.../preview_nbs/2_basic_jobs.ipynb | 6 ++++- .../preview_nbs/3_basic_interactive.ipynb | 6 ++++- .../guided-demos/preview_nbs/4_gpt.ipynb | 10 ++++++++ 17 files changed, 118 insertions(+), 18 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index a6780a8f..45e9653f 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -69,7 +69,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper)." + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper).\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." ] }, { diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 7533db3d..b6773ee1 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -30,6 +30,16 @@ "auth.login()" ] }, + { + "cell_type": "markdown", + "id": "18de2d65", + "metadata": {}, + "source": [ + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." + ] + }, { "cell_type": "code", "execution_count": null, @@ -44,7 +54,18 @@ "cluster_name = \"hfgputest-1\"\n", "local_interactive = True\n", "\n", - "cluster = Cluster(ClusterConfiguration(local_interactive=local_interactive, namespace=namespace, name=cluster_name, num_workers=1, min_cpus=1, max_cpus=1, min_memory=4, max_memory=4, num_gpus=0, instascale=False, machine_types=[\"m5.xlarge\", \"p3.8xlarge\"]))" + "cluster = Cluster(ClusterConfiguration(local_interactive=local_interactive,\n", + " namespace=namespace,\n", + " name=cluster_name,\n", + " num_workers=1,\n", + " min_cpus=1,\n", + " max_cpus=1,\n", + " min_memory=4,\n", + " max_memory=4,\n", + " num_gpus=0,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " instascale=False,\n", + " machine_types=[\"m5.xlarge\", \"p3.8xlarge\"]))" ] }, { diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index d37b7d7d..c905d175 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -46,7 +46,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper)." + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper).\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." 
] }, { @@ -66,7 +69,7 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\", #current default\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=False\n", "))" ] @@ -191,7 +194,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.13" + "version": "3.9.18" }, "vscode": { "interpreter": { diff --git a/demo-notebooks/guided-demos/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/1_basic_instascale.ipynb index d0faf5b9..f1795382 100644 --- a/demo-notebooks/guided-demos/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/1_basic_instascale.ipynb @@ -43,7 +43,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):" + "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." ] }, { @@ -63,6 +66,7 @@ " min_memory=8,\n", " max_memory=8,\n", " num_gpus=1,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", "))" diff --git a/demo-notebooks/guided-demos/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/2_basic_jobs.ipynb index da74f9e5..5d862c03 100644 --- a/demo-notebooks/guided-demos/2_basic_jobs.ipynb +++ b/demo-notebooks/guided-demos/2_basic_jobs.ipynb @@ -43,7 +43,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Let's start by running through the same cluster setup as before:" + "Let's start by running through the same cluster setup as before:\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." ] }, { @@ -63,6 +66,7 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=False\n", "))" ] diff --git a/demo-notebooks/guided-demos/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/3_basic_interactive.ipynb index c8b2b1a0..bfcb2df3 100644 --- a/demo-notebooks/guided-demos/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/3_basic_interactive.ipynb @@ -43,7 +43,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Once again, let's start by running through the same cluster setup as before:" + "Once again, let's start by running through the same cluster setup as before:\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." 
] }, { @@ -63,6 +66,7 @@ " min_memory=8,\n", " max_memory=8,\n", " num_gpus=1,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=True, #<---instascale enabled\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"]\n", " \n", diff --git a/demo-notebooks/guided-demos/4_gpt.ipynb b/demo-notebooks/guided-demos/4_gpt.ipynb index 0fdcec96..dba03bc4 100644 --- a/demo-notebooks/guided-demos/4_gpt.ipynb +++ b/demo-notebooks/guided-demos/4_gpt.ipynb @@ -30,6 +30,15 @@ "auth.login()" ] }, + { + "cell_type": "markdown", + "id": "8f4b200f", + "metadata": {}, + "source": [ + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." + ] + }, { "cell_type": "code", "execution_count": null, @@ -46,6 +55,7 @@ " min_memory=8,\n", " max_memory=8,\n", " num_gpus=1,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=True, #<---instascale enabled\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"],\n", "))" diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 18e5a840..d07fd430 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -46,7 +46,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper)." + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper).\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." ] }, { @@ -74,7 +77,7 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\", #current default\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=False\n", "))" ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb index 97a2b382..6cfe8143 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb @@ -43,7 +43,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):" + "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." 
] }, { @@ -71,6 +74,7 @@ " min_memory=8,\n", " max_memory=8,\n", " num_gpus=1,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", "))" diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb index 9c612267..42600a3f 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb @@ -43,7 +43,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Let's start by running through the same cluster setup as before:" + "Let's start by running through the same cluster setup as before:\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." ] }, { @@ -71,6 +74,7 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=False\n", "))" ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb index d6799161..974bd058 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb @@ -43,7 +43,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Once again, let's start by running through the same cluster setup as before:" + "Once again, let's start by running through the same cluster setup as before:\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." ] }, { @@ -71,6 +74,7 @@ " min_memory=8,\n", " max_memory=8,\n", " num_gpus=1,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=True, #<---instascale enabled\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"]\n", " \n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb index 704f94f2..4ed6cc54 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb @@ -30,6 +30,16 @@ "auth.login()" ] }, + { + "cell_type": "markdown", + "id": "b43e8e21", + "metadata": {}, + "source": [ + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." 
+ ] + }, { "cell_type": "code", "execution_count": 2, @@ -54,6 +64,7 @@ " min_memory=8,\n", " max_memory=8,\n", " num_gpus=1,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=True, #<---instascale enabled\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"],\n", "))" diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index d37b7d7d..be2ca1e5 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -46,7 +46,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper)." + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper).\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." ] }, { @@ -66,7 +69,7 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\", #current default\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=False\n", "))" ] diff --git a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb index d0faf5b9..f1795382 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb @@ -43,7 +43,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):" + "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." 
] }, { @@ -63,6 +66,7 @@ " min_memory=8,\n", " max_memory=8,\n", " num_gpus=1,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", "))" diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb index 4ac4f00a..e3bbbce9 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb @@ -43,7 +43,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Let's start by running through the same cluster setup as before:" + "Let's start by running through the same cluster setup as before:\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." ] }, { @@ -63,6 +66,7 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=False\n", "))" ] diff --git a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb index c8b2b1a0..bfcb2df3 100644 --- a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb @@ -43,7 +43,10 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Once again, let's start by running through the same cluster setup as before:" + "Once again, let's start by running through the same cluster setup as before:\n", + "\n", + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." ] }, { @@ -63,6 +66,7 @@ " min_memory=8,\n", " max_memory=8,\n", " num_gpus=1,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=True, #<---instascale enabled\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"]\n", " \n", diff --git a/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb b/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb index 455bb9aa..77edf591 100644 --- a/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb @@ -30,6 +30,15 @@ "auth.login()" ] }, + { + "cell_type": "markdown", + "id": "5e4e9ee9", + "metadata": {}, + "source": [ + "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", + "The example here is a community image." 
+ ] + }, { "cell_type": "code", "execution_count": null, @@ -46,6 +55,7 @@ " min_memory=8,\n", " max_memory=8,\n", " num_gpus=1,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " instascale=True, #<---instascale enabled\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"],\n", "))" From fcdb75587b6b090806e7ae2020c728fd9afca4bb Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Thu, 9 Nov 2023 14:40:05 +0000 Subject: [PATCH 052/496] refactor: validate image config --- src/codeflare_sdk/cluster/cluster.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index a2871000..a154994d 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -128,6 +128,17 @@ def evaluate_dispatch_priority(self): print(f"Priority class {priority_class} is not available in the cluster") return None + def validate_image_config(self): + """ + Validates that the image configuration is not empty. + + :param image: The image string to validate + :raises ValueError: If the image is not specified + """ + if self.config.image == "" or self.config.image == None: + raise ValueError("Image must be specified in the ClusterConfiguration") + + def create_app_wrapper(self): """ Called upon cluster object creation, creates an AppWrapper yaml based on @@ -142,6 +153,9 @@ def create_app_wrapper(self): raise TypeError( f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." ) + + # Validate image configuration + self.validate_image_config() # Before attempting to create the cluster AW, let's evaluate the ClusterConfig if self.config.dispatch_priority: From 3b903347ac87c613880f13793c4ea061e98dc613 Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Thu, 9 Nov 2023 14:40:39 +0000 Subject: [PATCH 053/496] test: update tests --- tests/unit_test.py | 16 ++++++++++------ tests/unit_test_support.py | 1 + 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/tests/unit_test.py b/tests/unit_test.py index c6b57736..d6eb9ecf 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -21,6 +21,8 @@ import re import uuid +from codeflare_sdk.cluster import cluster + parent = Path(__file__).resolve().parents[1] sys.path.append(str(parent) + "/src") @@ -250,6 +252,7 @@ def test_config_creation(): assert config.dispatch_priority == None assert config.mcad == True assert config.local_interactive == False + def test_cluster_creation(mocker): @@ -259,8 +262,7 @@ def test_cluster_creation(mocker): assert filecmp.cmp( "unit-test-cluster.yaml", f"{parent}/tests/test-case.yaml", shallow=True ) - - + def test_cluster_creation_no_mcad(mocker): mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", @@ -311,6 +313,7 @@ def test_default_cluster_creation(mocker): ) default_config = ClusterConfiguration( name="unit-test-default-cluster", + image="quay.io/project-codeflare/ray:latest-py39-cu118", ) cluster = Cluster(default_config) @@ -614,6 +617,7 @@ def ingress_retrieval(port, annotations=None): def test_ray_job_wrapping(mocker): cluster = cluster = createClusterWithConfig(mocker) + cluster.config.image = "quay.io/project-codeflare/ray:latest-py39-cu118" mocker.patch( "ray.job_submission.JobSubmissionClient._check_connection_and_version_with_url", return_value="None", @@ -732,7 +736,7 @@ def test_ray_details(mocker, capsys): "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", return_value="", ) - cf = 
Cluster(ClusterConfiguration(name="raytest2", namespace="ns")) + cf = Cluster(ClusterConfiguration(name="raytest2", namespace="ns", image= "quay.io/project-codeflare/ray:latest-py39-cu118")) captured = capsys.readouterr() ray2 = _copy_to_ray(cf) details = cf.details() @@ -1898,7 +1902,7 @@ def test_cluster_status(mocker): head_mem=8, head_gpu=0, ) - cf = Cluster(ClusterConfiguration(name="test", namespace="ns")) + cf = Cluster(ClusterConfiguration(name="test", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118")) mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) mocker.patch("codeflare_sdk.cluster.cluster._ray_cluster_status", return_value=None) status, ready = cf.status() @@ -1988,7 +1992,7 @@ def test_wait_ready(mocker, capsys): mock_response = mocker.Mock() mock_response.status_code = 200 mocker.patch("requests.get", return_value=mock_response) - cf = Cluster(ClusterConfiguration(name="test", namespace="ns")) + cf = Cluster(ClusterConfiguration(name="test", namespace="ns", image= "quay.io/project-codeflare/ray:latest-py39-cu118")) try: cf.wait_ready(timeout=5) assert 1 == 0 @@ -2653,7 +2657,7 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): mocker.patch( "codeflare_sdk.utils.generate_yaml.write_user_appwrapper", write_user_appwrapper ) - Cluster(ClusterConfiguration("test_cluster", openshift_oauth=True)) + Cluster(ClusterConfiguration("test_cluster", openshift_oauth=True, image= "quay.io/project-codeflare/ray:latest-py39-cu118")) user_yaml = write_user_appwrapper.call_args.args[0] assert any( container["name"] == "oauth-proxy" diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 6d61130a..4c11c593 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -59,6 +59,7 @@ def createClusterWithConfig(mocker): return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, ) cluster = Cluster(createClusterConfig()) + cluster.config.image = "quay.io/project-codeflare/ray:latest-py39-cu118" return cluster From e452eff0dbe947bc3fe273f062415ec43135391d Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Thu, 9 Nov 2023 14:41:28 +0000 Subject: [PATCH 054/496] test: add test which raises error when no image is supplied --- tests/unit_test.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/unit_test.py b/tests/unit_test.py index d6eb9ecf..6f9cf879 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -263,6 +263,18 @@ def test_cluster_creation(mocker): "unit-test-cluster.yaml", f"{parent}/tests/test-case.yaml", shallow=True ) +def test_create_app_wrapper_raises_error_with_no_image(): + config = createClusterConfig() + config.image = "" # Clear the image to test error handling + try: + cluster = Cluster(config) + cluster.create_app_wrapper() + assert False, "Expected ValueError when 'image' is not specified." + except ValueError as error: + assert str(error) == "Image must be specified in the ClusterConfiguration", \ + "Error message did not match expected output." 
+ + def test_cluster_creation_no_mcad(mocker): mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", From 7f25549eb420f58b465fc3e03e4b636c99f91482 Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Thu, 9 Nov 2023 14:43:24 +0000 Subject: [PATCH 055/496] refactor: update formatting using black --- src/codeflare_sdk/cluster/cluster.py | 5 ++-- tests/unit_test.py | 41 ++++++++++++++++++++++------ tests/unit_test_support.py | 2 +- 3 files changed, 36 insertions(+), 12 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index a154994d..59023d8e 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -131,14 +131,13 @@ def evaluate_dispatch_priority(self): def validate_image_config(self): """ Validates that the image configuration is not empty. - + :param image: The image string to validate :raises ValueError: If the image is not specified """ if self.config.image == "" or self.config.image == None: raise ValueError("Image must be specified in the ClusterConfiguration") - def create_app_wrapper(self): """ Called upon cluster object creation, creates an AppWrapper yaml based on @@ -153,7 +152,7 @@ def create_app_wrapper(self): raise TypeError( f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." ) - + # Validate image configuration self.validate_image_config() diff --git a/tests/unit_test.py b/tests/unit_test.py index 6f9cf879..14dc8a76 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -252,7 +252,6 @@ def test_config_creation(): assert config.dispatch_priority == None assert config.mcad == True assert config.local_interactive == False - def test_cluster_creation(mocker): @@ -262,7 +261,8 @@ def test_cluster_creation(mocker): assert filecmp.cmp( "unit-test-cluster.yaml", f"{parent}/tests/test-case.yaml", shallow=True ) - + + def test_create_app_wrapper_raises_error_with_no_image(): config = createClusterConfig() config.image = "" # Clear the image to test error handling @@ -271,8 +271,9 @@ def test_create_app_wrapper_raises_error_with_no_image(): cluster.create_app_wrapper() assert False, "Expected ValueError when 'image' is not specified." except ValueError as error: - assert str(error) == "Image must be specified in the ClusterConfiguration", \ - "Error message did not match expected output." + assert ( + str(error) == "Image must be specified in the ClusterConfiguration" + ), "Error message did not match expected output." 
def test_cluster_creation_no_mcad(mocker): @@ -748,7 +749,13 @@ def test_ray_details(mocker, capsys): "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", return_value="", ) - cf = Cluster(ClusterConfiguration(name="raytest2", namespace="ns", image= "quay.io/project-codeflare/ray:latest-py39-cu118")) + cf = Cluster( + ClusterConfiguration( + name="raytest2", + namespace="ns", + image="quay.io/project-codeflare/ray:latest-py39-cu118", + ) + ) captured = capsys.readouterr() ray2 = _copy_to_ray(cf) details = cf.details() @@ -1914,7 +1921,13 @@ def test_cluster_status(mocker): head_mem=8, head_gpu=0, ) - cf = Cluster(ClusterConfiguration(name="test", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118")) + cf = Cluster( + ClusterConfiguration( + name="test", + namespace="ns", + image="quay.io/project-codeflare/ray:latest-py39-cu118", + ) + ) mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) mocker.patch("codeflare_sdk.cluster.cluster._ray_cluster_status", return_value=None) status, ready = cf.status() @@ -2004,7 +2017,13 @@ def test_wait_ready(mocker, capsys): mock_response = mocker.Mock() mock_response.status_code = 200 mocker.patch("requests.get", return_value=mock_response) - cf = Cluster(ClusterConfiguration(name="test", namespace="ns", image= "quay.io/project-codeflare/ray:latest-py39-cu118")) + cf = Cluster( + ClusterConfiguration( + name="test", + namespace="ns", + image="quay.io/project-codeflare/ray:latest-py39-cu118", + ) + ) try: cf.wait_ready(timeout=5) assert 1 == 0 @@ -2669,7 +2688,13 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): mocker.patch( "codeflare_sdk.utils.generate_yaml.write_user_appwrapper", write_user_appwrapper ) - Cluster(ClusterConfiguration("test_cluster", openshift_oauth=True, image= "quay.io/project-codeflare/ray:latest-py39-cu118")) + Cluster( + ClusterConfiguration( + "test_cluster", + openshift_oauth=True, + image="quay.io/project-codeflare/ray:latest-py39-cu118", + ) + ) user_yaml = write_user_appwrapper.call_args.args[0] assert any( container["name"] == "oauth-proxy" diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 4c11c593..85f1a76d 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -47,7 +47,7 @@ def createClusterConfig(): machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], ingress_domain="apps.cluster.awsroute.org", - image="quay.io/project-codeflare/ray:latest-py39-cu118" + image="quay.io/project-codeflare/ray:latest-py39-cu118", ) return config From cd8bce07c855add28137d3887b1f28c08539cc71 Mon Sep 17 00:00:00 2001 From: dimakis Date: Thu, 9 Nov 2023 16:27:04 +0000 Subject: [PATCH 056/496] Changes in docs for release: v0.12.0 --- docs/cluster/cluster.html | 52 +++++++++++++++++++++++++++++++++++++++ docs/cluster/config.html | 6 ++--- 2 files changed, 55 insertions(+), 3 deletions(-) diff --git a/docs/cluster/cluster.html b/docs/cluster/cluster.html index 8a313dc7..82c7c154 100644 --- a/docs/cluster/cluster.html +++ b/docs/cluster/cluster.html @@ -160,6 +160,16 @@

    Module codeflare_sdk.cluster.cluster

    print(f"Priority class {priority_class} is not available in the cluster") return None + def validate_image_config(self): + """ + Validates that the image configuration is not empty. + + :param image: The image string to validate + :raises ValueError: If the image is not specified + """ + if self.config.image == "" or self.config.image == None: + raise ValueError("Image must be specified in the ClusterConfiguration") + def create_app_wrapper(self): """ Called upon cluster object creation, creates an AppWrapper yaml based on @@ -175,6 +185,9 @@

    Module codeflare_sdk.cluster.cluster

    f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." ) + # Validate image configuration + self.validate_image_config() + # Before attempting to create the cluster AW, let's evaluate the ClusterConfig if self.config.dispatch_priority: if not self.config.mcad: @@ -1095,6 +1108,16 @@

    Classes

    print(f"Priority class {priority_class} is not available in the cluster") return None + def validate_image_config(self): + """ + Validates that the image configuration is not empty. + + :param image: The image string to validate + :raises ValueError: If the image is not specified + """ + if self.config.image == "" or self.config.image == None: + raise ValueError("Image must be specified in the ClusterConfiguration") + def create_app_wrapper(self): """ Called upon cluster object creation, creates an AppWrapper yaml based on @@ -1110,6 +1133,9 @@

    Classes

    f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." ) + # Validate image configuration + self.validate_image_config() + # Before attempting to create the cluster AW, let's evaluate the ClusterConfig if self.config.dispatch_priority: if not self.config.mcad: @@ -1650,6 +1676,9 @@

    Methods

    f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." ) + # Validate image configuration + self.validate_image_config() + # Before attempting to create the cluster AW, let's evaluate the ClusterConfig if self.config.dispatch_priority: if not self.config.mcad: @@ -2088,6 +2117,28 @@

    Methods

    return _kube_api_error_handling(e)
    +
    +def validate_image_config(self) +
    +
    +

    Validates that the image configuration is not empty.

    +

    :param image: The image string to validate +:raises ValueError: If the image is not specified

    +
    + +Expand source code + +
    def validate_image_config(self):
    +    """
    +    Validates that the image configuration is not empty.
    +
    +    :param image: The image string to validate
    +    :raises ValueError: If the image is not specified
    +    """
    +    if self.config.image == "" or self.config.image == None:
    +        raise ValueError("Image must be specified in the ClusterConfiguration")
    +
    +
    def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True)
    @@ -2182,6 +2233,7 @@

    torchx_config
  • torchx_scheduler
  • up
  • +
  • validate_image_config
  • wait_ready
  • diff --git a/docs/cluster/config.html b/docs/cluster/config.html index f9b762ba..a4f2d662 100644 --- a/docs/cluster/config.html +++ b/docs/cluster/config.html @@ -80,7 +80,7 @@

    Module codeflare_sdk.cluster.config

    instascale: bool = False mcad: bool = True envs: dict = field(default_factory=dict) - image: str = "quay.io/project-codeflare/ray:latest-py39-cu118" + image: str = "" local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None @@ -100,7 +100,7 @@

    Classes

    class ClusterConfiguration -(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = 'quay.io/project-codeflare/ray:latest-py39-cu118', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False, ingress_options: dict = <factory>, ingress_domain: str = None) +(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = '', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False, ingress_options: dict = <factory>, ingress_domain: str = None)

    This dataclass is used to specify resource requirements and other details, and @@ -132,7 +132,7 @@

    Classes

    instascale: bool = False mcad: bool = True envs: dict = field(default_factory=dict) - image: str = "quay.io/project-codeflare/ray:latest-py39-cu118" + image: str = "" local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None From ec02437bd041e01a260f7c167a8fd063508d6321 Mon Sep 17 00:00:00 2001 From: ted chang Date: Thu, 2 Nov 2023 08:38:58 -0700 Subject: [PATCH 057/496] Add unit test for the enable_local_interactive --- tests/unit_test.py | 172 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 171 insertions(+), 1 deletion(-) diff --git a/tests/unit_test.py b/tests/unit_test.py index 14dc8a76..620476df 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -83,7 +83,12 @@ ) import codeflare_sdk.utils.kube_api_helpers -from codeflare_sdk.utils.generate_yaml import gen_names, is_openshift_cluster +from codeflare_sdk.utils.generate_yaml import ( + gen_names, + is_openshift_cluster, + read_template, + enable_local_interactive, +) import openshift from openshift.selector import Selector @@ -2563,6 +2568,171 @@ def test_export_env(): ) +def test_enable_local_interactive(mocker): + template = f"{parent}/src/codeflare_sdk/templates/base-template.yaml" + user_yaml = read_template(template) + aw_spec = user_yaml.get("spec", None) + cluster_name = "test-enable-local" + namespace = "default" + ingress_domain = "mytest.domain" + mocker.patch( + "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=False + ) + volume_mounts = [ + {"name": "ca-vol", "mountPath": "/home/ray/workspace/ca", "readOnly": True}, + { + "name": "server-cert", + "mountPath": "/home/ray/workspace/tls", + "readOnly": False, + }, + ] + volumes = [ + { + "name": "ca-vol", + "secret": {"secretName": f"ca-secret-{cluster_name}"}, + "optional": False, + }, + {"name": "server-cert", "emptyDir": {}}, + ] + tls_env = [ + {"name": "RAY_USE_TLS", "value": "1"}, + {"name": "RAY_TLS_SERVER_CERT", "value": "/home/ray/workspace/tls/server.crt"}, + {"name": "RAY_TLS_SERVER_KEY", "value": "/home/ray/workspace/tls/server.key"}, + {"name": "RAY_TLS_CA_CERT", "value": "/home/ray/workspace/tls/ca.crt"}, + ] + assert aw_spec != None + enable_local_interactive(aw_spec, cluster_name, namespace, ingress_domain) + head_group_spec = aw_spec["resources"]["GenericItems"][0]["generictemplate"][ + "spec" + ]["headGroupSpec"] + worker_group_spec = aw_spec["resources"]["GenericItems"][0]["generictemplate"][ + "spec" + ]["workerGroupSpecs"] + ca_secret = aw_spec["resources"]["GenericItems"][3]["generictemplate"] + # At a minimal, make sure the following items are presented in the appwrapper spec.resources. + # 1. headgroup has the initContainers command to generated TLS cert from the mounted CA cert. + # Note: In this particular command, the DNS.5 in [alt_name] must match the exposed local_client_url: rayclient-{cluster_name}.{namespace}.{ingress_domain} + assert ( + head_group_spec["template"]["spec"]["initContainers"][0]["command"][2] + == f"cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf \"authorityKeyIdentifier=keyid,issuer\\nbasicConstraints=CA:FALSE\\nsubjectAltName = @alt_names\\n[alt_names]\\nDNS.1 = 127.0.0.1\\nDNS.2 = localhost\\nDNS.3 = ${{FQ_RAY_IP}}\\nDNS.4 = $(awk 'END{{print $1}}' /etc/hosts)\\nDNS.5 = rayclient-{cluster_name}-$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).{ingress_domain}\">./domain.ext && cp /home/ray/workspace/ca/* . 
&& openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext" + ) + assert ( + head_group_spec["template"]["spec"]["initContainers"][0]["volumeMounts"] + == volume_mounts + ) + assert head_group_spec["template"]["spec"]["volumes"] == volumes + + # 2. workerGrooupSpec has the initContainers command to generated TLS cert from the mounted CA cert. + assert ( + worker_group_spec[0]["template"]["spec"]["initContainers"][1]["command"][2] + == "cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf \"authorityKeyIdentifier=keyid,issuer\\nbasicConstraints=CA:FALSE\\nsubjectAltName = @alt_names\\n[alt_names]\\nDNS.1 = 127.0.0.1\\nDNS.2 = localhost\\nDNS.3 = ${FQ_RAY_IP}\\nDNS.4 = $(awk 'END{print $1}' /etc/hosts)\">./domain.ext && cp /home/ray/workspace/ca/* . && openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext" + ) + assert ( + worker_group_spec[0]["template"]["spec"]["initContainers"][1]["volumeMounts"] + == volume_mounts + ) + assert worker_group_spec[0]["template"]["spec"]["volumes"] == volumes + + # 3. Required Envs to enable TLS encryption between head and workers + for i in range(len(tls_env)): + assert ( + head_group_spec["template"]["spec"]["containers"][0]["env"][i + 1]["name"] + == tls_env[i]["name"] + ) + assert ( + head_group_spec["template"]["spec"]["containers"][0]["env"][i + 1]["value"] + == tls_env[i]["value"] + ) + assert ( + worker_group_spec[0]["template"]["spec"]["containers"][0]["env"][i + 1][ + "name" + ] + == tls_env[i]["name"] + ) + assert ( + worker_group_spec[0]["template"]["spec"]["containers"][0]["env"][i + 1][ + "value" + ] + == tls_env[i]["value"] + ) + + # 4. Secret with ca.crt and ca.key + assert ca_secret["kind"] == "Secret" + assert ca_secret["data"]["ca.crt"] != None + assert ca_secret["data"]["ca.key"] != None + assert ca_secret["metadata"]["name"] == f"ca-secret-{cluster_name}" + assert ca_secret["metadata"]["namespace"] == namespace + + # 5. 
Rayclient ingress - Kind + rayclient_ingress = aw_spec["resources"]["GenericItems"][2]["generictemplate"] + paths = [ + { + "backend": { + "service": { + "name": f"{cluster_name}-head-svc", + "port": {"number": 10001}, + } + }, + "path": "", + "pathType": "ImplementationSpecific", + } + ] + + assert rayclient_ingress["kind"] == "Ingress" + assert rayclient_ingress["metadata"]["namespace"] == namespace + assert rayclient_ingress["metadata"]["annotations"] == { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "nginx.ingress.kubernetes.io/ssl-passthrough": "true", + } + assert rayclient_ingress["metadata"]["name"] == f"rayclient-{cluster_name}" + assert rayclient_ingress["spec"]["rules"][0] == { + "host": f"rayclient-{cluster_name}-{namespace}.{ingress_domain}", + "http": {"paths": paths}, + } + # 5.1 Rayclient ingress - OCP + user_yaml = read_template(template) + aw_spec = user_yaml.get("spec", None) + cluster_name = "test-ocp-enable-local" + namespace = "default" + ocp_cluster_domain = {"spec": {"domain": "mytest.ocp.domain"}} + ingress_domain = ocp_cluster_domain["spec"]["domain"] + mocker.patch( + "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=True + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value=ocp_cluster_domain, + ) + paths = [ + { + "backend": { + "service": { + "name": f"{cluster_name}-head-svc", + "port": {"number": 10001}, + } + }, + "path": "", + "pathType": "ImplementationSpecific", + } + ] + enable_local_interactive(aw_spec, cluster_name, namespace, ingress_domain) + rayclient_ocp_ingress = aw_spec["resources"]["GenericItems"][2]["generictemplate"] + assert rayclient_ocp_ingress["kind"] == "Ingress" + assert rayclient_ocp_ingress["metadata"]["annotations"] == { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "route.openshift.io/termination": "passthrough", + } + assert rayclient_ocp_ingress["metadata"]["name"] == f"rayclient-{cluster_name}" + assert rayclient_ocp_ingress["metadata"]["namespace"] == namespace + assert rayclient_ocp_ingress["spec"]["ingressClassName"] == "openshift-default" + assert rayclient_ocp_ingress["spec"]["rules"][0] == { + "host": f"rayclient-{cluster_name}-{namespace}.{ingress_domain}", + "http": {"paths": paths}, + } + + def test_create_openshift_oauth(mocker: MockerFixture): create_namespaced_service_account = MagicMock() create_cluster_role_binding = MagicMock() From 53177f2ef90fd72946818ab73383cf70e5f58fac Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Mon, 13 Nov 2023 17:16:00 +0000 Subject: [PATCH 058/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index 607d3de4..59d64b37 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 91% - 91% + 93% + 93% From c212bd887457fdbfd8f85aa2be656a95a214792d Mon Sep 17 00:00:00 2001 From: Eoin Gallinagh Date: Tue, 7 Nov 2023 09:18:42 +0000 Subject: [PATCH 059/496] add: abstraction to remove appwrapper yaml visibility from the NB console. 
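
Concretely, the generated AppWrapper yaml is no longer written into the notebook's
working directory; it is written under a hidden per-user directory instead. A minimal
sketch of the resulting path handling, assuming the `~/.codeflare/appwrapper/` location
and an illustrative `appwrapper_name` (not the exact SDK implementation):

    import os

    # Illustrative name only; in the SDK this comes from the ClusterConfiguration.
    appwrapper_name = "unit-test-cluster"

    # Expand the hidden per-user directory and make sure it exists.
    directory_path = os.path.expanduser("~/.codeflare/appwrapper/")
    os.makedirs(directory_path, exist_ok=True)

    # The AppWrapper spec is dumped to <home>/.codeflare/appwrapper/<name>.yaml
    outfile = os.path.join(directory_path, appwrapper_name + ".yaml")
    print(outfile)

The Cluster object then derives `app_wrapper_name` from this full path rather than from
the bare file name, as shown in the diff below.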
--- src/codeflare_sdk/cluster/cluster.py | 4 ++- src/codeflare_sdk/utils/generate_yaml.py | 10 +++++- tests/unit_test.py | 44 +++++++++++++----------- 3 files changed, 36 insertions(+), 22 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 59023d8e..7c94f950 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -71,8 +71,10 @@ def __init__(self, config: ClusterConfiguration): """ self.config = config self.app_wrapper_yaml = self.create_app_wrapper() - self.app_wrapper_name = self.app_wrapper_yaml.split(".")[0] self._job_submission_client = None + self.app_wrapper_name = self.app_wrapper_yaml.replace(".yaml", "").split("/")[ + -1 + ] @property def _client_headers(self): diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index cf9686c4..a6aae308 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -19,6 +19,7 @@ import yaml import sys +import os import argparse import uuid from kubernetes import client, config @@ -506,8 +507,14 @@ def disable_raycluster_tls(resources): def write_user_appwrapper(user_yaml, output_file_name): + # Create the directory if it doesn't exist + directory_path = os.path.dirname(output_file_name) + if not os.path.exists(directory_path): + os.makedirs(directory_path) + with open(output_file_name, "w") as outfile: yaml.dump(user_yaml, outfile, default_flow_style=False) + print(f"Written to: {output_file_name}") @@ -675,7 +682,8 @@ def generate_appwrapper( if openshift_oauth: enable_openshift_oauth(user_yaml, cluster_name, namespace) - outfile = appwrapper_name + ".yaml" + directory_path = os.path.expanduser("~/.codeflare/appwrapper/") + outfile = os.path.join(directory_path, appwrapper_name + ".yaml") if not mcad: write_components(user_yaml, outfile) else: diff --git a/tests/unit_test.py b/tests/unit_test.py index 620476df..44bcddea 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -24,6 +24,7 @@ from codeflare_sdk.cluster import cluster parent = Path(__file__).resolve().parents[1] +aw_dir = os.path.expanduser("~/.codeflare/appwrapper/") sys.path.append(str(parent) + "/src") from kubernetes import client, config @@ -261,10 +262,12 @@ def test_config_creation(): def test_cluster_creation(mocker): cluster = createClusterWithConfig(mocker) - assert cluster.app_wrapper_yaml == "unit-test-cluster.yaml" + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster.yaml" assert cluster.app_wrapper_name == "unit-test-cluster" assert filecmp.cmp( - "unit-test-cluster.yaml", f"{parent}/tests/test-case.yaml", shallow=True + f"{aw_dir}unit-test-cluster.yaml", + f"{parent}/tests/test-case.yaml", + shallow=True, ) @@ -290,10 +293,10 @@ def test_cluster_creation_no_mcad(mocker): config.name = "unit-test-cluster-ray" config.mcad = False cluster = Cluster(config) - assert cluster.app_wrapper_yaml == "unit-test-cluster-ray.yaml" + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" assert cluster.app_wrapper_name == "unit-test-cluster-ray" assert filecmp.cmp( - "unit-test-cluster-ray.yaml", + f"{aw_dir}unit-test-cluster-ray.yaml", f"{parent}/tests/test-case-no-mcad.yamls", shallow=True, ) @@ -313,10 +316,12 @@ def test_cluster_creation_priority(mocker): return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, ) cluster = Cluster(config) - assert cluster.app_wrapper_yaml == "prio-test-cluster.yaml" + assert cluster.app_wrapper_yaml == 
f"{aw_dir}prio-test-cluster.yaml" assert cluster.app_wrapper_name == "prio-test-cluster" assert filecmp.cmp( - "prio-test-cluster.yaml", f"{parent}/tests/test-case-prio.yaml", shallow=True + f"{aw_dir}prio-test-cluster.yaml", + f"{parent}/tests/test-case-prio.yaml", + shallow=True, ) @@ -335,7 +340,7 @@ def test_default_cluster_creation(mocker): ) cluster = Cluster(default_config) - assert cluster.app_wrapper_yaml == "unit-test-default-cluster.yaml" + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-default-cluster.yaml" assert cluster.app_wrapper_name == "unit-test-default-cluster" assert cluster.config.namespace == "opendatahub" @@ -365,13 +370,13 @@ def arg_check_apply_effect(group, version, namespace, plural, body, *args): if plural == "appwrappers": assert group == "workload.codeflare.dev" assert version == "v1beta1" - with open("unit-test-cluster.yaml") as f: + with open(f"{aw_dir}unit-test-cluster.yaml") as f: aw = yaml.load(f, Loader=yaml.FullLoader) assert body == aw elif plural == "rayclusters": assert group == "ray.io" assert version == "v1alpha1" - with open("unit-test-cluster-ray.yaml") as f: + with open(f"{aw_dir}unit-test-cluster-ray.yaml") as f: yamls = yaml.load_all(f, Loader=yaml.FullLoader) for resource in yamls: if resource["kind"] == "RayCluster": @@ -379,7 +384,7 @@ def arg_check_apply_effect(group, version, namespace, plural, body, *args): elif plural == "routes": assert group == "route.openshift.io" assert version == "v1" - with open("unit-test-cluster-ray.yaml") as f: + with open(f"{aw_dir}unit-test-cluster-ray.yaml") as f: yamls = yaml.load_all(f, Loader=yaml.FullLoader) for resource in yamls: if resource["kind"] == "Route": @@ -2408,7 +2413,7 @@ def parse_j(cmd): def test_AWManager_creation(): - testaw = AWManager("test.yaml") + testaw = AWManager(f"{aw_dir}test.yaml") assert testaw.name == "test" assert testaw.namespace == "ns" assert testaw.submitted == False @@ -2432,7 +2437,7 @@ def arg_check_aw_apply_effect(group, version, namespace, plural, body, *args): assert version == "v1beta1" assert namespace == "ns" assert plural == "appwrappers" - with open("test.yaml") as f: + with open(f"{aw_dir}test.yaml") as f: aw = yaml.load(f, Loader=yaml.FullLoader) assert body == aw assert args == tuple() @@ -2448,7 +2453,7 @@ def arg_check_aw_del_effect(group, version, namespace, plural, name, *args): def test_AWManager_submit_remove(mocker, capsys): - testaw = AWManager("test.yaml") + testaw = AWManager(f"{aw_dir}test.yaml") testaw.remove() captured = capsys.readouterr() assert ( @@ -2876,13 +2881,12 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): # Make sure to always keep this function last def test_cleanup(): - os.remove("unit-test-cluster.yaml") - os.remove("prio-test-cluster.yaml") - os.remove("unit-test-default-cluster.yaml") - os.remove("unit-test-cluster-ray.yaml") - os.remove("test.yaml") - os.remove("raytest2.yaml") - os.remove("quicktest.yaml") + os.remove(f"{aw_dir}unit-test-cluster.yaml") + os.remove(f"{aw_dir}prio-test-cluster.yaml") + os.remove(f"{aw_dir}unit-test-default-cluster.yaml") + os.remove(f"{aw_dir}test.yaml") + os.remove(f"{aw_dir}raytest2.yaml") + os.remove(f"{aw_dir}quicktest.yaml") os.remove("tls-cluster-namespace/ca.crt") os.remove("tls-cluster-namespace/tls.crt") os.remove("tls-cluster-namespace/tls.key") From f24371914055b355bad708779e218b828b76143e Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Thu, 16 Nov 2023 12:25:27 +0000 Subject: [PATCH 060/496] Changes in docs for release: v0.12.1 --- 
docs/cluster/cluster.html | 8 ++++++-- docs/utils/generate_yaml.html | 19 +++++++++++++++++-- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/docs/cluster/cluster.html b/docs/cluster/cluster.html index 82c7c154..bf84856b 100644 --- a/docs/cluster/cluster.html +++ b/docs/cluster/cluster.html @@ -103,8 +103,10 @@

    Module codeflare_sdk.cluster.cluster

    """ self.config = config self.app_wrapper_yaml = self.create_app_wrapper() - self.app_wrapper_name = self.app_wrapper_yaml.split(".")[0] self._job_submission_client = None + self.app_wrapper_name = self.app_wrapper_yaml.replace(".yaml", "").split("/")[ + -1 + ] @property def _client_headers(self): @@ -1051,8 +1053,10 @@

    Classes

    """ self.config = config self.app_wrapper_yaml = self.create_app_wrapper() - self.app_wrapper_name = self.app_wrapper_yaml.split(".")[0] self._job_submission_client = None + self.app_wrapper_name = self.app_wrapper_yaml.replace(".yaml", "").split("/")[ + -1 + ] @property def _client_headers(self): diff --git a/docs/utils/generate_yaml.html b/docs/utils/generate_yaml.html index ba244664..c5a6affa 100644 --- a/docs/utils/generate_yaml.html +++ b/docs/utils/generate_yaml.html @@ -50,6 +50,7 @@

    Module codeflare_sdk.utils.generate_yaml

    import yaml import sys +import os import argparse import uuid from kubernetes import client, config @@ -537,8 +538,14 @@

    Module codeflare_sdk.utils.generate_yaml

    def write_user_appwrapper(user_yaml, output_file_name): + # Create the directory if it doesn't exist + directory_path = os.path.dirname(output_file_name) + if not os.path.exists(directory_path): + os.makedirs(directory_path) + with open(output_file_name, "w") as outfile: yaml.dump(user_yaml, outfile, default_flow_style=False) + print(f"Written to: {output_file_name}") @@ -706,7 +713,8 @@

    Module codeflare_sdk.utils.generate_yaml

    if openshift_oauth: enable_openshift_oauth(user_yaml, cluster_name, namespace) - outfile = appwrapper_name + ".yaml" + directory_path = os.path.expanduser("~/.codeflare/appwrapper/") + outfile = os.path.join(directory_path, appwrapper_name + ".yaml") if not mcad: write_components(user_yaml, outfile) else: @@ -1019,7 +1027,8 @@

    Functions

    if openshift_oauth: enable_openshift_oauth(user_yaml, cluster_name, namespace) - outfile = appwrapper_name + ".yaml" + directory_path = os.path.expanduser("~/.codeflare/appwrapper/") + outfile = os.path.join(directory_path, appwrapper_name + ".yaml") if not mcad: write_components(user_yaml, outfile) else: @@ -1541,8 +1550,14 @@

    Functions

    Expand source code
    def write_user_appwrapper(user_yaml, output_file_name):
    +    # Create the directory if it doesn't exist
    +    directory_path = os.path.dirname(output_file_name)
    +    if not os.path.exists(directory_path):
    +        os.makedirs(directory_path)
    +
         with open(output_file_name, "w") as outfile:
             yaml.dump(user_yaml, outfile, default_flow_style=False)
    +
         print(f"Written to: {output_file_name}")
    From b0f4671888257513289c20d8b4d16ca2cb232685 Mon Sep 17 00:00:00 2001 From: James Busche Date: Mon, 13 Nov 2023 12:45:06 -0800 Subject: [PATCH 061/496] move busybox to quay.io Signed-off-by: James Busche --- .../guided-demos/notebook-ex-outputs/gptfttest.yaml | 2 +- .../notebook-ex-outputs/instascaletest.yaml | 2 +- .../notebook-ex-outputs/interactivetest.yaml | 2 +- .../guided-demos/notebook-ex-outputs/jobtest.yaml | 2 +- .../guided-demos/notebook-ex-outputs/raytest.yaml | 2 +- src/codeflare_sdk/templates/base-template.yaml | 2 +- tests/test-case-bad.yaml | 2 +- tests/test-case-no-mcad.yamls | 2 +- tests/test-case-prio.yaml | 2 +- tests/test-case.yaml | 2 +- tests/unit_test.py | 10 +++++----- 11 files changed, 15 insertions(+), 15 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml index 436abe05..c1c6c7db 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml @@ -172,7 +172,7 @@ spec: - -c - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done - image: busybox:1.28 + image: quay.io/project-codeflare/busybox:latest name: init-myservice replicas: 1 - generictemplate: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml index 5f952649..ee108336 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml @@ -172,7 +172,7 @@ spec: - -c - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done - image: busybox:1.28 + image: quay.io/project-codeflare/busybox:latest name: init-myservice replicas: 1 - generictemplate: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index c80684e1..6d0f94cb 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -172,7 +172,7 @@ spec: - -c - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done - image: busybox:1.28 + image: quay.io/project-codeflare/busybox:latest name: init-myservice replicas: 1 - generictemplate: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index aa85803e..05136c2a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -152,7 +152,7 @@ spec: - -c - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done - image: busybox:1.28 + image: quay.io/project-codeflare/busybox:latest name: init-myservice replicas: 1 - generictemplate: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index c58b963a..c8ddab8a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ 
b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -152,7 +152,7 @@ spec: - -c - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done - image: busybox:1.28 + image: quay.io/project-codeflare/busybox:latest name: init-myservice replicas: 1 - generictemplate: diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 8f309630..2ebd3d2a 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -224,7 +224,7 @@ spec: initContainers: # the env var $RAY_IP is set by the operator if missing, with the value of the head service name - name: init-myservice - image: busybox:1.28 + image: quay.io/project-codeflare/busybox:latest command: ['sh', '-c', "until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done"] - name: create-cert image: rayproject/ray:2.7.0 diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index eb800e56..1a3b7e37 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -154,7 +154,7 @@ spec: - -c - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done - image: busybox:1.28 + image: quay.io/project-codeflare/busybox:latest name: init-myservice replicas: 1 - generictemplate: diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 0416aa7b..c6b128db 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -144,7 +144,7 @@ spec: - -c - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done - image: busybox:1.28 + image: quay.io/project-codeflare/busybox:latest name: init-myservice --- apiVersion: networking.k8s.io/v1 diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 9278e2c0..98fb6d68 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -175,7 +175,7 @@ spec: - -c - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done - image: busybox:1.28 + image: quay.io/project-codeflare/busybox:latest name: init-myservice priorityClassName: default replicas: 1 diff --git a/tests/test-case.yaml b/tests/test-case.yaml index d6701eb4..802ef3f4 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -173,7 +173,7 @@ spec: - -c - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done - image: busybox:1.28 + image: quay.io/project-codeflare/busybox:latest name: init-myservice replicas: 1 - generictemplate: diff --git a/tests/unit_test.py b/tests/unit_test.py index 44bcddea..0d9403e6 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1119,7 +1119,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "-c", "until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done", ], - "image": "busybox:1.28", + "image": "quay.io/project-codeflare/busybox:latest", "name": "init-myservice", } ], @@ -1155,7 +1155,7 @@ def get_aw_obj(group, version, namespace, plural): "kind": "AppWrapper", 
"metadata": { "annotations": { - "kubectl.kubernetes.io/last-applied-configuration": '{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest1","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1alpha1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest1","controller-tools.k8s.io":"1.0"},"name":"quicktest1","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],"initContainers":[{"command":["sh","-c","until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done"],"image":"busybox:1.28","name":"init-myservice"}]}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' + "kubectl.kubernetes.io/last-applied-configuration": 
'{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest1","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1alpha1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest1","controller-tools.k8s.io":"1.0"},"name":"quicktest1","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],"initContainers":[{"command":["sh","-c","until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done"],"image":"quay.io/project-codeflare/busybox:latest","name":"init-myservice"}]}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' }, "creationTimestamp": "2023-02-22T16:26:07Z", "generation": 4, @@ -1395,7 +1395,7 @@ def get_aw_obj(group, version, namespace, plural): "-c", "until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done", ], - "image": "busybox:1.28", + "image": "quay.io/project-codeflare/busybox:latest", "name": "init-myservice", } ], @@ -1478,7 +1478,7 @@ def get_aw_obj(group, version, namespace, plural): "kind": "AppWrapper", "metadata": { "annotations": { - "kubectl.kubernetes.io/last-applied-configuration": 
'{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest2","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1alpha1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest2","controller-tools.k8s.io":"1.0"},"name":"quicktest2","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],"initContainers":[{"command":["sh","-c","until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done"],"image":"busybox:1.28","name":"init-myservice"}]}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' + "kubectl.kubernetes.io/last-applied-configuration": 
'{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest2","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1alpha1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest2","controller-tools.k8s.io":"1.0"},"name":"quicktest2","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],"initContainers":[{"command":["sh","-c","until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done"],"image":"quay.io/project-codeflare/busybox:latest","name":"init-myservice"}]}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' }, "creationTimestamp": "2023-02-22T16:26:07Z", "generation": 4, @@ -1718,7 +1718,7 @@ def get_aw_obj(group, version, namespace, plural): "-c", "until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done", ], - "image": "busybox:1.28", + "image": "quay.io/project-codeflare/busybox:latest", "name": "init-myservice", } ], From be1b46d0db6300515cc42b7389e7c40a20e18688 Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Tue, 14 Nov 2023 10:21:08 +0000 Subject: [PATCH 062/496] refactor: add disconnected mnist training script --- .../guided-demos/mnist_disconnected.py | 162 ++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 
demo-notebooks/guided-demos/mnist_disconnected.py diff --git a/demo-notebooks/guided-demos/mnist_disconnected.py b/demo-notebooks/guided-demos/mnist_disconnected.py new file mode 100644 index 00000000..758c2779 --- /dev/null +++ b/demo-notebooks/guided-demos/mnist_disconnected.py @@ -0,0 +1,162 @@ +# Copyright 2022 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# In[] +import os + +import torch +from pytorch_lightning import LightningModule, Trainer +from pytorch_lightning.callbacks.progress import TQDMProgressBar +from pytorch_lightning.loggers import CSVLogger +from torch import nn +from torch.nn import functional as F +from torch.utils.data import DataLoader, random_split +from torchmetrics import Accuracy +from torchvision import transforms +from torchvision.datasets import MNIST + +PATH_DATASETS = os.environ.get("PATH_DATASETS", ".") +BATCH_SIZE = 256 if torch.cuda.is_available() else 64 +# %% + +local_minst_path = os.path.join(PATH_DATASETS, "mnist") + +print("prior to running the trainer") +print("MASTER_ADDR: is ", os.getenv("MASTER_ADDR")) +print("MASTER_PORT: is ", os.getenv("MASTER_PORT")) + + +class LitMNIST(LightningModule): + def __init__(self, data_dir=PATH_DATASETS, hidden_size=64, learning_rate=2e-4): + super().__init__() + + # Set our init args as class attributes + self.data_dir = data_dir + self.hidden_size = hidden_size + self.learning_rate = learning_rate + + # Hardcode some dataset specific attributes + self.num_classes = 10 + self.dims = (1, 28, 28) + channels, width, height = self.dims + self.transform = transforms.Compose( + [ + transforms.ToTensor(), + transforms.Normalize((0.1307,), (0.3081,)), + ] + ) + + # Define PyTorch model + self.model = nn.Sequential( + nn.Flatten(), + nn.Linear(channels * width * height, hidden_size), + nn.ReLU(), + nn.Dropout(0.1), + nn.Linear(hidden_size, hidden_size), + nn.ReLU(), + nn.Dropout(0.1), + nn.Linear(hidden_size, self.num_classes), + ) + + self.val_accuracy = Accuracy() + self.test_accuracy = Accuracy() + + def forward(self, x): + x = self.model(x) + return F.log_softmax(x, dim=1) + + def training_step(self, batch, batch_idx): + x, y = batch + logits = self(x) + loss = F.nll_loss(logits, y) + return loss + + def validation_step(self, batch, batch_idx): + x, y = batch + logits = self(x) + loss = F.nll_loss(logits, y) + preds = torch.argmax(logits, dim=1) + self.val_accuracy.update(preds, y) + + # Calling self.log will surface up scalars for you in TensorBoard + self.log("val_loss", loss, prog_bar=True) + self.log("val_acc", self.val_accuracy, prog_bar=True) + + def test_step(self, batch, batch_idx): + x, y = batch + logits = self(x) + loss = F.nll_loss(logits, y) + preds = torch.argmax(logits, dim=1) + self.test_accuracy.update(preds, y) + + # Calling self.log will surface up scalars for you in TensorBoard + self.log("test_loss", loss, prog_bar=True) + self.log("test_acc", self.test_accuracy, prog_bar=True) + + def configure_optimizers(self): + optimizer = torch.optim.Adam(self.parameters(), 
lr=self.learning_rate) + return optimizer + + #################### + # DATA RELATED HOOKS + #################### + + def prepare_data(self): + # download + print("Downloading MNIST dataset...") + MNIST(self.data_dir, train=True, download=False) + MNIST(self.data_dir, train=False, download=False) + + def setup(self, stage=None): + # Assign train/val datasets for use in dataloaders + if stage == "fit" or stage is None: + mnist_full = MNIST(self.data_dir, train=True, transform=self.transform, download=False) + self.mnist_train, self.mnist_val = random_split(mnist_full, [55000, 5000]) + + # Assign test dataset for use in dataloader(s) + if stage == "test" or stage is None: + self.mnist_test = MNIST( + self.data_dir, train=False, transform=self.transform, download=False + ) + + def train_dataloader(self): + return DataLoader(self.mnist_train, batch_size=BATCH_SIZE) + + def val_dataloader(self): + return DataLoader(self.mnist_val, batch_size=BATCH_SIZE) + + def test_dataloader(self): + return DataLoader(self.mnist_test, batch_size=BATCH_SIZE) + + +# Init DataLoader from MNIST Dataset + +model = LitMNIST(data_dir=local_minst_path) + +print("GROUP: ", int(os.environ.get("GROUP_WORLD_SIZE", 1))) +print("LOCAL: ", int(os.environ.get("LOCAL_WORLD_SIZE", 1))) + +# Initialize a trainer +trainer = Trainer( + accelerator="auto", + # devices=1 if torch.cuda.is_available() else None, # limiting got iPython runs + max_epochs=5, + callbacks=[TQDMProgressBar(refresh_rate=20)], + num_nodes=int(os.environ.get("GROUP_WORLD_SIZE", 1)), + devices=int(os.environ.get("LOCAL_WORLD_SIZE", 1)), + strategy="ddp", +) + +# Train the model ⚡ +trainer.fit(model) From 184a03bbde1a1bb11b238a061cfb1afec8a74d5e Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Tue, 14 Nov 2023 10:23:59 +0000 Subject: [PATCH 063/496] refactor: addition of the mnist download script for use with disconnected env --- .../guided-demos/download_mnist_datasets.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 demo-notebooks/guided-demos/download_mnist_datasets.py diff --git a/demo-notebooks/guided-demos/download_mnist_datasets.py b/demo-notebooks/guided-demos/download_mnist_datasets.py new file mode 100644 index 00000000..00b2bd32 --- /dev/null +++ b/demo-notebooks/guided-demos/download_mnist_datasets.py @@ -0,0 +1,42 @@ +# Copyright 2022 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +from torchvision.datasets import MNIST +from torchvision import transforms + +def download_mnist_dataset(destination_dir): + # Ensure the destination directory exists + if not os.path.exists(destination_dir): + os.makedirs(destination_dir) + + # Define transformations + transform = transforms.Compose([ + transforms.ToTensor(), + transforms.Normalize((0.1307,), (0.3081,)) + ]) + + # Download the training data + train_set = MNIST(root=destination_dir, train=True, download=True, transform=transform) + + # Download the test data + test_set = MNIST(root=destination_dir, train=False, download=True, transform=transform) + + print(f"MNIST dataset downloaded in {destination_dir}") + +# Specify the directory where you +script_dir = os.path.dirname(os.path.abspath(__file__)) +destination_dir = script_dir + "/mnist_datasets" + +download_mnist_dataset(destination_dir) \ No newline at end of file From 1bea0ba4c3abacdb719aca6fa70fa5dff2c0680b Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Tue, 14 Nov 2023 10:38:45 +0000 Subject: [PATCH 064/496] refactor: addition of note to test in disconnected env --- demo-notebooks/guided-demos/2_basic_jobs.ipynb | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/demo-notebooks/guided-demos/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/2_basic_jobs.ipynb index 5d862c03..72accf92 100644 --- a/demo-notebooks/guided-demos/2_basic_jobs.ipynb +++ b/demo-notebooks/guided-demos/2_basic_jobs.ipynb @@ -203,7 +203,9 @@ "id": "31096641", "metadata": {}, "source": [ - "Now, an alternative option for job submission is to submit directly to MCAD, which will schedule pods to run the job with requested resources:" + "Now, an alternative option for job submission is to submit directly to MCAD, which will schedule pods to run the job with requested resources:\n", + "\n", + "NOTE: To test this demo in an air-gapped/ disconnected environment alter the training script to use a local dataset." 
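For illustration only (not part of the patch above): since the note tells readers to point the training script at a local copy of MNIST, a minimal sketch of staging the data with the helper script added earlier in this series, assuming the helper sits in the notebook's working directory, might look like this:

import os
import subprocess
import sys

# Run the download helper added in this series; at this point in the series it
# writes the data under ./mnist_datasets next to the script (later commits in
# this section adjust the target path).
subprocess.run([sys.executable, "download_mnist_datasets.py"], check=True)

# Quick sanity check that the dataset files were written.
print(os.listdir("mnist_datasets"))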
] }, { @@ -216,6 +218,7 @@ "jobdef = DDPJobDefinition(\n", " name=\"mnistjob\",\n", " script=\"mnist.py\",\n", + " # script=\"mnist_disconnected.py\", # training script for disconnected environment\n", " scheduler_args={\"namespace\": \"default\"},\n", " j=\"1x1\",\n", " gpu=0,\n", From 1c84c6adaeefb58f1793168a0283b072efa8abf4 Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Tue, 14 Nov 2023 10:41:25 +0000 Subject: [PATCH 065/496] style: black formatting for precommit --- .../guided-demos/download_mnist_datasets.py | 19 ++++++++++++------- .../guided-demos/mnist_disconnected.py | 4 +++- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/demo-notebooks/guided-demos/download_mnist_datasets.py b/demo-notebooks/guided-demos/download_mnist_datasets.py index 00b2bd32..5df8be0f 100644 --- a/demo-notebooks/guided-demos/download_mnist_datasets.py +++ b/demo-notebooks/guided-demos/download_mnist_datasets.py @@ -16,27 +16,32 @@ from torchvision.datasets import MNIST from torchvision import transforms + def download_mnist_dataset(destination_dir): # Ensure the destination directory exists if not os.path.exists(destination_dir): os.makedirs(destination_dir) # Define transformations - transform = transforms.Compose([ - transforms.ToTensor(), - transforms.Normalize((0.1307,), (0.3081,)) - ]) + transform = transforms.Compose( + [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))] + ) # Download the training data - train_set = MNIST(root=destination_dir, train=True, download=True, transform=transform) + train_set = MNIST( + root=destination_dir, train=True, download=True, transform=transform + ) # Download the test data - test_set = MNIST(root=destination_dir, train=False, download=True, transform=transform) + test_set = MNIST( + root=destination_dir, train=False, download=True, transform=transform + ) print(f"MNIST dataset downloaded in {destination_dir}") + # Specify the directory where you script_dir = os.path.dirname(os.path.abspath(__file__)) destination_dir = script_dir + "/mnist_datasets" -download_mnist_dataset(destination_dir) \ No newline at end of file +download_mnist_dataset(destination_dir) diff --git a/demo-notebooks/guided-demos/mnist_disconnected.py b/demo-notebooks/guided-demos/mnist_disconnected.py index 758c2779..609f93aa 100644 --- a/demo-notebooks/guided-demos/mnist_disconnected.py +++ b/demo-notebooks/guided-demos/mnist_disconnected.py @@ -121,7 +121,9 @@ def prepare_data(self): def setup(self, stage=None): # Assign train/val datasets for use in dataloaders if stage == "fit" or stage is None: - mnist_full = MNIST(self.data_dir, train=True, transform=self.transform, download=False) + mnist_full = MNIST( + self.data_dir, train=True, transform=self.transform, download=False + ) self.mnist_train, self.mnist_val = random_split(mnist_full, [55000, 5000]) # Assign test dataset for use in dataloader(s) From 28026f26ff3a63edba14f2583159f0e9c0377bd3 Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Tue, 14 Nov 2023 10:46:00 +0000 Subject: [PATCH 066/496] refactor: correct path to datasets --- demo-notebooks/guided-demos/mnist_disconnected.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/demo-notebooks/guided-demos/mnist_disconnected.py b/demo-notebooks/guided-demos/mnist_disconnected.py index 609f93aa..ffc71b41 100644 --- a/demo-notebooks/guided-demos/mnist_disconnected.py +++ b/demo-notebooks/guided-demos/mnist_disconnected.py @@ -30,7 +30,7 @@ BATCH_SIZE = 256 if torch.cuda.is_available() else 64 # %% -local_minst_path = 
os.path.join(PATH_DATASETS, "mnist") +local_minst_path = os.path.dirname(os.path.abspath(__file__) + "/mnist_datasets") print("prior to running the trainer") print("MASTER_ADDR: is ", os.getenv("MASTER_ADDR")) @@ -114,7 +114,7 @@ def configure_optimizers(self): def prepare_data(self): # download - print("Downloading MNIST dataset...") + print("Preparing MNIST dataset...") MNIST(self.data_dir, train=True, download=False) MNIST(self.data_dir, train=False, download=False) From e750d3e912f913e49cff612dc16dc069e729a564 Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Tue, 14 Nov 2023 11:35:47 +0000 Subject: [PATCH 067/496] fix: fix paths to datasets --- demo-notebooks/guided-demos/download_mnist_datasets.py | 3 +-- demo-notebooks/guided-demos/mnist_disconnected.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/demo-notebooks/guided-demos/download_mnist_datasets.py b/demo-notebooks/guided-demos/download_mnist_datasets.py index 5df8be0f..6493296f 100644 --- a/demo-notebooks/guided-demos/download_mnist_datasets.py +++ b/demo-notebooks/guided-demos/download_mnist_datasets.py @@ -41,7 +41,6 @@ def download_mnist_dataset(destination_dir): # Specify the directory where you -script_dir = os.path.dirname(os.path.abspath(__file__)) -destination_dir = script_dir + "/mnist_datasets" +destination_dir = os.path.dirname(os.path.abspath(__file__)) download_mnist_dataset(destination_dir) diff --git a/demo-notebooks/guided-demos/mnist_disconnected.py b/demo-notebooks/guided-demos/mnist_disconnected.py index ffc71b41..d6cff250 100644 --- a/demo-notebooks/guided-demos/mnist_disconnected.py +++ b/demo-notebooks/guided-demos/mnist_disconnected.py @@ -30,7 +30,7 @@ BATCH_SIZE = 256 if torch.cuda.is_available() else 64 # %% -local_minst_path = os.path.dirname(os.path.abspath(__file__) + "/mnist_datasets") +local_minst_path = os.path.dirname(os.path.abspath(__file__) + "/MNIST/raw") print("prior to running the trainer") print("MASTER_ADDR: is ", os.getenv("MASTER_ADDR")) From db8b33d88c8b1ce5d24b3fe275de10d8787b32ca Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Tue, 14 Nov 2023 11:36:18 +0000 Subject: [PATCH 068/496] feat: make it easier to download the datasets --- demo-notebooks/guided-demos/2_basic_jobs.ipynb | 11 +++++++++-- demo-notebooks/guided-demos/mnist_disconnected.py | 2 +- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/demo-notebooks/guided-demos/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/2_basic_jobs.ipynb index 72accf92..4b3cee91 100644 --- a/demo-notebooks/guided-demos/2_basic_jobs.ipynb +++ b/demo-notebooks/guided-demos/2_basic_jobs.ipynb @@ -116,7 +116,13 @@ "id": "83d77b74", "metadata": {}, "source": [ - "First, let's begin by submitting to Ray, training a basic NN on the MNIST dataset:" + "First, let's begin by submitting to Ray, training a basic NN on the MNIST dataset:\n", + "\n", + "NOTE: To test this demo in an air-gapped/ disconnected environment alter the training script to use a local dataset.\n", + "First we must download the MNIST dataset. We've included a helper script to do this for you. \n", + "\n", + "You can run the python script (`python download_mnist_datasets.py`) directly and then place the dataset in the same directory as this notebook. 
\n", + "The path to the dataset would be: `..guided-demos/MNIST/raw/` " ] }, { @@ -129,6 +135,7 @@ "jobdef = DDPJobDefinition(\n", " name=\"mnisttest\",\n", " script=\"mnist.py\",\n", + " # script=\"mnist_disconnected.py\", # training script for disconnected environment\n", " scheduler_args={\"requirements\": \"requirements.txt\"}\n", ")\n", "job = jobdef.submit(cluster)" @@ -302,7 +309,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.13" + "version": "3.9.18" }, "vscode": { "interpreter": { diff --git a/demo-notebooks/guided-demos/mnist_disconnected.py b/demo-notebooks/guided-demos/mnist_disconnected.py index d6cff250..9fc72130 100644 --- a/demo-notebooks/guided-demos/mnist_disconnected.py +++ b/demo-notebooks/guided-demos/mnist_disconnected.py @@ -30,7 +30,7 @@ BATCH_SIZE = 256 if torch.cuda.is_available() else 64 # %% -local_minst_path = os.path.dirname(os.path.abspath(__file__) + "/MNIST/raw") +local_minst_path = os.path.dirname(os.path.abspath(__file__)) print("prior to running the trainer") print("MASTER_ADDR: is ", os.getenv("MASTER_ADDR")) From 083a2ea8a82fab00913228e3d200b458d9e3171e Mon Sep 17 00:00:00 2001 From: Kevin Postlethwait Date: Mon, 20 Nov 2023 14:09:45 -0500 Subject: [PATCH 069/496] get default client if api_config_handler returns None (#395) Signed-off-by: Kevin --- src/codeflare_sdk/cluster/cluster.py | 7 ++--- src/codeflare_sdk/utils/openshift_oauth.py | 33 +++++++++------------- 2 files changed, 16 insertions(+), 24 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 7c94f950..f5f226a0 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -91,14 +91,11 @@ def _client_verify_tls(self): @property def job_client(self): + k8client = api_config_handler() or client.ApiClient() if self._job_submission_client: return self._job_submission_client if self.config.openshift_oauth: - print( - api_config_handler().configuration.get_api_key_with_prefix( - "authorization" - ) - ) + print(k8client.configuration.get_api_key_with_prefix("authorization")) self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), headers=self._client_headers, diff --git a/src/codeflare_sdk/utils/openshift_oauth.py b/src/codeflare_sdk/utils/openshift_oauth.py index 022e9adb..14e55b96 100644 --- a/src/codeflare_sdk/utils/openshift_oauth.py +++ b/src/codeflare_sdk/utils/openshift_oauth.py @@ -34,7 +34,6 @@ def create_openshift_oauth_objects(cluster_name, namespace): def _create_or_replace_oauth_sa(namespace, oauth_sa_name, host): - api_client = api_config_handler() oauth_sa = client.V1ServiceAccount( api_version="v1", kind="ServiceAccount", @@ -47,12 +46,12 @@ def _create_or_replace_oauth_sa(namespace, oauth_sa_name, host): ), ) try: - client.CoreV1Api(api_client).create_namespaced_service_account( + client.CoreV1Api(api_config_handler()).create_namespaced_service_account( namespace=namespace, body=oauth_sa ) except client.ApiException as e: if e.reason == "Conflict": - client.CoreV1Api(api_client).replace_namespaced_service_account( + client.CoreV1Api(api_config_handler()).replace_namespaced_service_account( namespace=namespace, body=oauth_sa, name=oauth_sa_name, @@ -62,7 +61,6 @@ def _create_or_replace_oauth_sa(namespace, oauth_sa_name, host): def _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name): - api_client = api_config_handler() oauth_crb = client.V1ClusterRoleBinding( 
api_version="rbac.authorization.k8s.io/v1", kind="ClusterRoleBinding", @@ -79,14 +77,14 @@ def _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name): ], ) try: - client.RbacAuthorizationV1Api(api_client).create_cluster_role_binding( + client.RbacAuthorizationV1Api(api_config_handler()).create_cluster_role_binding( body=oauth_crb ) except client.ApiException as e: if e.reason == "Conflict": - client.RbacAuthorizationV1Api(api_client).replace_cluster_role_binding( - body=oauth_crb, name=f"{cluster_name}-rb" - ) + client.RbacAuthorizationV1Api( + api_config_handler() + ).replace_cluster_role_binding(body=oauth_crb, name=f"{cluster_name}-rb") else: raise e @@ -98,19 +96,18 @@ def _gen_tls_secret_name(cluster_name): def delete_openshift_oauth_objects(cluster_name, namespace): # NOTE: it might be worth adding error handling here, but shouldn't be necessary because cluster.down(...) checks # for an existing cluster before calling this => the objects should never be deleted twice - api_client = api_config_handler() oauth_sa_name = f"{cluster_name}-oauth-proxy" service_name = f"{cluster_name}-oauth" - client.CoreV1Api(api_client).delete_namespaced_service_account( + client.CoreV1Api(api_config_handler()).delete_namespaced_service_account( name=oauth_sa_name, namespace=namespace ) - client.CoreV1Api(api_client).delete_namespaced_service( + client.CoreV1Api(api_config_handler()).delete_namespaced_service( name=service_name, namespace=namespace ) - client.NetworkingV1Api(api_client).delete_namespaced_ingress( + client.NetworkingV1Api(api_config_handler()).delete_namespaced_ingress( name=f"{cluster_name}-ingress", namespace=namespace ) - client.RbacAuthorizationV1Api(api_client).delete_cluster_role_binding( + client.RbacAuthorizationV1Api(api_config_handler()).delete_cluster_role_binding( name=f"{cluster_name}-rb" ) @@ -123,7 +120,6 @@ def _create_or_replace_oauth_service_obj( service_name: str, port_name: str, ) -> client.V1Service: - api_client = api_config_handler() oauth_service = client.V1Service( api_version="v1", kind="Service", @@ -153,12 +149,12 @@ def _create_or_replace_oauth_service_obj( ), ) try: - client.CoreV1Api(api_client).create_namespaced_service( + client.CoreV1Api(api_config_handler()).create_namespaced_service( namespace=namespace, body=oauth_service ) except client.ApiException as e: if e.reason == "Conflict": - client.CoreV1Api(api_client).replace_namespaced_service( + client.CoreV1Api(api_config_handler()).replace_namespaced_service( namespace=namespace, body=oauth_service, name=service_name ) else: @@ -172,7 +168,6 @@ def _create_or_replace_oauth_ingress_object( port_name: str, host: str, ) -> client.V1Ingress: - api_client = api_config_handler() ingress = client.V1Ingress( api_version="networking.k8s.io/v1", kind="Ingress", @@ -205,12 +200,12 @@ def _create_or_replace_oauth_ingress_object( ), ) try: - client.NetworkingV1Api(api_client).create_namespaced_ingress( + client.NetworkingV1Api(api_config_handler()).create_namespaced_ingress( namespace=namespace, body=ingress ) except client.ApiException as e: if e.reason == "Conflict": - client.NetworkingV1Api(api_client).replace_namespaced_ingress( + client.NetworkingV1Api(api_config_handler()).replace_namespaced_ingress( namespace=namespace, body=ingress, name=f"{cluster_name}-ingress" ) else: From 745772e5e1b30b25cf7b2521084651c7b3f55856 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Thu, 30 Nov 2023 14:26:58 +0000 Subject: [PATCH 070/496] Fix No module named "pyarrow" --- pyproject.toml | 1 + 1 file changed, 
1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 531dfd84..e78d299a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ codeflare-torchx = "0.6.0.dev1" cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" +pyarrow = ">= 6.0.1, < 7.0.0" [tool.poetry.group.docs] optional = true From 053d66b1e0bd6ec5e820ac348a4831bcc6563d4e Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 5 Dec 2023 12:09:04 +0000 Subject: [PATCH 071/496] Update poetry lock to fix nightly --- poetry.lock | 86 +++++++++++++++++++++++++++++++---------------------- 1 file changed, 50 insertions(+), 36 deletions(-) diff --git a/poetry.lock b/poetry.lock index f0be160f..94fe9d61 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -1382,40 +1382,6 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] -[[package]] -name = "numpy" -version = "1.25.2" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.25.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db3ccc4e37a6873045580d413fe79b68e47a681af8db2e046f1dacfa11f86eb3"}, - {file = "numpy-1.25.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90319e4f002795ccfc9050110bbbaa16c944b1c37c0baeea43c5fb881693ae1f"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4a913e29b418d096e696ddd422d8a5d13ffba4ea91f9f60440a3b759b0187"}, - {file = "numpy-1.25.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f08f2e037bba04e707eebf4bc934f1972a315c883a9e0ebfa8a7756eabf9e357"}, - {file = "numpy-1.25.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bec1e7213c7cb00d67093247f8c4db156fd03075f49876957dca4711306d39c9"}, - {file = "numpy-1.25.2-cp310-cp310-win32.whl", hash = "sha256:7dc869c0c75988e1c693d0e2d5b26034644399dd929bc049db55395b1379e044"}, - {file = "numpy-1.25.2-cp310-cp310-win_amd64.whl", hash = "sha256:834b386f2b8210dca38c71a6e0f4fd6922f7d3fcff935dbe3a570945acb1b545"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5462d19336db4560041517dbb7759c21d181a67cb01b36ca109b2ae37d32418"}, - {file = "numpy-1.25.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5652ea24d33585ea39eb6a6a15dac87a1206a692719ff45d53c5282e66d4a8f"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2"}, - {file = "numpy-1.25.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60e7f0f7f6d0eee8364b9a6304c2845b9c491ac706048c7e8cf47b83123b8dbf"}, - {file = "numpy-1.25.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bb33d5a1cf360304754913a350edda36d5b8c5331a8237268c48f91253c3a364"}, - {file = "numpy-1.25.2-cp311-cp311-win32.whl", hash = "sha256:5883c06bb92f2e6c8181df7b39971a5fb436288db58b5a1c3967702d4278691d"}, - {file = "numpy-1.25.2-cp311-cp311-win_amd64.whl", hash = "sha256:5c97325a0ba6f9d041feb9390924614b60b99209a71a69c876f71052521d42a4"}, - {file = "numpy-1.25.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b79e513d7aac42ae918db3ad1341a015488530d0bb2a6abcbdd10a3a829ccfd3"}, - {file = 
"numpy-1.25.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eb942bfb6f84df5ce05dbf4b46673ffed0d3da59f13635ea9b926af3deb76926"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e0746410e73384e70d286f93abf2520035250aad8c5714240b0492a7302fdca"}, - {file = "numpy-1.25.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7806500e4f5bdd04095e849265e55de20d8cc4b661b038957354327f6d9b295"}, - {file = "numpy-1.25.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b77775f4b7df768967a7c8b3567e309f617dd5e99aeb886fa14dc1a0791141f"}, - {file = "numpy-1.25.2-cp39-cp39-win32.whl", hash = "sha256:2792d23d62ec51e50ce4d4b7d73de8f67a2fd3ea710dcbc8563a51a03fb07b01"}, - {file = "numpy-1.25.2-cp39-cp39-win_amd64.whl", hash = "sha256:76b4115d42a7dfc5d485d358728cdd8719be33cc5ec6ec08632a5d6fca2ed380"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a1329e26f46230bf77b02cc19e900db9b52f398d6722ca853349a782d4cff55"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3abc71e8b6edba80a01a52e66d83c5d14433cbcd26a40c329ec7ed09f37901"}, - {file = "numpy-1.25.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1b9735c27cea5d995496f46a8b1cd7b408b3f34b6d50459d9ac8fe3a20cc17bf"}, - {file = "numpy-1.25.2.tar.gz", hash = "sha256:fd608e19c8d7c55021dffd43bfe5492fab8cc105cc8986f813f8c3c048b38760"}, -] - [[package]] name = "nvidia-ml-py" version = "12.535.108" @@ -1651,6 +1617,54 @@ files = [ {file = "py_spy-0.3.14-py2.py3-none-win_amd64.whl", hash = "sha256:8f5b311d09f3a8e33dbd0d44fc6e37b715e8e0c7efefafcda8bfd63b31ab5a31"}, ] +[[package]] +name = "pyarrow" +version = "6.0.1" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyarrow-6.0.1-cp310-cp310-macosx_10_13_universal2.whl", hash = "sha256:c80d2436294a07f9cc54852aa1cef034b6f9c97d29235c4bd53bbf52e24f1ebf"}, + {file = "pyarrow-6.0.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:f150b4f222d0ba397388908725692232345adaa8e58ad543ca00f03c7234ae7b"}, + {file = "pyarrow-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c3a727642c1283dcb44728f0d0a00f8864b171e31c835f4b8def07e3fa8f5c73"}, + {file = "pyarrow-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d29605727865177918e806d855fd8404b6242bf1e56ade0a0023cd4fe5f7f841"}, + {file = "pyarrow-6.0.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b63b54dd0bada05fff76c15b233f9322de0e6947071b7871ec45024e16045aeb"}, + {file = "pyarrow-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e90e75cb11e61ffeffb374f1db7c4788f1df0cb269596bf86c473155294958d"}, + {file = "pyarrow-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f4f3db1da51db4cfbafab3066a01b01578884206dced9f505da950d9ed4402d"}, + {file = "pyarrow-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:2523f87bd36877123fc8c4813f60d298722143ead73e907690a87e8557114693"}, + {file = "pyarrow-6.0.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:8f7d34efb9d667f9204b40ce91a77613c46691c24cd098e3b6986bd7401b8f06"}, + {file = "pyarrow-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3c9184335da8faf08c0df95668ce9d778df3795ce4eec959f44908742900e10"}, + {file = "pyarrow-6.0.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02baee816456a6e64486e587caaae2bf9f084fa3a891354ff18c3e945a1cb72f"}, + {file = 
"pyarrow-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604782b1c744b24a55df80125991a7154fbdef60991eb3d02bfaed06d22f055e"}, + {file = "pyarrow-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fab8132193ae095c43b1e8d6d7f393451ac198de5aaf011c6b576b1442966fec"}, + {file = "pyarrow-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:31038366484e538608f43920a5e2957b8862a43aa49438814619b527f50ec127"}, + {file = "pyarrow-6.0.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:632bea00c2fbe2da5d29ff1698fec312ed3aabfb548f06100144e1907e22093a"}, + {file = "pyarrow-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc03c875e5d68b0d0143f94c438add3ab3c2411ade2748423a9c24608fea571e"}, + {file = "pyarrow-6.0.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1cd4de317df01679e538004123d6d7bc325d73bad5c6bbc3d5f8aa2280408869"}, + {file = "pyarrow-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e77b1f7c6c08ec319b7882c1a7c7304731530923532b3243060e6e64c456cf34"}, + {file = "pyarrow-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a424fd9a3253d0322d53be7bbb20b5b01511706a61efadcf37f416da325e3d48"}, + {file = "pyarrow-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c958cf3a4a9eee09e1063c02b89e882d19c61b3a2ce6cbd55191a6f45ed5004b"}, + {file = "pyarrow-6.0.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:0e0ef24b316c544f4bb56f5c376129097df3739e665feca0eb567f716d45c55a"}, + {file = "pyarrow-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c13ec3b26b3b069d673c5fa3a0c70c38f0d5c94686ac5dbc9d7e7d24040f812"}, + {file = "pyarrow-6.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:71891049dc58039a9523e1cb0d921be001dacb2b327fa7b62a35b96a3aad9f0d"}, + {file = "pyarrow-6.0.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:943141dd8cca6c5722552a0b11a3c2e791cdf85f1768dea8170b0a8a7e824ff9"}, + {file = "pyarrow-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fd077c06061b8fa8fdf91591a4270e368f63cf73c6ab56924d3b64efa96a873"}, + {file = "pyarrow-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5308f4bb770b48e07c8cff36cf6a4452862e8ce9492428ad5581d846420b3884"}, + {file = "pyarrow-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:cde4f711cd9476d4da18128c3a40cb529b6b7d2679aee6e0576212547530fef1"}, + {file = "pyarrow-6.0.1-cp39-cp39-macosx_10_13_universal2.whl", hash = "sha256:b8628269bd9289cae0ea668f5900451043252fe3666667f614e140084dd31aac"}, + {file = "pyarrow-6.0.1-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:981ccdf4f2696550733e18da882469893d2f33f55f3cbeb6a90f81741cbf67aa"}, + {file = "pyarrow-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:954326b426eec6e31ff55209f8840b54d788420e96c4005aaa7beed1fe60b42d"}, + {file = "pyarrow-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b6483bf6b61fe9a046235e4ad4d9286b707607878d7dbdc2eb85a6ec4090baf"}, + {file = "pyarrow-6.0.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7ecad40a1d4e0104cd87757a403f36850261e7a989cf9e4cb3e30420bbbd1092"}, + {file = "pyarrow-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04c752fb41921d0064568a15a87dbb0222cfbe9040d4b2c1b306fe6e0a453530"}, + {file = "pyarrow-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:725d3fe49dfe392ff14a8ae6a75b230a60e8985f2b621b18cfa912fe02b65f1a"}, + {file = 
"pyarrow-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:2403c8af207262ce8e2bc1a9d19313941fd2e424f1cb3c4b749c17efe1fd699a"}, + {file = "pyarrow-6.0.1.tar.gz", hash = "sha256:423990d56cd8f12283b67367d48e142739b789085185018eb03d05087c3c8d43"}, +] + +[package.dependencies] +numpy = ">=1.16.6" + [[package]] name = "pyasn1" version = "0.5.0" @@ -2468,4 +2482,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "089204b324e02b65ee7dfcec621a4f41df3787ea4ac61a6a752ac4bb10f982bf" +content-hash = "544155851689941c754f95e16d21338783796d61aee9bd37564be3601f026c28" From b42d8e6e6c18bb276ee86a7a014df09c52f15c31 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 30 Nov 2023 11:46:29 +0000 Subject: [PATCH 072/496] Documented ingress functionality --- README.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/README.md b/README.md index e864189b..014b14d3 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,31 @@ Full documentation can be found [here](https://project-codeflare.github.io/codef Can be installed via `pip`: `pip install codeflare-sdk` +## CodeFlare SDK in a Kubernetes environment +To create a Ray Cluster using the CodeFlare SDK in a Kubernetes environment an `ingress_domain` must be passed in the Cluster Configuration. +This is used for the creation of the Ray Dashboard and Client ingresses. + +`ingress_options` can be passed to create a custom Ray Dashboard ingress, `ingress_domain` is still a required variable for the Client ingress. +An example of `ingress_options` would look like this. + +``` +ingress_options = { + "ingresses": [ + { + "ingressName": "", + "port": , + "pathType": "", + "path": "", + "host":"", + "annotations": { + "foo": "bar", + "foo": "bar", + } + } + ] +} +``` + ## Development ### Prerequisites From 5bbaddec88a1234db45a395bb9e698e24eca34e3 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Tue, 5 Dec 2023 16:17:01 +0000 Subject: [PATCH 073/496] Removal of init_myservice initContainer --- .../notebook-ex-outputs/gptfttest.yaml | 8 ---- .../notebook-ex-outputs/instascaletest.yaml | 8 ---- .../notebook-ex-outputs/interactivetest.yaml | 8 ---- .../notebook-ex-outputs/jobtest.yaml | 8 ---- .../notebook-ex-outputs/raytest.yaml | 8 ---- .../templates/base-template.yaml | 3 -- src/codeflare_sdk/utils/generate_yaml.py | 2 +- tests/test-case-bad.yaml | 8 ---- tests/test-case-no-mcad.yamls | 8 ---- tests/test-case-prio.yaml | 8 ---- tests/test-case.yaml | 8 ---- tests/unit_test.py | 43 +++---------------- 12 files changed, 6 insertions(+), 114 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml index c1c6c7db..e65a68d2 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml @@ -166,14 +166,6 @@ spec: memory: 8G nvidia.com/gpu: 1 imagePullSecrets: [] - initContainers: - - command: - - sh - - -c - - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; - do echo waiting for myservice; sleep 2; done - image: quay.io/project-codeflare/busybox:latest - name: init-myservice replicas: 1 - generictemplate: apiVersion: route.openshift.io/v1 diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml index ee108336..14e4b8c9 100644 
--- a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml @@ -166,14 +166,6 @@ spec: memory: 8G nvidia.com/gpu: 1 imagePullSecrets: [] - initContainers: - - command: - - sh - - -c - - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; - do echo waiting for myservice; sleep 2; done - image: quay.io/project-codeflare/busybox:latest - name: init-myservice replicas: 1 - generictemplate: apiVersion: route.openshift.io/v1 diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index 6d0f94cb..d44b6340 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -166,14 +166,6 @@ spec: memory: 8G nvidia.com/gpu: 1 imagePullSecrets: [] - initContainers: - - command: - - sh - - -c - - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; - do echo waiting for myservice; sleep 2; done - image: quay.io/project-codeflare/busybox:latest - name: init-myservice replicas: 1 - generictemplate: apiVersion: route.openshift.io/v1 diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index 05136c2a..a5859efb 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -146,14 +146,6 @@ spec: memory: 4G nvidia.com/gpu: 0 imagePullSecrets: [] - initContainers: - - command: - - sh - - -c - - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; - do echo waiting for myservice; sleep 2; done - image: quay.io/project-codeflare/busybox:latest - name: init-myservice replicas: 1 - generictemplate: apiVersion: route.openshift.io/v1 diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index c8ddab8a..3a25aafd 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -146,14 +146,6 @@ spec: memory: 4G nvidia.com/gpu: 0 imagePullSecrets: [] - initContainers: - - command: - - sh - - -c - - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; - do echo waiting for myservice; sleep 2; done - image: quay.io/project-codeflare/busybox:latest - name: init-myservice replicas: 1 - generictemplate: apiVersion: route.openshift.io/v1 diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 2ebd3d2a..c98f53c9 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -223,9 +223,6 @@ spec: - "aw-kuberay" initContainers: # the env var $RAY_IP is set by the operator if missing, with the value of the head service name - - name: init-myservice - image: quay.io/project-codeflare/busybox:latest - command: ['sh', '-c', "until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done"] - name: create-cert image: rayproject/ray:2.7.0 command: diff --git a/src/codeflare_sdk/utils/generate_yaml.py 
b/src/codeflare_sdk/utils/generate_yaml.py index a6aae308..4b6fef8f 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -493,7 +493,7 @@ def disable_raycluster_tls(resources): ): del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ "initContainers" - ][i] + ] updated_items = [] for i in resources["GenericItems"][:]: diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index 1a3b7e37..68cfec34 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -148,14 +148,6 @@ spec: cpu: 3 memory: 5G nvidia.com/gpu: 7 - initContainers: - - command: - - sh - - -c - - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; - do echo waiting for myservice; sleep 2; done - image: quay.io/project-codeflare/busybox:latest - name: init-myservice replicas: 1 - generictemplate: apiVersion: route.openshift.io/v1 diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index c6b128db..b8993a7f 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -138,14 +138,6 @@ spec: nvidia.com/gpu: 7 imagePullSecrets: - name: unit-test-pull-secret - initContainers: - - command: - - sh - - -c - - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; - do echo waiting for myservice; sleep 2; done - image: quay.io/project-codeflare/busybox:latest - name: init-myservice --- apiVersion: networking.k8s.io/v1 kind: Ingress diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 98fb6d68..6051104a 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -169,14 +169,6 @@ spec: nvidia.com/gpu: 7 imagePullSecrets: - name: unit-test-pull-secret - initContainers: - - command: - - sh - - -c - - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; - do echo waiting for myservice; sleep 2; done - image: quay.io/project-codeflare/busybox:latest - name: init-myservice priorityClassName: default replicas: 1 - generictemplate: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 802ef3f4..7c649c5f 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -167,14 +167,6 @@ spec: nvidia.com/gpu: 7 imagePullSecrets: - name: unit-test-pull-secret - initContainers: - - command: - - sh - - -c - - until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; - do echo waiting for myservice; sleep 2; done - image: quay.io/project-codeflare/busybox:latest - name: init-myservice replicas: 1 - generictemplate: apiVersion: networking.k8s.io/v1 diff --git a/tests/unit_test.py b/tests/unit_test.py index 0d9403e6..bc946538 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1112,17 +1112,6 @@ def get_ray_obj(group, version, namespace, plural, cls=None): }, } ], - "initContainers": [ - { - "command": [ - "sh", - "-c", - "until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done", - ], - "image": "quay.io/project-codeflare/busybox:latest", - "name": "init-myservice", - } - ], }, }, } @@ -1155,7 +1144,7 @@ def get_aw_obj(group, version, namespace, plural): "kind": "AppWrapper", "metadata": { "annotations": { - "kubectl.kubernetes.io/last-applied-configuration": 
'{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest1","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1alpha1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest1","controller-tools.k8s.io":"1.0"},"name":"quicktest1","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],"initContainers":[{"command":["sh","-c","until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done"],"image":"quay.io/project-codeflare/busybox:latest","name":"init-myservice"}]}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' + "kubectl.kubernetes.io/last-applied-configuration": 
'{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest1","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1alpha1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest1","controller-tools.k8s.io":"1.0"},"name":"quicktest1","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' }, "creationTimestamp": "2023-02-22T16:26:07Z", "generation": 4, @@ -1388,17 +1377,6 @@ def get_aw_obj(group, version, namespace, plural): }, } ], - "initContainers": [ - { - "command": [ - "sh", - "-c", - "until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done", - ], - "image": "quay.io/project-codeflare/busybox:latest", - "name": "init-myservice", - } - ], }, }, } @@ -1478,7 +1456,7 @@ def get_aw_obj(group, version, namespace, plural): "kind": "AppWrapper", "metadata": { "annotations": { - "kubectl.kubernetes.io/last-applied-configuration": 
'{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest2","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1alpha1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest2","controller-tools.k8s.io":"1.0"},"name":"quicktest2","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],"initContainers":[{"command":["sh","-c","until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done"],"image":"quay.io/project-codeflare/busybox:latest","name":"init-myservice"}]}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' + "kubectl.kubernetes.io/last-applied-configuration": 
'{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest2","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1alpha1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest2","controller-tools.k8s.io":"1.0"},"name":"quicktest2","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' }, "creationTimestamp": "2023-02-22T16:26:07Z", "generation": 4, @@ -1711,17 +1689,6 @@ def get_aw_obj(group, version, namespace, plural): }, } ], - "initContainers": [ - { - "command": [ - "sh", - "-c", - "until nslookup $RAY_IP.$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).svc.cluster.local; do echo waiting for myservice; sleep 2; done", - ], - "image": "quay.io/project-codeflare/busybox:latest", - "name": "init-myservice", - } - ], }, }, } @@ -2627,13 +2594,13 @@ def test_enable_local_interactive(mocker): ) assert head_group_spec["template"]["spec"]["volumes"] == volumes - # 2. workerGrooupSpec has the initContainers command to generated TLS cert from the mounted CA cert. + # 2. workerGroupSpec has the initContainers command to generated TLS cert from the mounted CA cert. 
assert ( - worker_group_spec[0]["template"]["spec"]["initContainers"][1]["command"][2] + worker_group_spec[0]["template"]["spec"]["initContainers"][0]["command"][2] == "cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf \"authorityKeyIdentifier=keyid,issuer\\nbasicConstraints=CA:FALSE\\nsubjectAltName = @alt_names\\n[alt_names]\\nDNS.1 = 127.0.0.1\\nDNS.2 = localhost\\nDNS.3 = ${FQ_RAY_IP}\\nDNS.4 = $(awk 'END{print $1}' /etc/hosts)\">./domain.ext && cp /home/ray/workspace/ca/* . && openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext" ) assert ( - worker_group_spec[0]["template"]["spec"]["initContainers"][1]["volumeMounts"] + worker_group_spec[0]["template"]["spec"]["initContainers"][0]["volumeMounts"] == volume_mounts ) assert worker_group_spec[0]["template"]["spec"]["volumes"] == volumes From b4f19db8a1b7bcb71d692bd07024726b1eea640f Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Tue, 5 Dec 2023 16:43:29 +0000 Subject: [PATCH 074/496] removing unncessary for loop --- src/codeflare_sdk/utils/generate_yaml.py | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 4b6fef8f..3ffbefb5 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -478,22 +478,9 @@ def disable_raycluster_tls(resources): "containers" ][0]["volumeMounts"] - for i in range( - len( - generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ] - ) - ): - if ( - generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ][i]["name"] - == "create-cert" - ): - del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ] + del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ + "initContainers" + ] updated_items = [] for i in resources["GenericItems"][:]: From b5a14d9251cbccf36cbb97dac9bfe61a650e61cc Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 5 Dec 2023 17:22:45 +0000 Subject: [PATCH 075/496] Update Dependencies: Include 'ray[data]' for PyArrow Support --- poetry.lock | 762 ++++++------------------------------------------- pyproject.toml | 3 +- 2 files changed, 95 insertions(+), 670 deletions(-) diff --git a/poetry.lock b/poetry.lock index 94fe9d61..c97f1f78 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,127 +1,5 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
-[[package]] -name = "aiohttp" -version = "3.8.6" -description = "Async http client/server framework (asyncio)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"}, - {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"}, - {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"}, - {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"}, - {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"}, - {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"}, - {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"}, - {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"}, - {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"}, - {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"}, - {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"}, - {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"}, - {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"}, - {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"}, - {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"}, - {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"}, - {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"}, - {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"}, - {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"}, - {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"}, - {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"}, - {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"}, - {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"}, - {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"}, - {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"}, - {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"}, - {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"}, - {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"}, - {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"}, - {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"}, - {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"}, - {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"}, - {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"}, - {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"}, - {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"}, - {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"}, - {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"}, - {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"}, - {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"}, - {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"}, - {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"}, - {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"}, - {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"}, - {file = "aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"}, - {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"}, - {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"}, - {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"}, - {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"}, - {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"}, - {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"}, - {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"}, - {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"}, - {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"}, - {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"}, - {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"}, - {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"}, - {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"}, - {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"}, - {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"}, - {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"}, - {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"}, - {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"}, - {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"}, - {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"}, - {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"}, - {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"}, - {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"}, - {file = 
"aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"}, - {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"}, - {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"}, - {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"}, - {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"}, - {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"}, - {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"}, - {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"}, - {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"}, - {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"}, - {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"}, - {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"}, - {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"}, - {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"}, - {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"}, - {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"}, - {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"}, - {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"}, - {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"}, - {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"}, -] - -[package.dependencies] -aiosignal = ">=1.1.2" -async-timeout = ">=4.0.0a3,<5.0" -attrs = ">=17.3.0" -charset-normalizer = ">=2.0,<4.0" -frozenlist = ">=1.1.1" -multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" - -[package.extras] -speedups = ["Brotli", "aiodns", "cchardet"] - -[[package]] -name = "aiohttp-cors" -version = "0.7.0" -description = "CORS support for aiohttp" -optional = false -python-versions = "*" -files = [ - {file = "aiohttp-cors-0.7.0.tar.gz", hash = "sha256:4d39c6d7100fd9764ed1caf8cebf0eb01bf5e3f24e2e073fda6234bc48b19f5d"}, - {file = "aiohttp_cors-0.7.0-py3-none-any.whl", hash = 
"sha256:0451ba59fdf6909d0e2cd21e4c0a43752bc0703d33fc78ae94d9d9321710193e"}, -] - -[package.dependencies] -aiohttp = ">=1.1" - [[package]] name = "aiosignal" version = "1.3.1" @@ -136,28 +14,6 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" -[[package]] -name = "ansicon" -version = "1.89.0" -description = "Python wrapper for loading Jason Hood's ANSICON" -optional = false -python-versions = "*" -files = [ - {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, - {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, -] - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - [[package]] name = "attrs" version = "23.1.0" @@ -210,22 +66,6 @@ files = [ tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] -[[package]] -name = "blessed" -version = "1.20.0" -description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." -optional = false -python-versions = ">=2.7" -files = [ - {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, - {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, -] - -[package.dependencies] -jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} -six = ">=1.9.0" -wcwidth = ">=0.1.4" - [[package]] name = "cachetools" version = "5.3.1" @@ -464,20 +304,6 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -[[package]] -name = "colorful" -version = "0.5.5" -description = "Terminal string styling done right, in Python." 
-optional = false -python-versions = "*" -files = [ - {file = "colorful-0.5.5-py2.py3-none-any.whl", hash = "sha256:62c187e27c1433db9463ff93b1451898d1e7e23a7e553583fd9daeb6325182e4"}, - {file = "colorful-0.5.5.tar.gz", hash = "sha256:66f8c1264b2a26f7293b96a03bb7a76c4bc8b9634369a0bffdcd12d618056a1d"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - [[package]] name = "commonmark" version = "0.9.1" @@ -605,17 +431,6 @@ test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-co test-randomorder = ["pytest-randomly"] tox = ["tox"] -[[package]] -name = "distlib" -version = "0.3.7" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, -] - [[package]] name = "docker" version = "6.1.3" @@ -796,28 +611,6 @@ smb = ["smbprotocol"] ssh = ["paramiko"] tqdm = ["tqdm"] -[[package]] -name = "google-api-core" -version = "2.12.0" -description = "Google API client core library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-api-core-2.12.0.tar.gz", hash = "sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553"}, - {file = "google_api_core-2.12.0-py3-none-any.whl", hash = "sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160"}, -] - -[package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] - [[package]] name = "google-auth" version = "2.23.3" @@ -841,108 +634,6 @@ pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] -[[package]] -name = "googleapis-common-protos" -version = "1.60.0" -description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.60.0.tar.gz", hash = "sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708"}, - {file = "googleapis_common_protos-1.60.0-py2.py3-none-any.whl", hash = "sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918"}, -] - -[package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "gpustat" -version = "1.1.1" -description = "An utility to monitor NVIDIA GPU status and usage" -optional = false -python-versions = ">=3.6" -files = [ - {file = "gpustat-1.1.1.tar.gz", hash = "sha256:c18d3ed5518fc16300c42d694debc70aebb3be55cae91f1db64d63b5fa8af9d8"}, -] - -[package.dependencies] -blessed = ">=1.17.1" -nvidia-ml-py = ">=11.450.129" -psutil = ">=5.6.0" - -[package.extras] -completion = ["shtab"] -test = ["mockito (>=1.2.1)", 
"pytest (>=5.4.1)", "pytest-runner"] - -[[package]] -name = "grpcio" -version = "1.59.0" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.7" -files = [ - {file = "grpcio-1.59.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:225e5fa61c35eeaebb4e7491cd2d768cd8eb6ed00f2664fa83a58f29418b39fd"}, - {file = "grpcio-1.59.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b95ec8ecc4f703f5caaa8d96e93e40c7f589bad299a2617bdb8becbcce525539"}, - {file = "grpcio-1.59.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:1a839ba86764cc48226f50b924216000c79779c563a301586a107bda9cbe9dcf"}, - {file = "grpcio-1.59.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6cfe44a5d7c7d5f1017a7da1c8160304091ca5dc64a0f85bca0d63008c3137a"}, - {file = "grpcio-1.59.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0fcf53df684fcc0154b1e61f6b4a8c4cf5f49d98a63511e3f30966feff39cd0"}, - {file = "grpcio-1.59.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa66cac32861500f280bb60fe7d5b3e22d68c51e18e65367e38f8669b78cea3b"}, - {file = "grpcio-1.59.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8cd2d38c2d52f607d75a74143113174c36d8a416d9472415eab834f837580cf7"}, - {file = "grpcio-1.59.0-cp310-cp310-win32.whl", hash = "sha256:228b91ce454876d7eed74041aff24a8f04c0306b7250a2da99d35dd25e2a1211"}, - {file = "grpcio-1.59.0-cp310-cp310-win_amd64.whl", hash = "sha256:ca87ee6183421b7cea3544190061f6c1c3dfc959e0b57a5286b108511fd34ff4"}, - {file = "grpcio-1.59.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c173a87d622ea074ce79be33b952f0b424fa92182063c3bda8625c11d3585d09"}, - {file = "grpcio-1.59.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:ec78aebb9b6771d6a1de7b6ca2f779a2f6113b9108d486e904bde323d51f5589"}, - {file = "grpcio-1.59.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:0b84445fa94d59e6806c10266b977f92fa997db3585f125d6b751af02ff8b9fe"}, - {file = "grpcio-1.59.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c251d22de8f9f5cca9ee47e4bade7c5c853e6e40743f47f5cc02288ee7a87252"}, - {file = "grpcio-1.59.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:956f0b7cb465a65de1bd90d5a7475b4dc55089b25042fe0f6c870707e9aabb1d"}, - {file = "grpcio-1.59.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:38da5310ef84e16d638ad89550b5b9424df508fd5c7b968b90eb9629ca9be4b9"}, - {file = "grpcio-1.59.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:63982150a7d598281fa1d7ffead6096e543ff8be189d3235dd2b5604f2c553e5"}, - {file = "grpcio-1.59.0-cp311-cp311-win32.whl", hash = "sha256:50eff97397e29eeee5df106ea1afce3ee134d567aa2c8e04fabab05c79d791a7"}, - {file = "grpcio-1.59.0-cp311-cp311-win_amd64.whl", hash = "sha256:15f03bd714f987d48ae57fe092cf81960ae36da4e520e729392a59a75cda4f29"}, - {file = "grpcio-1.59.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f1feb034321ae2f718172d86b8276c03599846dc7bb1792ae370af02718f91c5"}, - {file = "grpcio-1.59.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d09bd2a4e9f5a44d36bb8684f284835c14d30c22d8ec92ce796655af12163588"}, - {file = "grpcio-1.59.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:2f120d27051e4c59db2f267b71b833796770d3ea36ca712befa8c5fff5da6ebd"}, - {file = "grpcio-1.59.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba0ca727a173ee093f49ead932c051af463258b4b493b956a2c099696f38aa66"}, - {file = "grpcio-1.59.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5711c51e204dc52065f4a3327dca46e69636a0b76d3e98c2c28c4ccef9b04c52"}, - {file = "grpcio-1.59.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d74f7d2d7c242a6af9d4d069552ec3669965b74fed6b92946e0e13b4168374f9"}, - {file = "grpcio-1.59.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3859917de234a0a2a52132489c4425a73669de9c458b01c9a83687f1f31b5b10"}, - {file = "grpcio-1.59.0-cp312-cp312-win32.whl", hash = "sha256:de2599985b7c1b4ce7526e15c969d66b93687571aa008ca749d6235d056b7205"}, - {file = "grpcio-1.59.0-cp312-cp312-win_amd64.whl", hash = "sha256:598f3530231cf10ae03f4ab92d48c3be1fee0c52213a1d5958df1a90957e6a88"}, - {file = "grpcio-1.59.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:b34c7a4c31841a2ea27246a05eed8a80c319bfc0d3e644412ec9ce437105ff6c"}, - {file = "grpcio-1.59.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:c4dfdb49f4997dc664f30116af2d34751b91aa031f8c8ee251ce4dcfc11277b0"}, - {file = "grpcio-1.59.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:61bc72a00ecc2b79d9695220b4d02e8ba53b702b42411397e831c9b0589f08a3"}, - {file = "grpcio-1.59.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f367e4b524cb319e50acbdea57bb63c3b717c5d561974ace0b065a648bb3bad3"}, - {file = "grpcio-1.59.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:849c47ef42424c86af069a9c5e691a765e304079755d5c29eff511263fad9c2a"}, - {file = "grpcio-1.59.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c0488c2b0528e6072010182075615620071371701733c63ab5be49140ed8f7f0"}, - {file = "grpcio-1.59.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:611d9aa0017fa386809bddcb76653a5ab18c264faf4d9ff35cb904d44745f575"}, - {file = "grpcio-1.59.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e5378785dce2b91eb2e5b857ec7602305a3b5cf78311767146464bfa365fc897"}, - {file = "grpcio-1.59.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:fe976910de34d21057bcb53b2c5e667843588b48bf11339da2a75f5c4c5b4055"}, - {file = "grpcio-1.59.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:c041a91712bf23b2a910f61e16565a05869e505dc5a5c025d429ca6de5de842c"}, - {file = "grpcio-1.59.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:0ae444221b2c16d8211b55326f8ba173ba8f8c76349bfc1768198ba592b58f74"}, - {file = "grpcio-1.59.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ceb1e68135788c3fce2211de86a7597591f0b9a0d2bb80e8401fd1d915991bac"}, - {file = "grpcio-1.59.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4b1cc3a9dc1924d2eb26eec8792fedd4b3fcd10111e26c1d551f2e4eda79ce"}, - {file = "grpcio-1.59.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:871371ce0c0055d3db2a86fdebd1e1d647cf21a8912acc30052660297a5a6901"}, - {file = "grpcio-1.59.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:93e9cb546e610829e462147ce724a9cb108e61647a3454500438a6deef610be1"}, - {file = "grpcio-1.59.0-cp38-cp38-win32.whl", hash = "sha256:f21917aa50b40842b51aff2de6ebf9e2f6af3fe0971c31960ad6a3a2b24988f4"}, - {file = "grpcio-1.59.0-cp38-cp38-win_amd64.whl", hash = "sha256:14890da86a0c0e9dc1ea8e90101d7a3e0e7b1e71f4487fab36e2bfd2ecadd13c"}, - {file = "grpcio-1.59.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:34341d9e81a4b669a5f5dca3b2a760b6798e95cdda2b173e65d29d0b16692857"}, - {file = "grpcio-1.59.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:986de4aa75646e963466b386a8c5055c8b23a26a36a6c99052385d6fe8aaf180"}, - {file = "grpcio-1.59.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = 
"sha256:aca8a24fef80bef73f83eb8153f5f5a0134d9539b4c436a716256b311dda90a6"}, - {file = "grpcio-1.59.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:936b2e04663660c600d5173bc2cc84e15adbad9c8f71946eb833b0afc205b996"}, - {file = "grpcio-1.59.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc8bf2e7bc725e76c0c11e474634a08c8f24bcf7426c0c6d60c8f9c6e70e4d4a"}, - {file = "grpcio-1.59.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81d86a096ccd24a57fa5772a544c9e566218bc4de49e8c909882dae9d73392df"}, - {file = "grpcio-1.59.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2ea95cd6abbe20138b8df965b4a8674ec312aaef3147c0f46a0bac661f09e8d0"}, - {file = "grpcio-1.59.0-cp39-cp39-win32.whl", hash = "sha256:3b8ff795d35a93d1df6531f31c1502673d1cebeeba93d0f9bd74617381507e3f"}, - {file = "grpcio-1.59.0-cp39-cp39-win_amd64.whl", hash = "sha256:38823bd088c69f59966f594d087d3a929d1ef310506bee9e3648317660d65b81"}, - {file = "grpcio-1.59.0.tar.gz", hash = "sha256:acf70a63cf09dd494000007b798aff88a436e1c03b394995ce450be437b8e54f"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.59.0)"] - [[package]] name = "idna" version = "3.4" @@ -1002,20 +693,6 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "jinxed" -version = "1.2.0" -description = "Jinxed Terminal Library" -optional = false -python-versions = "*" -files = [ - {file = "jinxed-1.2.0-py2.py3-none-any.whl", hash = "sha256:cfc2b2e4e3b4326954d546ba6d6b9a7a796ddcb0aef8d03161d005177eb0d48b"}, - {file = "jinxed-1.2.0.tar.gz", hash = "sha256:032acda92d5c57cd216033cbbd53de731e6ed50deb63eb4781336ca55f72cda5"}, -] - -[package.dependencies] -ansicon = {version = "*", markers = "platform_system == \"Windows\""} - [[package]] name = "jsonschema" version = "4.19.1" @@ -1251,89 +928,6 @@ files = [ {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"}, ] -[[package]] -name = "multidict" -version = "6.0.4" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = 
"multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = 
"multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = 
"multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, -] - [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1382,17 +976,6 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] -[[package]] -name = "nvidia-ml-py" -version = "12.535.108" -description = "Python Bindings for the NVIDIA Management Library" -optional = false -python-versions = "*" -files = [ - {file = 
"nvidia-ml-py-12.535.108.tar.gz", hash = "sha256:141fe818771a165fb93f75dbe7f01f767c3bafa7c13f6876f53583511b078ee1"}, - {file = "nvidia_ml_py-12.535.108-py3-none-any.whl", hash = "sha256:f4e260ad0adb06d7ca1ea5574862ed4ef70f0a17720836854594fe188a3acaf4"}, -] - [[package]] name = "oauthlib" version = "3.2.2" @@ -1409,32 +992,6 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] -[[package]] -name = "opencensus" -version = "0.11.3" -description = "A stats collection and distributed tracing framework" -optional = false -python-versions = "*" -files = [ - {file = "opencensus-0.11.3-py2.py3-none-any.whl", hash = "sha256:9c33d572059f0f0e874fc34c697a39a4193aa9cf3203f7e777df42e9edeea56a"}, - {file = "opencensus-0.11.3.tar.gz", hash = "sha256:af7a98bd51e63968144d772f346d696ed498a32dbdc4be267cd6011c4ce05da8"}, -] - -[package.dependencies] -google-api-core = {version = ">=1.0.0,<3.0.0", markers = "python_version >= \"3.6\""} -opencensus-context = ">=0.1.3" - -[[package]] -name = "opencensus-context" -version = "0.1.3" -description = "OpenCensus Runtime Context" -optional = false -python-versions = "*" -files = [ - {file = "opencensus-context-0.1.3.tar.gz", hash = "sha256:a03108c3c10d8c80bb5ddf5c8a1f033161fa61972a9917f9b9b3a18517f0088c"}, - {file = "opencensus_context-0.1.3-py2.py3-none-any.whl", hash = "sha256:073bb0590007af276853009fac7e4bab1d523c3f03baf4cb4511ca38967c6039"}, -] - [[package]] name = "openshift-client" version = "1.0.18" @@ -1462,6 +1019,73 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "pandas" +version = "2.0.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, + {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, + {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, + {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, + {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, + {file = 
"pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, + {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, + {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, + {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, + {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, + {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] +aws = ["s3fs (>=2021.08.0)"] +clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] +compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] +computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = 
["fsspec (>=2021.07.0)"] +gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] +hdf5 = ["tables (>=3.6.1)"] +html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] +mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] +spss = ["pyreadstat (>=1.1.2)"] +sql-other = ["SQLAlchemy (>=1.4.16)"] +test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.6.3)"] + [[package]] name = "paramiko" version = "3.3.1" @@ -1509,21 +1133,6 @@ files = [ {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, ] -[[package]] -name = "platformdirs" -version = "3.11.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, - {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, -] - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] - [[package]] name = "pluggy" version = "1.3.0" @@ -1539,20 +1148,6 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "prometheus-client" -version = "0.17.1" -description = "Python client for the Prometheus monitoring system." -optional = false -python-versions = ">=3.6" -files = [ - {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, - {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, -] - -[package.extras] -twisted = ["twisted"] - [[package]] name = "protobuf" version = "4.24.4" @@ -1575,48 +1170,6 @@ files = [ {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"}, ] -[[package]] -name = "psutil" -version = "5.9.5" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "py-spy" -version = "0.3.14" -description = "Sampling profiler for Python programs" -optional = false -python-versions = "*" -files = [ - {file = "py_spy-0.3.14-py2.py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5b342cc5feb8d160d57a7ff308de153f6be68dcf506ad02b4d67065f2bae7f45"}, - {file = "py_spy-0.3.14-py2.py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fe7efe6c91f723442259d428bf1f9ddb9c1679828866b353d539345ca40d9dd2"}, - {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590905447241d789d9de36cff9f52067b6f18d8b5e9fb399242041568d414461"}, - {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6211fe7f587b3532ba9d300784326d9a6f2b890af7bf6fff21a029ebbc812b"}, - {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3e8e48032e71c94c3dd51694c39e762e4bbfec250df5bf514adcdd64e79371e0"}, - {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f59b0b52e56ba9566305236375e6fc68888261d0d36b5addbe3cf85affbefc0e"}, - {file = "py_spy-0.3.14-py2.py3-none-win_amd64.whl", hash = "sha256:8f5b311d09f3a8e33dbd0d44fc6e37b715e8e0c7efefafcda8bfd63b31ab5a31"}, -] - [[package]] name = 
"pyarrow" version = "6.0.1" @@ -1861,6 +1414,17 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "pytz" +version = "2023.3.post1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, +] + [[package]] name = "pywin32" version = "306" @@ -1977,34 +1541,24 @@ files = [ ] [package.dependencies] -aiohttp = {version = ">=3.7", optional = true, markers = "extra == \"default\""} -aiohttp-cors = {version = "*", optional = true, markers = "extra == \"default\""} aiosignal = "*" click = ">=7.0" -colorful = {version = "*", optional = true, markers = "extra == \"default\""} filelock = "*" frozenlist = "*" -gpustat = {version = ">=1.0.0", optional = true, markers = "extra == \"default\""} -grpcio = [ - {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\" and extra == \"default\""}, - {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""}, -] +fsspec = {version = "*", optional = true, markers = "extra == \"data\""} jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" numpy = [ {version = ">=1.16", markers = "python_version < \"3.9\""}, + {version = ">=1.20", optional = true, markers = "extra == \"data\""}, {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, ] -opencensus = {version = "*", optional = true, markers = "extra == \"default\""} packaging = "*" -prometheus-client = {version = ">=0.7.1", optional = true, markers = "extra == \"default\""} +pandas = {version = ">=1.3", optional = true, markers = "extra == \"data\""} protobuf = ">=3.15.3,<3.19.5 || >3.19.5" -py-spy = {version = ">=0.2.0", optional = true, markers = "extra == \"default\""} -pydantic = {version = "<2", optional = true, markers = "extra == \"default\""} +pyarrow = {version = ">=6.0.1", optional = true, markers = "extra == \"data\""} pyyaml = "*" requests = "*" -smart-open = {version = "*", optional = true, markers = "extra == \"default\""} -virtualenv = {version = ">=20.0.24,<20.21.1", optional = true, markers = "extra == \"default\""} [package.extras] air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] @@ -2242,27 +1796,6 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -[[package]] -name = "smart-open" -version = "6.4.0" -description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" -optional = false -python-versions = ">=3.6,<4.0" -files = [ - {file = "smart_open-6.4.0-py3-none-any.whl", hash = "sha256:8d3ef7e6997e8e42dd55c74166ed21e6ac70664caa32dd940b26d54a8f6b4142"}, - {file = "smart_open-6.4.0.tar.gz", hash = "sha256:be3c92c246fbe80ebce8fbacb180494a481a77fcdcb7c1aadb2ea5b9c2bee8b9"}, -] - -[package.extras] -all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] -azure = 
["azure-common", "azure-core", "azure-storage-blob"] -gcs = ["google-cloud-storage (>=2.6.0)"] -http = ["requests"] -s3 = ["boto3"] -ssh = ["paramiko"] -test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] -webhdfs = ["requests"] - [[package]] name = "tabulate" version = "0.9.0" @@ -2314,6 +1847,17 @@ files = [ mypy-extensions = ">=0.3.0" typing-extensions = ">=3.7.4" +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + [[package]] name = "urllib3" version = "1.26.17" @@ -2330,37 +1874,6 @@ brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotl secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -[[package]] -name = "virtualenv" -version = "20.21.0" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.7" -files = [ - {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, - {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, -] - -[package.dependencies] -distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<4" - -[package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] - -[[package]] -name = "wcwidth" -version = "0.2.8" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, - {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, -] - [[package]] name = "websocket-client" version = "1.6.4" @@ -2377,93 +1890,6 @@ docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] -[[package]] -name = "yarl" -version = "1.9.2" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, - {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, - {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, - {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, - {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, - {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, - {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, - {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, - {file = 
"yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, - {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, - {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, - {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, - {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, - {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, -] - -[package.dependencies] -idna = ">=2.0" 
-multidict = ">=4.0" - [[package]] name = "zipp" version = "3.17.0" @@ -2482,4 +1908,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "544155851689941c754f95e16d21338783796d61aee9bd37564be3601f026c28" +content-hash = "2a21d955dba1069ee968ad0486bedef6c1ccdec946962ac2c3f9b28707203845" diff --git a/pyproject.toml b/pyproject.toml index e78d299a..c1ca5e5a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,13 +23,12 @@ keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] python = "^3.8" openshift-client = "1.0.18" rich = "^12.5" -ray = {version = "2.7.0", extras = ["default"]} +ray = {version = "2.7.0", extras = ["data"]} kubernetes = ">= 25.3.0, < 27" codeflare-torchx = "0.6.0.dev1" cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" -pyarrow = ">= 6.0.1, < 7.0.0" [tool.poetry.group.docs] optional = true From ce3ac98e95bc1326e493c74054f9fbdef5d000d4 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 29 Nov 2023 17:25:05 +0000 Subject: [PATCH 076/496] Added RayJobSubmission Client wrapper for RCS not created by CodeFlare --- src/codeflare_sdk/job/ray_jobs.py | 117 ++++++++++++++++++++++++++++++ tests/unit_test.py | 80 ++++++++++++++++++++ 2 files changed, 197 insertions(+) create mode 100644 src/codeflare_sdk/job/ray_jobs.py diff --git a/src/codeflare_sdk/job/ray_jobs.py b/src/codeflare_sdk/job/ray_jobs.py new file mode 100644 index 00000000..422d3f88 --- /dev/null +++ b/src/codeflare_sdk/job/ray_jobs.py @@ -0,0 +1,117 @@ +# Copyright 2022 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The ray_jobs sub-module contains methods needed to submit jobs and connect to Ray Clusters that were not created by CodeFlare. +The SDK acts as a wrapper for the Ray Job Submission Client. +""" +from ray.job_submission import JobSubmissionClient +from typing import Iterator, Optional, Dict, Any, Union + + +class RayJobClient: + """ + An object for that acts as the Ray Job Submission Client. 
+ """ + + def __init__( + self, + address: Optional[str] = None, + create_cluster_if_needed: bool = False, + cookies: Optional[Dict[str, Any]] = None, + metadata: Optional[Dict[str, Any]] = None, + headers: Optional[Dict[str, Any]] = None, + verify: Optional[Union[str, bool]] = True, + ): + self.rayJobClient = JobSubmissionClient( + address=address, + create_cluster_if_needed=create_cluster_if_needed, + cookies=cookies, + metadata=metadata, + headers=headers, + verify=verify, + ) + + def submit_job( + self, + entrypoint: str, + job_id: Optional[str] = None, + runtime_env: Optional[Dict[str, Any]] = None, + metadata: Optional[Dict[str, str]] = None, + submission_id: Optional[str] = None, + entrypoint_num_cpus: Optional[Union[int, float]] = None, + entrypoint_num_gpus: Optional[Union[int, float]] = None, + entrypoint_resources: Optional[Dict[str, float]] = None, + ) -> str: + """ + Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field. + """ + return self.rayJobClient.submit_job( + entrypoint=entrypoint, + job_id=job_id, + runtime_env=runtime_env, + metadata=metadata, + submission_id=submission_id, + entrypoint_num_cpus=entrypoint_num_cpus, + entrypoint_num_gpus=entrypoint_num_gpus, + entrypoint_resources=entrypoint_resources, + ) + + def delete_job(self, job_id: str) -> bool: + """ + Method for deleting jobs with the job id being a mandatory field. + """ + return self.rayJobClient.delete_job(job_id=job_id) + + def get_address(self) -> str: + """ + Method for getting the address from the RayJobClient + """ + return self.rayJobClient.get_address() + + def get_job_info(self, job_id: str): + """ + Method for getting the job info with the job id being a mandatory field. + """ + return self.rayJobClient.get_job_info(job_id=job_id) + + def get_job_logs(self, job_id: str) -> str: + """ + Method for getting the job info with the job id being a mandatory field. + """ + return self.rayJobClient.get_job_logs(job_id=job_id) + + def get_job_status(self, job_id: str) -> str: + """ + Method for getting the job's status with the job id being a mandatory field. + """ + return self.rayJobClient.get_job_status(job_id=job_id) + + def list_jobs(self): + """ + Method for getting a list of current jobs in the Ray Cluster. + """ + return self.rayJobClient.list_jobs() + + def stop_job(self, job_id: str) -> bool: + """ + Method for stopping a job with the job id being a mandatory field. + """ + return self.rayJobClient.stop_job(job_id=job_id) + + def tail_job_logs(self, job_id: str) -> Iterator[str]: + """ + Method for getting an iterator that follows the logs of a job with the job id being a mandatory field. 
+ """ + return self.rayJobClient.tail_job_logs(job_id=job_id) diff --git a/tests/unit_test.py b/tests/unit_test.py index bc946538..68897eea 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -103,6 +103,7 @@ from unittest.mock import MagicMock from pytest_mock import MockerFixture from ray.job_submission import JobSubmissionClient +from codeflare_sdk.job.ray_jobs import RayJobClient # For mocking openshift client results fake_res = openshift.Result("fake") @@ -2846,6 +2847,85 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): ) +""" +Ray Jobs tests +""" +# rjc == RayJobClient +@pytest.fixture +def ray_job_client(mocker): + # Creating a fixture to instantiate RayJobClient with a mocked JobSubmissionClient + mocker.patch.object(JobSubmissionClient, "__init__", return_value=None) + return RayJobClient( + "https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" + ) + + +def test_rjc_submit_job(ray_job_client, mocker): + mocked_submit_job = mocker.patch.object( + JobSubmissionClient, "submit_job", return_value="mocked_submission_id" + ) + submission_id = ray_job_client.submit_job(entrypoint={"pip": ["numpy"]}) + + mocked_submit_job.assert_called_once_with( + entrypoint={"pip": ["numpy"]}, + job_id=None, + runtime_env=None, + metadata=None, + submission_id=None, + entrypoint_num_cpus=None, + entrypoint_num_gpus=None, + entrypoint_resources=None, + ) + + assert submission_id == "mocked_submission_id" + + +def test_rjc_delete_job(ray_job_client, mocker): + mocked_delete_job = mocker.patch.object( + JobSubmissionClient, "delete_job", return_value=True + ) + result = ray_job_client.delete_job(job_id="mocked_job_id") + + mocked_delete_job.assert_called_once_with(job_id="mocked_job_id") + assert result is True + + +def test_rjc_address(ray_job_client, mocker): + mocked_rjc_address = mocker.patch.object( + JobSubmissionClient, + "get_address", + return_value="https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org", + ) + address = ray_job_client.get_address() + + mocked_rjc_address.assert_called_once() + assert ( + address + == "https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" + ) + + +def test_rjc_get_job_logs(ray_job_client, mocker): + mocked_rjc_get_job_logs = mocker.patch.object( + JobSubmissionClient, "get_job_logs", return_value="Logs" + ) + logs = ray_job_client.get_job_logs(job_id="mocked_job_id") + + mocked_rjc_get_job_logs.assert_called_once_with(job_id="mocked_job_id") + assert logs == "Logs" + + +def test_rjc_get_job_info(ray_job_client, mocker): + job_details_example = "JobDetails(type=, job_id=None, submission_id='mocked_submission_id', driver_info=None, status=, entrypoint='python test.py', message='Job has not started yet. 
It may be waiting for the runtime environment to be set up.', error_type=None, start_time=1701271760641, end_time=None, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_67de6f0e60d43b19.zip', 'pip': {'packages': ['numpy'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address=None, driver_node_id=None)" + mocked_rjc_get_job_info = mocker.patch.object( + JobSubmissionClient, "get_job_info", return_value=job_details_example + ) + job_details = ray_job_client.get_job_info(job_id="mocked_job_id") + + mocked_rjc_get_job_info.assert_called_once_with(job_id="mocked_job_id") + assert job_details == job_details_example + + # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}unit-test-cluster.yaml") From 4792573ab63b723099563a85ff26491c5eabebee Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 29 Nov 2023 17:54:06 +0000 Subject: [PATCH 077/496] Fixed formatting in the unit-test.py file --- tests/unit_test.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/unit_test.py b/tests/unit_test.py index 68897eea..d12b844d 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -2850,6 +2850,8 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): """ Ray Jobs tests """ + + # rjc == RayJobClient @pytest.fixture def ray_job_client(mocker): From 5d752244845e8929463e54087bb62d9a5e53e1dd Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 30 Nov 2023 14:31:23 +0000 Subject: [PATCH 078/496] Review changes & added more unit tests --- src/codeflare_sdk/job/ray_jobs.py | 16 ++++++- tests/unit_test.py | 75 ++++++++++++++++++++++++++++++- 2 files changed, 87 insertions(+), 4 deletions(-) diff --git a/src/codeflare_sdk/job/ray_jobs.py b/src/codeflare_sdk/job/ray_jobs.py index 422d3f88..96900ff2 100644 --- a/src/codeflare_sdk/job/ray_jobs.py +++ b/src/codeflare_sdk/job/ray_jobs.py @@ -72,7 +72,14 @@ def delete_job(self, job_id: str) -> bool: """ Method for deleting jobs with the job id being a mandatory field. """ - return self.rayJobClient.delete_job(job_id=job_id) + deletion_status = self.rayJobClient.delete_job(job_id=job_id) + + if deletion_status: + print(f"Successfully deleted Job {job_id}") + return deletion_status + else: + print(f"Failed to delete Job {job_id}") + return deletion_status def get_address(self) -> str: """ @@ -108,7 +115,12 @@ def stop_job(self, job_id: str) -> bool: """ Method for stopping a job with the job id being a mandatory field. 
""" - return self.rayJobClient.stop_job(job_id=job_id) + stop_job_status = self.rayJobClient.stop_job(job_id=job_id) + if stop_job_status: + print(f"Successfully stopped Job {job_id}") + else: + print(f"Failed to stop Job, {job_id} could have already completed.") + return stop_job_status def tail_job_logs(self, job_id: str) -> Iterator[str]: """ diff --git a/tests/unit_test.py b/tests/unit_test.py index d12b844d..86e3a940 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -2883,14 +2883,44 @@ def test_rjc_submit_job(ray_job_client, mocker): def test_rjc_delete_job(ray_job_client, mocker): - mocked_delete_job = mocker.patch.object( + # Case return True + mocked_delete_job_True = mocker.patch.object( JobSubmissionClient, "delete_job", return_value=True ) result = ray_job_client.delete_job(job_id="mocked_job_id") - mocked_delete_job.assert_called_once_with(job_id="mocked_job_id") + mocked_delete_job_True.assert_called_once_with(job_id="mocked_job_id") assert result is True + # Case return False + mocked_delete_job_False = mocker.patch.object( + JobSubmissionClient, "delete_job", return_value=False + ) + result = ray_job_client.delete_job(job_id="mocked_job_id") + + mocked_delete_job_False.assert_called_once_with(job_id="mocked_job_id") + assert result is False + + +def test_rjc_stop_job(ray_job_client, mocker): + # Case return True + mocked_stop_job_True = mocker.patch.object( + JobSubmissionClient, "stop_job", return_value=True + ) + result = ray_job_client.stop_job(job_id="mocked_job_id") + + mocked_stop_job_True.assert_called_once_with(job_id="mocked_job_id") + assert result is True + + # Case return False + mocked_stop_job_False = mocker.patch.object( + JobSubmissionClient, "stop_job", return_value=False + ) + result = ray_job_client.stop_job(job_id="mocked_job_id") + + mocked_stop_job_False.assert_called_once_with(job_id="mocked_job_id") + assert result is False + def test_rjc_address(ray_job_client, mocker): mocked_rjc_address = mocker.patch.object( @@ -2928,6 +2958,47 @@ def test_rjc_get_job_info(ray_job_client, mocker): assert job_details == job_details_example +def test_rjc_get_job_status(ray_job_client, mocker): + job_status_example = "" + mocked_rjc_get_job_status = mocker.patch.object( + JobSubmissionClient, "get_job_status", return_value=job_status_example + ) + job_status = ray_job_client.get_job_status(job_id="mocked_job_id") + + mocked_rjc_get_job_status.assert_called_once_with(job_id="mocked_job_id") + assert job_status == job_status_example + + +def test_rjc_tail_job_logs(ray_job_client, mocker): + logs_example = [ + "Job started...", + "Processing input data...", + "Finalizing results...", + "Job completed successfully.", + ] + mocked_rjc_tail_job_logs = mocker.patch.object( + JobSubmissionClient, "tail_job_logs", return_value=logs_example + ) + job_tail_job_logs = ray_job_client.tail_job_logs(job_id="mocked_job_id") + + mocked_rjc_tail_job_logs.assert_called_once_with(job_id="mocked_job_id") + assert job_tail_job_logs == logs_example + + +def test_rjc_list_jobs(ray_job_client, mocker): + jobs_list = [ + "JobDetails(type=, job_id=None, submission_id='raysubmit_4k2NYS1YbRXYPZCM', driver_info=None, status=, entrypoint='python mnist.py', message='Job finished successfully.', error_type=None, start_time=1701352132585, end_time=1701352192002, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {'packages': ['pytorch_lightning==1.5.10', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 
'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", + "JobDetails(type=, job_id=None, submission_id='raysubmit_iRuwU8vdkbUZZGvT', driver_info=None, status=, entrypoint='python mnist.py', message='Job was intentionally stopped.', error_type=None, start_time=1701353096163, end_time=1701353097733, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {'packages': ['pytorch_lightning==1.5.10', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", + ] + mocked_rjc_list_jobs = mocker.patch.object( + JobSubmissionClient, "list_jobs", return_value=jobs_list + ) + job_list_jobs = ray_job_client.list_jobs() + + mocked_rjc_list_jobs.assert_called_once() + assert job_list_jobs == jobs_list + + # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}unit-test-cluster.yaml") From e85fe63c9ccc8260591159e34dfbcfd863fb01a4 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 30 Nov 2023 15:06:14 +0000 Subject: [PATCH 079/496] Changed returns for delete_jobs and stop_jobs and updated unit tests --- src/codeflare_sdk.egg-info/SOURCES.txt | 2 ++ src/codeflare_sdk/job/ray_jobs.py | 20 ++++++++++---------- tests/unit_test.py | 17 ++++++++++------- 3 files changed, 22 insertions(+), 17 deletions(-) diff --git a/src/codeflare_sdk.egg-info/SOURCES.txt b/src/codeflare_sdk.egg-info/SOURCES.txt index cfea1dbf..d922d0db 100644 --- a/src/codeflare_sdk.egg-info/SOURCES.txt +++ b/src/codeflare_sdk.egg-info/SOURCES.txt @@ -14,8 +14,10 @@ src/codeflare_sdk/cluster/config.py src/codeflare_sdk/cluster/model.py src/codeflare_sdk/job/__init__.py src/codeflare_sdk/job/jobs.py +src/codeflare_sdk/job/ray_jobs.py src/codeflare_sdk/utils/__init__.py src/codeflare_sdk/utils/generate_cert.py src/codeflare_sdk/utils/generate_yaml.py src/codeflare_sdk/utils/kube_api_helpers.py +src/codeflare_sdk/utils/openshift_oauth.py src/codeflare_sdk/utils/pretty_print.py diff --git a/src/codeflare_sdk/job/ray_jobs.py b/src/codeflare_sdk/job/ray_jobs.py index 96900ff2..b02531e9 100644 --- a/src/codeflare_sdk/job/ray_jobs.py +++ b/src/codeflare_sdk/job/ray_jobs.py @@ -68,18 +68,18 @@ def submit_job( entrypoint_resources=entrypoint_resources, ) - def delete_job(self, job_id: str) -> bool: + def delete_job(self, job_id: str) -> (bool, str): """ Method for deleting jobs with the job id being a mandatory field. """ deletion_status = self.rayJobClient.delete_job(job_id=job_id) if deletion_status: - print(f"Successfully deleted Job {job_id}") - return deletion_status + message = f"Successfully deleted Job {job_id}" else: - print(f"Failed to delete Job {job_id}") - return deletion_status + message = f"Failed to delete Job {job_id}" + + return deletion_status, message def get_address(self) -> str: """ @@ -95,7 +95,7 @@ def get_job_info(self, job_id: str): def get_job_logs(self, job_id: str) -> str: """ - Method for getting the job info with the job id being a mandatory field. + Method for getting the job logs with the job id being a mandatory field. 
""" return self.rayJobClient.get_job_logs(job_id=job_id) @@ -111,16 +111,16 @@ def list_jobs(self): """ return self.rayJobClient.list_jobs() - def stop_job(self, job_id: str) -> bool: + def stop_job(self, job_id: str) -> (bool, str): """ Method for stopping a job with the job id being a mandatory field. """ stop_job_status = self.rayJobClient.stop_job(job_id=job_id) if stop_job_status: - print(f"Successfully stopped Job {job_id}") + message = f"Successfully stopped Job {job_id}" else: - print(f"Failed to stop Job, {job_id} could have already completed.") - return stop_job_status + message = f"Failed to stop Job, {job_id} could have already completed." + return stop_job_status, message def tail_job_logs(self, job_id: str) -> Iterator[str]: """ diff --git a/tests/unit_test.py b/tests/unit_test.py index 86e3a940..c33b95ab 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -2890,36 +2890,39 @@ def test_rjc_delete_job(ray_job_client, mocker): result = ray_job_client.delete_job(job_id="mocked_job_id") mocked_delete_job_True.assert_called_once_with(job_id="mocked_job_id") - assert result is True + assert result == (True, "Successfully deleted Job mocked_job_id") # Case return False mocked_delete_job_False = mocker.patch.object( - JobSubmissionClient, "delete_job", return_value=False + JobSubmissionClient, "delete_job", return_value=(False) ) result = ray_job_client.delete_job(job_id="mocked_job_id") mocked_delete_job_False.assert_called_once_with(job_id="mocked_job_id") - assert result is False + assert result == (False, "Failed to delete Job mocked_job_id") def test_rjc_stop_job(ray_job_client, mocker): # Case return True mocked_stop_job_True = mocker.patch.object( - JobSubmissionClient, "stop_job", return_value=True + JobSubmissionClient, "stop_job", return_value=(True) ) result = ray_job_client.stop_job(job_id="mocked_job_id") mocked_stop_job_True.assert_called_once_with(job_id="mocked_job_id") - assert result is True + assert result == (True, "Successfully stopped Job mocked_job_id") # Case return False mocked_stop_job_False = mocker.patch.object( - JobSubmissionClient, "stop_job", return_value=False + JobSubmissionClient, "stop_job", return_value=(False) ) result = ray_job_client.stop_job(job_id="mocked_job_id") mocked_stop_job_False.assert_called_once_with(job_id="mocked_job_id") - assert result is False + assert result == ( + False, + "Failed to stop Job, mocked_job_id could have already completed.", + ) def test_rjc_address(ray_job_client, mocker): From 073b17114e91443977f802758f099c8dc2d78d46 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 6 Dec 2023 15:11:06 +0000 Subject: [PATCH 080/496] Added descriptions for parameters passed in methods --- src/codeflare_sdk/job/ray_jobs.py | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/src/codeflare_sdk/job/ray_jobs.py b/src/codeflare_sdk/job/ray_jobs.py index b02531e9..ff1ebdfe 100644 --- a/src/codeflare_sdk/job/ray_jobs.py +++ b/src/codeflare_sdk/job/ray_jobs.py @@ -17,12 +17,22 @@ The SDK acts as a wrapper for the Ray Job Submission Client. """ from ray.job_submission import JobSubmissionClient -from typing import Iterator, Optional, Dict, Any, Union +from ray.dashboard.modules.job.pydantic_models import JobDetails +from typing import Iterator, Optional, Dict, Any, Union, List class RayJobClient: """ - An object for that acts as the Ray Job Submission Client. + A class that functions as a wrapper for the Ray Job Submission Client. 
+ + parameters: + address -- Either (1) the address of the Ray cluster, or (2) the HTTP address of the dashboard server on the head node, e.g. “http://:8265”. In case (1) it must be specified as an address that can be passed to ray.init(), + e.g. a Ray Client address (ray://:10001), or “auto”, or “localhost:”. If unspecified, will try to connect to a running local Ray cluster. This argument is always overridden by the RAY_ADDRESS environment variable. + create_cluster_if_needed -- Indicates whether the cluster at the specified address needs to already be running. Ray doesn't start a cluster before interacting with jobs, but third-party job managers may do so. + cookies -- Cookies to use when sending requests to the HTTP job server. + metadata -- Arbitrary metadata to store along with all jobs. New metadata specified per job will be merged with the global metadata provided here via a simple dict update. + headers -- Headers to use when sending requests to the HTTP job server, used for cases like authentication to a remote cluster. + verify -- Boolean indication to verify the server's TLS certificate or a path to a file or directory of trusted certificates. Default: True. """ def __init__( @@ -56,6 +66,16 @@ def submit_job( ) -> str: """ Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field. + + Parameters: + entrypoint -- The shell command to run for this job. + submission_id -- A unique ID for this job. + runtime_env -- The runtime environment to install and run this job in. + metadata -- Arbitrary data to store along with this job. + job_id -- DEPRECATED. This has been renamed to submission_id + entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0. + entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0. + entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. """ return self.rayJobClient.submit_job( entrypoint=entrypoint, @@ -105,7 +125,7 @@ def get_job_status(self, job_id: str) -> str: """ return self.rayJobClient.get_job_status(job_id=job_id) - def list_jobs(self): + def list_jobs(self) -> List[JobDetails]: """ Method for getting a list of current jobs in the Ray Cluster. """ From b4aca0208d21dcd1ac96671e13a63410a748ddfa Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 8 Dec 2023 12:25:41 +0000 Subject: [PATCH 081/496] Added aiohttp to SDK --- poetry.lock | 381 ++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 377 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index c97f1f78..d02577b0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,107 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. 
+ +[[package]] +name = "aiohttp" +version = "3.9.1" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, + {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, + {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, + {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, + {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, + {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, + {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, + {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, + {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, + {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, + {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, + {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, + {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, + {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, + {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, + {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, + {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, + {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, + {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, + {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, + {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, + {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, + {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, + {file = 
"aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, + {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, + {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, + {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, + {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] [[package]] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -14,10 +112,23 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + [[package]] name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -36,6 +147,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -70,6 +182,7 @@ typecheck = ["mypy"] name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -81,6 +194,7 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -92,6 +206,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -156,6 +271,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -255,6 +371,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -269,6 +386,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "codeflare-torchx" version = "0.6.0.dev1" description = "TorchX SDK and Components" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -297,6 +415,7 @@ ray = ["ray (>=1.12.1)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -308,6 +427,7 @@ files = [ name = "commonmark" version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" +category = "main" optional = false python-versions = "*" files = [ @@ -322,6 +442,7 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -394,6 +515,7 @@ toml = ["tomli"] name = "cryptography" version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -435,6 +557,7 @@ tox = ["tox"] name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -456,6 +579,7 @@ ssh = ["paramiko (>=2.4.3)"] name = "docstring-parser" version = "0.8.1" description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -466,6 +590,7 @@ files = [ name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -480,6 +605,7 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" +category = "main" optional = false python-versions = "*" files = [ @@ -494,6 +620,7 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "filelock" version = "3.12.4" description = "A platform independent file lock." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -510,6 +637,7 @@ typing = ["typing-extensions (>=4.7.1)"] name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -580,6 +708,7 @@ files = [ name = "fsspec" version = "2023.9.2" description = "File-system specification" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -615,6 +744,7 @@ tqdm = ["tqdm"] name = "google-auth" version = "2.23.3" description = "Google Authentication Library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -638,6 +768,7 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -649,6 +780,7 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -668,6 +800,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.1.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -686,6 +819,7 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -697,6 +831,7 @@ files = [ name = "jsonschema" version = "4.19.1" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -720,6 +855,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -735,6 +871,7 @@ referencing = ">=0.28.0" name = "kubernetes" version = "26.1.0" description = "Kubernetes python client" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -752,7 +889,7 @@ requests-oauthlib = "*" setuptools = ">=21.0.0" six = ">=1.9.0" urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.0 || >=0.43.0" [package.extras] adal = ["adal (>=1.0.2)"] @@ -761,6 +898,7 @@ adal = ["adal (>=1.0.2)"] name = "mako" version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -780,6 +918,7 @@ testing = ["pytest"] name = "markdown" version = "3.5" description = "Python implementation of John Gruber's Markdown." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -798,6 +937,7 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -867,6 +1007,7 @@ files = [ name = "msgpack" version = "1.0.7" description = "MessagePack serializer" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -928,10 +1069,95 @@ files = [ {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"}, ] +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = 
"multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = 
"sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = 
"multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -943,6 +1169,7 @@ files = [ name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -980,6 +1207,7 @@ files = [ name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -996,6 +1224,7 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "openshift-client" version = "1.0.18" description = "OpenShift python client" +category = "main" optional = false python-versions = "*" files = [ @@ -1012,6 +1241,7 @@ six = "*" name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1023,6 +1253,7 @@ files = [ name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1056,8 +1287,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -1090,6 +1321,7 @@ xml = ["lxml (>=4.6.3)"] name = "paramiko" version = "3.3.1" description = "SSH2 protocol library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1111,6 +1343,7 @@ invoke = ["invoke (>=2.0)"] name = "pdoc3" version = "0.10.0" description = "Auto-generate API documentation for Python projects." 
+category = "dev" optional = false python-versions = ">= 3.6" files = [ @@ -1126,6 +1359,7 @@ markdown = ">=3.0" name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1137,6 +1371,7 @@ files = [ name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1152,6 +1387,7 @@ testing = ["pytest", "pytest-benchmark"] name = "protobuf" version = "4.24.4" description = "" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1174,6 +1410,7 @@ files = [ name = "pyarrow" version = "6.0.1" description = "Python library for Apache Arrow" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1222,6 +1459,7 @@ numpy = ">=1.16.6" name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1233,6 +1471,7 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1247,6 +1486,7 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1258,6 +1498,7 @@ files = [ name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1310,6 +1551,7 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1324,6 +1566,7 @@ plugins = ["importlib-metadata"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1350,6 +1593,7 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyre-extensions" version = "0.0.30" description = "Type system extensions for use with the pyre type checker" +category = "main" optional = false python-versions = "*" files = [ @@ -1365,6 +1609,7 @@ typing-inspect = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1387,6 +1632,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1404,6 +1650,7 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -1418,6 +1665,7 @@ six = ">=1.5" name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -1429,6 +1677,7 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "main" optional = false python-versions = "*" files = [ @@ -1452,6 +1701,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1511,6 +1761,7 @@ files = [ name = "ray" version = "2.7.0" description = "Ray provides a simple, universal API for building distributed applications." +category = "main" optional = false python-versions = "*" files = [ @@ -1550,8 +1801,8 @@ jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" numpy = [ {version = ">=1.16", markers = "python_version < \"3.9\""}, - {version = ">=1.20", optional = true, markers = "extra == \"data\""}, {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, + {version = ">=1.20", optional = true, markers = "extra == \"data\""}, ] packaging = "*" pandas = {version = ">=1.3", optional = true, markers = "extra == \"data\""} @@ -1578,6 +1829,7 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. name = "referencing" version = "0.30.2" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1593,6 +1845,7 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1614,6 +1867,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1632,6 +1886,7 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "12.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" optional = false python-versions = ">=3.6.3,<4.0.0" files = [ @@ -1651,6 +1906,7 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] name = "rpds-py" version = "0.10.4" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1759,6 +2015,7 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" +category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -1773,6 +2030,7 @@ pyasn1 = ">=0.1.3" name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1789,6 +2047,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1800,6 +2059,7 @@ files = [ name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1814,6 +2074,7 @@ widechars = ["wcwidth"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1825,6 +2086,7 @@ files = [ name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1836,6 +2098,7 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." +category = "main" optional = false python-versions = "*" files = [ @@ -1851,6 +2114,7 @@ typing-extensions = ">=3.7.4" name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" +category = "main" optional = false python-versions = ">=2" files = [ @@ -1862,6 +2126,7 @@ files = [ name = "urllib3" version = "1.26.17" description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -1878,6 +2143,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "websocket-client" version = "1.6.4" description = "WebSocket client for Python with low level API options" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1890,10 +2156,115 @@ docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] +[[package]] +name = "yarl" +version = "1.9.4" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, + {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, + {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, + {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, + {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, + {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, + {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, + 
{file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, + {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, + {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, + {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, + {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, + {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, + {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, + {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, + {file = 
"yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, + {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, + {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, + {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, + {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, + {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, + {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, + {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, + {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, + {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, + {file = 
"yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, + {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, + {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, + {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, + {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, + {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, + {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, + {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, + {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, + {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, + {file = "yarl-1.9.4.tar.gz", hash 
= "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + [[package]] name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1908,4 +2279,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "2a21d955dba1069ee968ad0486bedef6c1ccdec946962ac2c3f9b28707203845" +content-hash = "cf7c65917105b634657707b66d641eca477590e2cb9782dc83960d183be00f4c" diff --git a/pyproject.toml b/pyproject.toml index c1ca5e5a..ce76da53 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ codeflare-torchx = "0.6.0.dev1" cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" +aiohttp = ">= 3.7" [tool.poetry.group.docs] optional = true From 9c51eb7df9e4ca695e61e1849eb51c83d251a811 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 11 Dec 2023 10:50:41 +0000 Subject: [PATCH 082/496] Removed aiohttp inplace of ray[default] --- poetry.lock | 478 +++++++++++++++++++++++++++++++++++++++++++++---- pyproject.toml | 3 +- 2 files changed, 440 insertions(+), 41 deletions(-) diff --git a/poetry.lock b/poetry.lock index d02577b0..a0ed4770 100644 --- a/poetry.lock +++ b/poetry.lock @@ -97,6 +97,21 @@ yarl = ">=1.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns", "brotlicffi"] +[[package]] +name = "aiohttp-cors" +version = "0.7.0" +description = "CORS support for aiohttp" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "aiohttp-cors-0.7.0.tar.gz", hash = "sha256:4d39c6d7100fd9764ed1caf8cebf0eb01bf5e3f24e2e073fda6234bc48b19f5d"}, + {file = "aiohttp_cors-0.7.0-py3-none-any.whl", hash = "sha256:0451ba59fdf6909d0e2cd21e4c0a43752bc0703d33fc78ae94d9d9321710193e"}, +] + +[package.dependencies] +aiohttp = ">=1.1" + [[package]] name = "aiosignal" version = "1.3.1" @@ -112,6 +127,18 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "ansicon" +version = "1.89.0" +description = "Python wrapper for loading Jason Hood's ANSICON" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, + {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, +] + [[package]] name = "async-timeout" version = "4.0.3" @@ -178,6 +205,23 @@ files = [ tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] +[[package]] +name = "blessed" +version = "1.20.0" +description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." 
+category = "main" +optional = false +python-versions = ">=2.7" +files = [ + {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, + {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, +] + +[package.dependencies] +jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} +six = ">=1.9.0" +wcwidth = ">=0.1.4" + [[package]] name = "cachetools" version = "5.3.1" @@ -423,6 +467,21 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "colorful" +version = "0.5.5" +description = "Terminal string styling done right, in Python." +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "colorful-0.5.5-py2.py3-none-any.whl", hash = "sha256:62c187e27c1433db9463ff93b1451898d1e7e23a7e553583fd9daeb6325182e4"}, + {file = "colorful-0.5.5.tar.gz", hash = "sha256:66f8c1264b2a26f7293b96a03bb7a76c4bc8b9634369a0bffdcd12d618056a1d"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "commonmark" version = "0.9.1" @@ -553,6 +612,18 @@ test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-co test-randomorder = ["pytest-randomly"] tox = ["tox"] +[[package]] +name = "distlib" +version = "0.3.7" +description = "Distribution utilities" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, +] + [[package]] name = "docker" version = "6.1.3" @@ -740,6 +811,29 @@ smb = ["smbprotocol"] ssh = ["paramiko"] tqdm = ["tqdm"] +[[package]] +name = "google-api-core" +version = "2.15.0" +description = "Google API client core library" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, + {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + [[package]] name = "google-auth" version = "2.23.3" @@ -764,6 +858,111 @@ pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] +[[package]] +name = "googleapis-common-protos" +version = "1.62.0" +description = "Common protobufs used in Google APIs" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = 
"googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "gpustat" +version = "1.1.1" +description = "An utility to monitor NVIDIA GPU status and usage" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "gpustat-1.1.1.tar.gz", hash = "sha256:c18d3ed5518fc16300c42d694debc70aebb3be55cae91f1db64d63b5fa8af9d8"}, +] + +[package.dependencies] +blessed = ">=1.17.1" +nvidia-ml-py = ">=11.450.129" +psutil = ">=5.6.0" + +[package.extras] +completion = ["shtab"] +test = ["mockito (>=1.2.1)", "pytest (>=5.4.1)", "pytest-runner"] + +[[package]] +name = "grpcio" +version = "1.60.0" +description = "HTTP/2-based RPC framework" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"}, + {file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae"}, + {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508"}, + {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b"}, + {file = "grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d"}, + {file = "grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df"}, + {file = "grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd"}, + {file = "grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253"}, + {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444"}, + {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d"}, + {file = "grpcio-1.60.0-cp311-cp311-win32.whl", hash = 
"sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320"}, + {file = "grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b"}, + {file = "grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18"}, + {file = "grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55"}, + {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca"}, + {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5"}, + {file = "grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951"}, + {file = "grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a"}, + {file = "grpcio-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415"}, + {file = "grpcio-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6"}, + {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619"}, + {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179"}, + {file = "grpcio-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b"}, + {file = "grpcio-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e"}, + {file = "grpcio-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0"}, + {file = 
"grpcio-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390"}, + {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629"}, + {file = "grpcio-1.60.0-cp38-cp38-win32.whl", hash = "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860"}, + {file = "grpcio-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08"}, + {file = "grpcio-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968"}, + {file = "grpcio-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328"}, + {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf"}, + {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6"}, + {file = "grpcio-1.60.0-cp39-cp39-win32.whl", hash = "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03"}, + {file = "grpcio-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353"}, + {file = "grpcio-1.60.0.tar.gz", hash = "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.60.0)"] + [[package]] name = "idna" version = "3.4" @@ -827,6 +1026,21 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "jinxed" +version = "1.2.1" +description = "Jinxed Terminal Library" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "jinxed-1.2.1-py2.py3-none-any.whl", hash = "sha256:37422659c4925969c66148c5e64979f553386a4226b9484d910d3094ced37d30"}, + {file = "jinxed-1.2.1.tar.gz", hash = "sha256:30c3f861b73279fea1ed928cfd4dfb1f273e16cd62c8a32acfac362da0f78f3f"}, +] + +[package.dependencies] +ansicon = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "jsonschema" version = "4.19.1" @@ -1203,6 +1417,18 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "nvidia-ml-py" +version = "12.535.133" +description = "Python Bindings for the NVIDIA Management Library" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "nvidia-ml-py-12.535.133.tar.gz", hash = "sha256:b1559af0d57dd20955bf58d05afff7b166ddd44947eb3051c9905638799eb1dc"}, + {file = "nvidia_ml_py-12.535.133-py3-none-any.whl", hash = "sha256:91d808d3f246d30bead2a0a2540b74b9e9fc584a9c3f1f55abfc2940c4e44fd2"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -1220,6 +1446,34 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken 
= ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] +[[package]] +name = "opencensus" +version = "0.11.3" +description = "A stats collection and distributed tracing framework" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "opencensus-0.11.3-py2.py3-none-any.whl", hash = "sha256:9c33d572059f0f0e874fc34c697a39a4193aa9cf3203f7e777df42e9edeea56a"}, + {file = "opencensus-0.11.3.tar.gz", hash = "sha256:af7a98bd51e63968144d772f346d696ed498a32dbdc4be267cd6011c4ce05da8"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.0.0,<3.0.0", markers = "python_version >= \"3.6\""} +opencensus-context = ">=0.1.3" + +[[package]] +name = "opencensus-context" +version = "0.1.3" +description = "OpenCensus Runtime Context" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "opencensus-context-0.1.3.tar.gz", hash = "sha256:a03108c3c10d8c80bb5ddf5c8a1f033161fa61972a9917f9b9b3a18517f0088c"}, + {file = "opencensus_context-0.1.3-py2.py3-none-any.whl", hash = "sha256:073bb0590007af276853009fac7e4bab1d523c3f03baf4cb4511ca38967c6039"}, +] + [[package]] name = "openshift-client" version = "1.0.18" @@ -1367,6 +1621,22 @@ files = [ {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, ] +[[package]] +name = "platformdirs" +version = "3.11.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, + {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + [[package]] name = "pluggy" version = "1.3.0" @@ -1383,6 +1653,21 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "prometheus-client" +version = "0.19.0" +description = "Python client for the Prometheus monitoring system." +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "prometheus_client-0.19.0-py3-none-any.whl", hash = "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92"}, + {file = "prometheus_client-0.19.0.tar.gz", hash = "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1"}, +] + +[package.extras] +twisted = ["twisted"] + [[package]] name = "protobuf" version = "4.24.4" @@ -1406,50 +1691,96 @@ files = [ {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"}, ] +[[package]] +name = "psutil" +version = "5.9.6" +description = "Cross-platform lib for process and system monitoring in Python." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, + {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, + {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, + {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, + {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, + {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, + {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, + {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, + {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, + {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, + {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, + {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, + {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "py-spy" +version = "0.3.14" +description = "Sampling profiler for Python programs" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "py_spy-0.3.14-py2.py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5b342cc5feb8d160d57a7ff308de153f6be68dcf506ad02b4d67065f2bae7f45"}, + {file = "py_spy-0.3.14-py2.py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fe7efe6c91f723442259d428bf1f9ddb9c1679828866b353d539345ca40d9dd2"}, + {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590905447241d789d9de36cff9f52067b6f18d8b5e9fb399242041568d414461"}, + {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6211fe7f587b3532ba9d300784326d9a6f2b890af7bf6fff21a029ebbc812b"}, + {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3e8e48032e71c94c3dd51694c39e762e4bbfec250df5bf514adcdd64e79371e0"}, + {file = 
"py_spy-0.3.14-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f59b0b52e56ba9566305236375e6fc68888261d0d36b5addbe3cf85affbefc0e"}, + {file = "py_spy-0.3.14-py2.py3-none-win_amd64.whl", hash = "sha256:8f5b311d09f3a8e33dbd0d44fc6e37b715e8e0c7efefafcda8bfd63b31ab5a31"}, +] + [[package]] name = "pyarrow" -version = "6.0.1" +version = "14.0.1" description = "Python library for Apache Arrow" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pyarrow-6.0.1-cp310-cp310-macosx_10_13_universal2.whl", hash = "sha256:c80d2436294a07f9cc54852aa1cef034b6f9c97d29235c4bd53bbf52e24f1ebf"}, - {file = "pyarrow-6.0.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:f150b4f222d0ba397388908725692232345adaa8e58ad543ca00f03c7234ae7b"}, - {file = "pyarrow-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c3a727642c1283dcb44728f0d0a00f8864b171e31c835f4b8def07e3fa8f5c73"}, - {file = "pyarrow-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d29605727865177918e806d855fd8404b6242bf1e56ade0a0023cd4fe5f7f841"}, - {file = "pyarrow-6.0.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b63b54dd0bada05fff76c15b233f9322de0e6947071b7871ec45024e16045aeb"}, - {file = "pyarrow-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e90e75cb11e61ffeffb374f1db7c4788f1df0cb269596bf86c473155294958d"}, - {file = "pyarrow-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f4f3db1da51db4cfbafab3066a01b01578884206dced9f505da950d9ed4402d"}, - {file = "pyarrow-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:2523f87bd36877123fc8c4813f60d298722143ead73e907690a87e8557114693"}, - {file = "pyarrow-6.0.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:8f7d34efb9d667f9204b40ce91a77613c46691c24cd098e3b6986bd7401b8f06"}, - {file = "pyarrow-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3c9184335da8faf08c0df95668ce9d778df3795ce4eec959f44908742900e10"}, - {file = "pyarrow-6.0.1-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02baee816456a6e64486e587caaae2bf9f084fa3a891354ff18c3e945a1cb72f"}, - {file = "pyarrow-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604782b1c744b24a55df80125991a7154fbdef60991eb3d02bfaed06d22f055e"}, - {file = "pyarrow-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fab8132193ae095c43b1e8d6d7f393451ac198de5aaf011c6b576b1442966fec"}, - {file = "pyarrow-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:31038366484e538608f43920a5e2957b8862a43aa49438814619b527f50ec127"}, - {file = "pyarrow-6.0.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:632bea00c2fbe2da5d29ff1698fec312ed3aabfb548f06100144e1907e22093a"}, - {file = "pyarrow-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc03c875e5d68b0d0143f94c438add3ab3c2411ade2748423a9c24608fea571e"}, - {file = "pyarrow-6.0.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1cd4de317df01679e538004123d6d7bc325d73bad5c6bbc3d5f8aa2280408869"}, - {file = "pyarrow-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e77b1f7c6c08ec319b7882c1a7c7304731530923532b3243060e6e64c456cf34"}, - {file = "pyarrow-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a424fd9a3253d0322d53be7bbb20b5b01511706a61efadcf37f416da325e3d48"}, - {file = "pyarrow-6.0.1-cp37-cp37m-win_amd64.whl", hash = 
"sha256:c958cf3a4a9eee09e1063c02b89e882d19c61b3a2ce6cbd55191a6f45ed5004b"}, - {file = "pyarrow-6.0.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:0e0ef24b316c544f4bb56f5c376129097df3739e665feca0eb567f716d45c55a"}, - {file = "pyarrow-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c13ec3b26b3b069d673c5fa3a0c70c38f0d5c94686ac5dbc9d7e7d24040f812"}, - {file = "pyarrow-6.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:71891049dc58039a9523e1cb0d921be001dacb2b327fa7b62a35b96a3aad9f0d"}, - {file = "pyarrow-6.0.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:943141dd8cca6c5722552a0b11a3c2e791cdf85f1768dea8170b0a8a7e824ff9"}, - {file = "pyarrow-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fd077c06061b8fa8fdf91591a4270e368f63cf73c6ab56924d3b64efa96a873"}, - {file = "pyarrow-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5308f4bb770b48e07c8cff36cf6a4452862e8ce9492428ad5581d846420b3884"}, - {file = "pyarrow-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:cde4f711cd9476d4da18128c3a40cb529b6b7d2679aee6e0576212547530fef1"}, - {file = "pyarrow-6.0.1-cp39-cp39-macosx_10_13_universal2.whl", hash = "sha256:b8628269bd9289cae0ea668f5900451043252fe3666667f614e140084dd31aac"}, - {file = "pyarrow-6.0.1-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:981ccdf4f2696550733e18da882469893d2f33f55f3cbeb6a90f81741cbf67aa"}, - {file = "pyarrow-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:954326b426eec6e31ff55209f8840b54d788420e96c4005aaa7beed1fe60b42d"}, - {file = "pyarrow-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b6483bf6b61fe9a046235e4ad4d9286b707607878d7dbdc2eb85a6ec4090baf"}, - {file = "pyarrow-6.0.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7ecad40a1d4e0104cd87757a403f36850261e7a989cf9e4cb3e30420bbbd1092"}, - {file = "pyarrow-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04c752fb41921d0064568a15a87dbb0222cfbe9040d4b2c1b306fe6e0a453530"}, - {file = "pyarrow-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:725d3fe49dfe392ff14a8ae6a75b230a60e8985f2b621b18cfa912fe02b65f1a"}, - {file = "pyarrow-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:2403c8af207262ce8e2bc1a9d19313941fd2e424f1cb3c4b749c17efe1fd699a"}, - {file = "pyarrow-6.0.1.tar.gz", hash = "sha256:423990d56cd8f12283b67367d48e142739b789085185018eb03d05087c3c8d43"}, + {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, + {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:906b0dc25f2be12e95975722f1e60e162437023f490dbd80d0deb7375baf3171"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:78d4a77a46a7de9388b653af1c4ce539350726cd9af62e0831e4f2bd0c95a2f4"}, + {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93"}, + {file = "pyarrow-14.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:32542164d905002c42dff896efdac79b3bdd7291b1b74aa292fac8450d0e4dcd"}, + {file = 
"pyarrow-14.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c7331b4ed3401b7ee56f22c980608cf273f0380f77d0f73dd3c185f78f5a6220"}, + {file = "pyarrow-14.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e8b49b88da8633d6cac0e1b5a690311b6758d6f5d7c2be71acb0f1e14cd61"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c889851ca33f992ea916b48b8540735055201b177cb0dcf0596a495a667b00"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30d8494870d9916bb53b2a4384948491444741cb9a38253c590e21f836b01222"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"}, + {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"}, + {file = "pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"}, + {file = "pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"}, + {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"}, + {file = "pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"}, + {file = "pyarrow-14.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a8eeef015ae69d104c4c3117a6011e7e3ecd1abec79dc87fd2fac6e442f666ee"}, + {file = "pyarrow-14.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c76807540989fe8fcd02285dd15e4f2a3da0b09d27781abec3adc265ddbeba1"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:450e4605e3c20e558485f9161a79280a61c55efe585d51513c014de9ae8d393f"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323cbe60210173ffd7db78bfd50b80bdd792c4c9daca8843ef3cd70b186649db"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0140c7e2b740e08c5a459439d87acd26b747fc408bde0a8806096ee0baaa0c15"}, + {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:e592e482edd9f1ab32f18cd6a716c45b2c0f2403dc2af782f4e9674952e6dd27"}, + {file = "pyarrow-14.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d264ad13605b61959f2ae7c1d25b1a5b8505b112715c961418c8396433f213ad"}, + {file = "pyarrow-14.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01e44de9749cddc486169cb632f3c99962318e9dacac7778315a110f4bf8a450"}, + {file = "pyarrow-14.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0351fecf0e26e152542bc164c22ea2a8e8c682726fce160ce4d459ea802d69c"}, + {file = 
"pyarrow-14.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c1f6110c386464fd2e5e4ea3624466055bbe681ff185fd6c9daa98f30a3f9a"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11e045dfa09855b6d3e7705a37c42e2dc2c71d608fab34d3c23df2e02df9aec3"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:097828b55321897db0e1dbfc606e3ff8101ae5725673498cbfa7754ee0da80e4"}, + {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1daab52050a1c48506c029e6fa0944a7b2436334d7e44221c16f6f1b2cc9c510"}, + {file = "pyarrow-14.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f6d5faf4f1b0d5a7f97be987cf9e9f8cd39902611e818fe134588ee99bf0283"}, + {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, ] [package.dependencies] @@ -1792,11 +2123,19 @@ files = [ ] [package.dependencies] +aiohttp = {version = ">=3.7", optional = true, markers = "extra == \"default\""} +aiohttp-cors = {version = "*", optional = true, markers = "extra == \"default\""} aiosignal = "*" click = ">=7.0" +colorful = {version = "*", optional = true, markers = "extra == \"default\""} filelock = "*" frozenlist = "*" fsspec = {version = "*", optional = true, markers = "extra == \"data\""} +gpustat = {version = ">=1.0.0", optional = true, markers = "extra == \"default\""} +grpcio = [ + {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\""}, + {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\""}, +] jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" numpy = [ @@ -1804,12 +2143,18 @@ numpy = [ {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, {version = ">=1.20", optional = true, markers = "extra == \"data\""}, ] +opencensus = {version = "*", optional = true, markers = "extra == \"default\""} packaging = "*" pandas = {version = ">=1.3", optional = true, markers = "extra == \"data\""} +prometheus-client = {version = ">=0.7.1", optional = true, markers = "extra == \"default\""} protobuf = ">=3.15.3,<3.19.5 || >3.19.5" +py-spy = {version = ">=0.2.0", optional = true, markers = "extra == \"default\""} pyarrow = {version = ">=6.0.1", optional = true, markers = "extra == \"data\""} +pydantic = {version = "<2", optional = true, markers = "extra == \"default\""} pyyaml = "*" requests = "*" +smart-open = {version = "*", optional = true, markers = "extra == \"default\""} +virtualenv = {version = ">=20.0.24,<20.21.1", optional = true, markers = "extra == \"default\""} [package.extras] air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] @@ -2055,6 +2400,28 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "smart-open" +version = "6.4.0" +description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +category = "main" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "smart_open-6.4.0-py3-none-any.whl", hash = "sha256:8d3ef7e6997e8e42dd55c74166ed21e6ac70664caa32dd940b26d54a8f6b4142"}, + {file = 
"smart_open-6.4.0.tar.gz", hash = "sha256:be3c92c246fbe80ebce8fbacb180494a481a77fcdcb7c1aadb2ea5b9c2bee8b9"}, +] + +[package.extras] +all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] +azure = ["azure-common", "azure-core", "azure-storage-blob"] +gcs = ["google-cloud-storage (>=2.6.0)"] +http = ["requests"] +s3 = ["boto3"] +ssh = ["paramiko"] +test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] +webhdfs = ["requests"] + [[package]] name = "tabulate" version = "0.9.0" @@ -2139,6 +2506,39 @@ brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotl secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "virtualenv" +version = "20.21.0" +description = "Virtual Python Environment builder" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, + {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, +] + +[package.dependencies] +distlib = ">=0.3.6,<1" +filelock = ">=3.4.1,<4" +platformdirs = ">=2.4,<4" + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "wcwidth" +version = "0.2.12" +description = "Measures the displayed width of unicode strings in a terminal" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, + {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, +] + [[package]] name = "websocket-client" version = "1.6.4" @@ -2279,4 +2679,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "cf7c65917105b634657707b66d641eca477590e2cb9782dc83960d183be00f4c" +content-hash = "75531b507aa2ee5b0514864aa92fbb127ef52a4faf119b5affdfbc3c694c5b03" diff --git a/pyproject.toml b/pyproject.toml index ce76da53..66dd6cd0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,13 +23,12 @@ keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] python = "^3.8" openshift-client = "1.0.18" rich = "^12.5" -ray = {version = "2.7.0", extras = ["data"]} +ray = {version = "2.7.0", extras = ["data", "default"]} kubernetes = ">= 25.3.0, < 27" codeflare-torchx = "0.6.0.dev1" cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" -aiohttp = ">= 3.7" [tool.poetry.group.docs] optional = true From d9de5a3af61dccfca0d048757d147cd45b8e0c82 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 17 Nov 2023 16:41:46 +0000 Subject: [PATCH 083/496] Fixed creating ingresses without admin access 
--- src/codeflare_sdk/cluster/cluster.py | 42 +++++++-- src/codeflare_sdk/cluster/config.py | 2 +- src/codeflare_sdk/utils/generate_yaml.py | 107 ++++++++--------------- tests/test-case-no-mcad.yamls | 1 + tests/test-case-prio.yaml | 1 + tests/test-case.yaml | 1 + tests/unit_test.py | 55 +++--------- tests/unit_test_support.py | 2 +- 8 files changed, 90 insertions(+), 121 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index f5f226a0..129d045a 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -189,7 +189,7 @@ def create_app_wrapper(self): local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority - ingress_domain = self.config.ingress_domain + domain_name = self.config.domain_name ingress_options = self.config.ingress_options return generate_appwrapper( name=name, @@ -214,7 +214,7 @@ def create_app_wrapper(self): dispatch_priority=dispatch_priority, priority_val=priority_val, openshift_oauth=self.config.openshift_oauth, - ingress_domain=ingress_domain, + domain_name=domain_name, ingress_options=ingress_options, ) @@ -468,7 +468,7 @@ def torchx_config( to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc, mcad=True): + def from_k8_cluster_object(rc, mcad=True, domain_name=None): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -508,6 +508,7 @@ def from_k8_cluster_object(rc, mcad=True): ]["image"], local_interactive=local_interactive, mcad=mcad, + domain_name=domain_name, ) return Cluster(cluster_config) @@ -644,7 +645,10 @@ def get_cluster(cluster_name: str, namespace: str = "default"): for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: mcad = _check_aw_exists(cluster_name, namespace) - return Cluster.from_k8_cluster_object(rc, mcad=mcad) + domain_name = _extract_domain_name(cluster_name, namespace) + return Cluster.from_k8_cluster_object( + rc, mcad=mcad, domain_name=domain_name + ) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" ) @@ -663,14 +667,40 @@ def _check_aw_exists(name: str, namespace: str) -> bool: ) except Exception as e: # pragma: no cover return _kube_api_error_handling(e, print_error=False) - for aw in aws["items"]: if aw["metadata"]["name"] == name: return True return False -# Cant test this until get_current_namespace is fixed +def _extract_domain_name(name: str, namespace: str) -> str: + try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + aws = api_instance.list_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e, print_error=False) + for aw in aws["items"]: + if aw["metadata"]["name"] == name: + host = aw["spec"]["resources"]["GenericItems"][1]["generictemplate"][ + "spec" + ]["rules"][0]["host"] + + dot_index = host.find(".") + if dot_index != -1: + domain_name = host[dot_index + 1 :] + return domain_name + else: + print("Host is not configured correctly.") + return None + + +# Cant test this until get_current_namespace is fixed and placed in this function over using `self` def _get_ingress_domain(self): # pragma: no cover try: config_check() diff --git a/src/codeflare_sdk/cluster/config.py 
b/src/codeflare_sdk/cluster/config.py index 0311d0e3..f36a6c64 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -54,4 +54,4 @@ class ClusterConfiguration: dispatch_priority: str = None openshift_oauth: bool = False # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator ingress_options: dict = field(default_factory=dict) - ingress_domain: str = None + domain_name: str = None diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 3ffbefb5..cd5f29ca 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -29,8 +29,6 @@ from base64 import b64encode from urllib3.util import parse_url -from kubernetes import client, config - from .kube_api_helpers import _get_api_host @@ -56,26 +54,23 @@ def gen_dashboard_ingress_name(cluster_name): return f"ray-dashboard-{cluster_name}" -# Check if the ingress api cluster resource exists +# Check if the routes api exists def is_openshift_cluster(): try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - api_instance.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - - return True - except client.ApiException as e: # pragma: no cover - if e.status == 404 or e.status == 403: - return False + for api in client.ApisApi(api_config_handler()).get_api_versions().groups: + for v in api.versions: + if "route.openshift.io/v1" in v.group_version: + return True else: - print(f"Error detecting cluster type defaulting to Kubernetes: {e}") return False + except client.ApiException as e: # pragma: no cover + print(f"Error detecting cluster type defaulting to Kubernetes: {e}") + return False def update_dashboard_ingress( - ingress_item, cluster_name, namespace, ingress_options, ingress_domain + ingress_item, cluster_name, namespace, ingress_options, domain_name ): # pragma: no cover metadata = ingress_item.get("generictemplate", {}).get("metadata") spec = ingress_item.get("generictemplate", {}).get("spec") @@ -123,34 +118,26 @@ def update_dashboard_ingress( "name" ] = f"{cluster_name}-head-svc" else: + if is_openshift_cluster(): + spec["ingressClassName"] = "openshift-default" + else: + spec["ingressClassName"] = "nginx" + metadata["name"] = f"ray-dashboard-{cluster_name}" metadata["namespace"] = namespace spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ "name" ] = f"{cluster_name}-head-svc" - if is_openshift_cluster(): - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - del spec["ingressClassName"] - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" - ) + if domain_name is None: + raise ValueError("domain_name is invalid. 
Please specify an ingress domain") else: - domain = ingress_domain + domain = domain_name del metadata["annotations"] spec["rules"][0]["host"] = f"ray-dashboard-{cluster_name}-{namespace}.{domain}" def update_rayclient_ingress( - ingress_item, cluster_name, namespace, ingress_domain + ingress_item, cluster_name, namespace, domain_name ): # pragma: no cover metadata = ingress_item.get("generictemplate", {}).get("metadata") spec = ingress_item.get("generictemplate", {}).get("spec") @@ -162,38 +149,27 @@ def update_rayclient_ingress( "name" ] = f"{cluster_name}-head-svc" - if is_openshift_cluster(): - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) + if domain_name is not None: + if is_openshift_cluster(): ingressClassName = "openshift-default" annotations = { "nginx.ingress.kubernetes.io/rewrite-target": "/", "nginx.ingress.kubernetes.io/ssl-redirect": "true", "route.openshift.io/termination": "passthrough", } - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" - ) + else: + ingressClassName = "nginx" + annotations = { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "nginx.ingress.kubernetes.io/ssl-passthrough": "true", + } else: - domain = ingress_domain - ingressClassName = "nginx" - annotations = { - "nginx.ingress.kubernetes.io/rewrite-target": "/", - "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "nginx.ingress.kubernetes.io/ssl-passthrough": "true", - } + raise ValueError("domain_name is invalid. Please specify a domain") metadata["annotations"] = annotations spec["ingressClassName"] = ingressClassName - spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{domain}" + spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{domain_name}" def update_names(yaml, item, appwrapper_name, cluster_name, namespace): @@ -396,7 +372,7 @@ def update_ca_secret(ca_secret_item, cluster_name, namespace): data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) -def enable_local_interactive(resources, cluster_name, namespace, ingress_domain): +def enable_local_interactive(resources, cluster_name, namespace, domain_name): rayclient_ingress_item = resources["resources"].get("GenericItems")[2] ca_secret_item = resources["resources"].get("GenericItems")[3] item = resources["resources"].get("GenericItems")[0] @@ -422,23 +398,12 @@ def enable_local_interactive(resources, cluster_name, namespace, ingress_domain) command = command.replace("deployment-name", cluster_name) - if is_openshift_cluster(): - # We can try get the domain through checking ingresses.config.openshift.io - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: + if domain_name is None: raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + "domain_name is invalid. 
For Kubernetes Clusters please specify an ingress domain" ) else: - domain = ingress_domain + domain = domain_name command = command.replace("server-name", domain) update_rayclient_ingress(rayclient_ingress_item, cluster_name, namespace, domain) @@ -618,7 +583,7 @@ def generate_appwrapper( dispatch_priority: str, priority_val: int, openshift_oauth: bool, - ingress_domain: str, + domain_name: str, ingress_options: dict, ): user_yaml = read_template(template) @@ -659,10 +624,10 @@ def generate_appwrapper( head_gpus, ) update_dashboard_ingress( - ingress_item, cluster_name, namespace, ingress_options, ingress_domain + ingress_item, cluster_name, namespace, ingress_options, domain_name ) if local_interactive: - enable_local_interactive(resources, cluster_name, namespace, ingress_domain) + enable_local_interactive(resources, cluster_name, namespace, domain_name) else: disable_raycluster_tls(resources["resources"]) diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index b8993a7f..484636bc 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -145,6 +145,7 @@ metadata: name: ray-dashboard-unit-test-cluster-ray namespace: ns spec: + ingressClassName: nginx rules: - host: ray-dashboard-unit-test-cluster-ray-ns.apps.cluster.awsroute.org http: diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 6051104a..70b68e97 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -178,6 +178,7 @@ spec: name: ray-dashboard-prio-test-cluster namespace: ns spec: + ingressClassName: nginx rules: - host: ray-dashboard-prio-test-cluster-ns.apps.cluster.awsroute.org http: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 7c649c5f..920459c4 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -175,6 +175,7 @@ spec: name: ray-dashboard-unit-test-cluster namespace: ns spec: + ingressClassName: nginx rules: - host: ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org http: diff --git a/tests/unit_test.py b/tests/unit_test.py index c33b95ab..11697d26 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -331,13 +331,10 @@ def test_default_cluster_creation(mocker): "codeflare_sdk.cluster.cluster.get_current_namespace", return_value="opendatahub", ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": ""}}, - ) default_config = ClusterConfiguration( name="unit-test-default-cluster", image="quay.io/project-codeflare/ray:latest-py39-cu118", + domain_name="apps.cluster.awsroute.org", ) cluster = Cluster(default_config) @@ -469,14 +466,14 @@ def arg_check_list_effect(group, version, plural, name, *args): return {"spec": {"domain": "test"}} -""" -def test_get_ingress_domain(self, mocker): +""" We need to fix get_current_namespace in order to reuse this test. 
+def test_get_ingress_domain(mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", side_effect=arg_check_list_effect, ) - domain = _get_ingress_domain(self) + domain = _get_ingress_domain() assert domain == "test" """ @@ -734,10 +731,6 @@ def test_print_appwrappers(capsys): def test_ray_details(mocker, capsys): - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": ""}}, - ) ray1 = RayCluster( name="raytest1", status=RayClusterStatus.READY, @@ -765,6 +758,7 @@ def test_ray_details(mocker, capsys): name="raytest2", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", + domain_name="apps.cluster.awsroute.org", ) ) captured = capsys.readouterr() @@ -1771,14 +1765,14 @@ def get_aw_obj(group, version, namespace, plural): def test_get_cluster(mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": ""}}, - ) mocker.patch( "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", side_effect=get_ray_obj, ) + mocker.patch( + "codeflare_sdk.cluster.cluster._extract_domain_name", + return_value="apps.cluster.awsroute.org", + ) cluster = get_cluster("quicktest") cluster_config = cluster.config assert cluster_config.name == "quicktest" and cluster_config.namespace == "ns" @@ -1790,6 +1784,7 @@ def test_get_cluster(mocker): assert cluster_config.min_memory == 2 and cluster_config.max_memory == 2 assert cluster_config.num_gpus == 0 assert cluster_config.instascale + assert cluster_config.domain_name == "apps.cluster.awsroute.org" assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -1878,10 +1873,6 @@ def test_list_queue(mocker, capsys): def test_cluster_status(mocker): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": ""}}, - ) fake_aw = AppWrapper( "test", AppWrapperStatus.FAILED, can_run=True, job_state="unused" ) @@ -1904,6 +1895,7 @@ def test_cluster_status(mocker): name="test", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", + domain_name="apps.cluster.awsroute.org", ) ) mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) @@ -1969,10 +1961,6 @@ def test_cluster_status(mocker): def test_wait_ready(mocker, capsys): - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": ""}}, - ) mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", return_value=ingress_retrieval(8265), @@ -2000,6 +1988,7 @@ def test_wait_ready(mocker, capsys): name="test", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", + domain_name="apps.cluster.awsroute.org", ) ) try: @@ -2483,21 +2472,6 @@ def secret_ca_retreival(secret_name, namespace): return client.models.V1Secret(data=data) -def test_is_openshift_cluster(mocker): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch.object( - client.CustomObjectsApi, - "get_cluster_custom_object", - side_effect=client.ApiException(status=404), - ) - assert is_openshift_cluster() == False - mocker.patch( - 
"kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": ""}}, - ) - assert is_openshift_cluster() == True - - def test_generate_tls_cert(mocker): """ test the function codeflare_sdk.utils.generate_ca_cert generates the correct outputs @@ -2823,10 +2797,6 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): "codeflare_sdk.cluster.cluster.get_current_namespace", return_value="opendatahub", ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": ""}}, - ) write_user_appwrapper = MagicMock() mocker.patch( "codeflare_sdk.utils.generate_yaml.write_user_appwrapper", write_user_appwrapper @@ -2836,6 +2806,7 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): "test_cluster", openshift_oauth=True, image="quay.io/project-codeflare/ray:latest-py39-cu118", + domain_name="apps.cluster.awsroute.org", ) ) user_yaml = write_user_appwrapper.call_args.args[0] diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 85f1a76d..801c988c 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -46,7 +46,7 @@ def createClusterConfig(): instascale=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], - ingress_domain="apps.cluster.awsroute.org", + domain_name="apps.cluster.awsroute.org", image="quay.io/project-codeflare/ray:latest-py39-cu118", ) return config From ff103718fd8815fa534345cf323676b2aaf88511 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 20 Nov 2023 11:03:20 +0000 Subject: [PATCH 084/496] Fixed unit tests --- tests/unit_test.py | 51 +++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 48 insertions(+), 3 deletions(-) diff --git a/tests/unit_test.py b/tests/unit_test.py index 11697d26..574898a1 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -261,7 +261,28 @@ def test_config_creation(): assert config.local_interactive == False +def sample_no_routes(): + api_versions = client.V1APIGroupList( + api_version="v1", + groups=[ + { + "name": "route.openshift.io", + "preferred_version": { + "group_version": "route.openshift.io/v1", + "version": "v1", + }, + "versions": [ + {"group_version": "route.openshift.io/v1", "version": "v1"} + ], + } + ], + ) + + return api_versions + + def test_cluster_creation(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") cluster = createClusterWithConfig(mocker) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster.yaml" assert cluster.app_wrapper_name == "unit-test-cluster" @@ -286,6 +307,7 @@ def test_create_app_wrapper_raises_error_with_no_image(): def test_cluster_creation_no_mcad(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, @@ -304,6 +326,7 @@ def test_cluster_creation_no_mcad(mocker): def test_cluster_creation_priority(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", @@ -327,6 +350,7 @@ def test_cluster_creation_priority(mocker): def test_default_cluster_creation(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "codeflare_sdk.cluster.cluster.get_current_namespace", return_value="opendatahub", @@ -409,6 +433,7 @@ def arg_check_del_effect(group, 
version, namespace, plural, name, *args): def test_cluster_up_down(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", @@ -432,6 +457,7 @@ def test_cluster_up_down(mocker): def test_cluster_up_down_no_mcad(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", @@ -538,6 +564,7 @@ def test_delete_openshift_oauth_objects(mocker): def test_cluster_uris(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( "codeflare_sdk.cluster.cluster._get_ingress_domain", @@ -637,6 +664,7 @@ def ingress_retrieval(port, annotations=None): def test_ray_job_wrapping(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") cluster = cluster = createClusterWithConfig(mocker) cluster.config.image = "quay.io/project-codeflare/ray:latest-py39-cu118" mocker.patch( @@ -731,6 +759,7 @@ def test_print_appwrappers(capsys): def test_ray_details(mocker, capsys): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") ray1 = RayCluster( name="raytest1", status=RayClusterStatus.READY, @@ -1764,6 +1793,7 @@ def get_aw_obj(group, version, namespace, plural): def test_get_cluster(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", @@ -1872,6 +1902,7 @@ def test_list_queue(mocker, capsys): def test_cluster_status(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") fake_aw = AppWrapper( "test", AppWrapperStatus.FAILED, can_run=True, job_state="unused" @@ -1961,6 +1992,7 @@ def test_cluster_status(mocker): def test_wait_ready(mocker, capsys): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", return_value=ingress_retrieval(8265), @@ -2021,6 +2053,7 @@ def test_wait_ready(mocker, capsys): def test_jobdefinition_coverage(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", return_value={"spec": {"domain": ""}}, @@ -2037,7 +2070,8 @@ def test_job_coverage(): abstract.logs() -def test_DDPJobDefinition_creation(): +def test_DDPJobDefinition_creation(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") ddp = createTestDDP() assert ddp.script == "test.py" assert ddp.m == None @@ -2061,6 +2095,7 @@ def test_DDPJobDefinition_dry_run(mocker: MockerFixture): that the attributes of the returned object are of the correct type, and that the values from cluster and job definition are correctly passed. """ + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", @@ -2097,7 +2132,7 @@ def test_DDPJobDefinition_dry_run_no_cluster(mocker): that the attributes of the returned object are of the correct type, and that the values from cluster and job definition are correctly passed. 
""" - + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "codeflare_sdk.job.jobs.get_current_namespace", return_value="opendatahub", @@ -2136,6 +2171,7 @@ def test_DDPJobDefinition_dry_run_no_resource_args(mocker): Test that the dry run correctly gets resources from the cluster object when the job definition does not specify resources. """ + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch.object(Cluster, "job_client") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", @@ -2175,6 +2211,7 @@ def test_DDPJobDefinition_dry_run_no_cluster_no_resource_args(mocker): that the attributes of the returned object are of the correct type, and that the values from cluster and job definition are correctly passed. """ + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "codeflare_sdk.job.jobs.get_current_namespace", @@ -2229,6 +2266,7 @@ def test_DDPJobDefinition_submit(mocker: MockerFixture): Tests that the submit method returns the correct type: DDPJob And that the attributes of the returned object are of the correct type """ + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mock_schedule = MagicMock() mocker.patch.object(Runner, "schedule", mock_schedule) mock_schedule.return_value = "fake-dashboard-url" @@ -2259,6 +2297,7 @@ def test_DDPJobDefinition_submit(mocker: MockerFixture): def test_DDPJob_creation(mocker: MockerFixture): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch.object(Cluster, "job_client") mock_schedule = MagicMock() mocker.patch.object(Runner, "schedule", mock_schedule) @@ -2284,6 +2323,7 @@ def test_DDPJob_creation(mocker: MockerFixture): def test_DDPJob_creation_no_cluster(mocker: MockerFixture): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") ddp_def = createTestDDP() ddp_def.image = "fake-image" mocker.patch( @@ -2309,6 +2349,7 @@ def test_DDPJob_creation_no_cluster(mocker: MockerFixture): def test_DDPJob_status(mocker: MockerFixture): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") # Setup the neccesary mock patches mock_status = MagicMock() mocker.patch.object(Runner, "status", mock_status) @@ -2323,6 +2364,7 @@ def test_DDPJob_status(mocker: MockerFixture): def test_DDPJob_logs(mocker: MockerFixture): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mock_log = MagicMock() mocker.patch.object(Runner, "log_lines", mock_log) # Setup the neccesary mock patches @@ -2369,7 +2411,8 @@ def parse_j(cmd): return f"{worker}x{gpu}" -def test_AWManager_creation(): +def test_AWManager_creation(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") testaw = AWManager(f"{aw_dir}test.yaml") assert testaw.name == "test" assert testaw.namespace == "ns" @@ -2410,6 +2453,7 @@ def arg_check_aw_del_effect(group, version, namespace, plural, name, *args): def test_AWManager_submit_remove(mocker, capsys): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") testaw = AWManager(f"{aw_dir}test.yaml") testaw.remove() captured = capsys.readouterr() @@ -2790,6 +2834,7 @@ def test_replace_openshift_oauth(mocker: MockerFixture): def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "codeflare_sdk.utils.generate_yaml._get_api_host", return_value="foo.com" ) From bb34548095f2be616a9c1217a4c68ac8d147cffc Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 27 Nov 2023 11:42:05 +0000 Subject: [PATCH 085/496] Changed behaviour to create routes 
instead of ingresses on OpenShift --- src/codeflare_sdk/cluster/cluster.py | 163 +++++++++++------- src/codeflare_sdk/cluster/config.py | 2 +- .../templates/base-template.yaml | 38 +++- src/codeflare_sdk/utils/generate_yaml.py | 150 +++++++++++----- tests/unit_test.py | 92 ++-------- tests/unit_test_support.py | 2 +- 6 files changed, 265 insertions(+), 182 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 129d045a..2493df03 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -32,6 +32,7 @@ generate_appwrapper, ) from ..utils.kube_api_helpers import _kube_api_error_handling +from ..utils.generate_yaml import is_openshift_cluster from ..utils.openshift_oauth import ( create_openshift_oauth_objects, delete_openshift_oauth_objects, @@ -189,7 +190,7 @@ def create_app_wrapper(self): local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority - domain_name = self.config.domain_name + ingress_domain = self.config.ingress_domain ingress_options = self.config.ingress_options return generate_appwrapper( name=name, @@ -214,7 +215,7 @@ def create_app_wrapper(self): dispatch_priority=dispatch_priority, priority_val=priority_val, openshift_oauth=self.config.openshift_oauth, - domain_name=domain_name, + ingress_domain=ingress_domain, ingress_options=ingress_options, ) @@ -415,25 +416,48 @@ def cluster_dashboard_uri(self) -> str: """ Returns a string containing the cluster's dashboard URI. """ - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress(self.config.namespace) - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) + config_check() + if is_openshift_cluster(): + try: + api_instance = client.CustomObjectsApi(api_config_handler()) + routes = api_instance.list_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=self.config.namespace, + plural="routes", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + for route in routes["items"]: + if route["metadata"][ + "name" + ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ + "name" + ].startswith( + f"{self.config.name}-ingress" + ): + protocol = "https" if route["spec"].get("tls") else "http" + return f"{protocol}://{route['spec']['host']}" + else: + try: + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(self.config.namespace) + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) - for ingress in ingresses.items: - annotations = ingress.metadata.annotations - protocol = "http" - if ( - ingress.metadata.name == f"ray-dashboard-{self.config.name}" - or ingress.metadata.name.startswith(f"{self.config.name}-ingress") - ): - if annotations == None: - protocol = "http" - elif "route.openshift.io/termination" in annotations: - protocol = "https" - return f"{protocol}://{ingress.spec.rules[0].host}" + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{self.config.name}" + or ingress.metadata.name.startswith(f"{self.config.name}-ingress") + ): + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + return 
f"{protocol}://{ingress.spec.rules[0].host}" return "Dashboard ingress not available yet, have you run cluster.up()?" def list_jobs(self) -> List: @@ -468,7 +492,7 @@ def torchx_config( to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc, mcad=True, domain_name=None): + def from_k8_cluster_object(rc, mcad=True, ingress_domain=None): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -508,7 +532,7 @@ def from_k8_cluster_object(rc, mcad=True, domain_name=None): ]["image"], local_interactive=local_interactive, mcad=mcad, - domain_name=domain_name, + ingress_domain=ingress_domain, ) return Cluster(cluster_config) @@ -533,6 +557,14 @@ def _component_resources_up( plural="rayclusters", body=resource, ) + elif resource["kind"] == "Ingress": + api_instance.create_namespaced_custom_object( + group="networking.k8s.io", + version="v1", + namespace=namespace, + plural="ingresses", + body=resource, + ) elif resource["kind"] == "Route": api_instance.create_namespaced_custom_object( group="route.openshift.io", @@ -562,6 +594,15 @@ def _component_resources_down( plural="rayclusters", name=self.app_wrapper_name, ) + elif resource["kind"] == "Ingress": + name = resource["metadata"]["name"] + api_instance.delete_namespaced_custom_object( + group="networking.k8s.io", + version="v1", + namespace=namespace, + plural="ingresses", + name=name, + ) elif resource["kind"] == "Route": name = resource["metadata"]["name"] api_instance.delete_namespaced_custom_object( @@ -629,7 +670,7 @@ def get_current_namespace(): # pragma: no cover return None -def get_cluster(cluster_name: str, namespace: str = "default"): +def get_cluster(cluster_name: str, namespace: str = "default", ingress_domain=None): try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -645,9 +686,8 @@ def get_cluster(cluster_name: str, namespace: str = "default"): for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: mcad = _check_aw_exists(cluster_name, namespace) - domain_name = _extract_domain_name(cluster_name, namespace) return Cluster.from_k8_cluster_object( - rc, mcad=mcad, domain_name=domain_name + rc, mcad=mcad, ingress_domain=ingress_domain ) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" @@ -673,49 +713,42 @@ def _check_aw_exists(name: str, namespace: str) -> bool: return False -def _extract_domain_name(name: str, namespace: str) -> str: - try: - config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - aws = api_instance.list_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e, print_error=False) - for aw in aws["items"]: - if aw["metadata"]["name"] == name: - host = aw["spec"]["resources"]["GenericItems"][1]["generictemplate"][ - "spec" - ]["rules"][0]["host"] - - dot_index = host.find(".") - if dot_index != -1: - domain_name = host[dot_index + 1 :] - return domain_name - else: - print("Host is not configured correctly.") - return None - - # Cant test this until get_current_namespace is fixed and placed in this function over using `self` def _get_ingress_domain(self): # pragma: no cover - try: - config_check() - api_client = client.NetworkingV1Api(api_config_handler()) - if self.config.namespace != None: - namespace = self.config.namespace - else: - namespace = 
get_current_namespace() - ingresses = api_client.list_namespaced_ingress(namespace) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) + config_check() + + if self.config.namespace != None: + namespace = self.config.namespace + else: + namespace = get_current_namespace() domain = None - for ingress in ingresses.items: - if ingress.spec.rules[0].http.paths[0].backend.service.port.number == 10001: - domain = ingress.spec.rules[0].host + + if is_openshift_cluster(): + try: + api_instance = client.CustomObjectsApi(api_config_handler()) + + routes = api_instance.list_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + for route in routes["items"]: + if route["spec"]["port"]["targetPort"] == "client": + domain = route["spec"]["host"] + else: + try: + api_client = client.NetworkingV1Api(api_config_handler()) + ingresses = api_client.list_namespaced_ingress(namespace) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + for ingress in ingresses.items: + if ingress.spec.rules[0].http.paths[0].backend.service.port.number == 10001: + domain = ingress.spec.rules[0].host return domain diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index f36a6c64..0311d0e3 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -54,4 +54,4 @@ class ClusterConfiguration: dispatch_priority: str = None openshift_oauth: bool = False # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator ingress_options: dict = field(default_factory=dict) - domain_name: str = None + ingress_domain: str = None diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index c98f53c9..8e6fd0e9 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -289,7 +289,7 @@ spec: apiVersion: networking.k8s.io/v1 kind: Ingress metadata: - name: ray-dashboard-raytest + name: ray-dashboard-deployment-ingress namespace: default annotations: annotations-example:annotations-example @@ -306,12 +306,28 @@ spec: pathType: Prefix path: / host: ray-dashboard-raytest. + - replicas: 1 + generictemplate: + kind: Route + apiVersion: route.openshift.io/v1 + metadata: + name: ray-dashboard-deployment-route + namespace: default + labels: + # allows me to return name of service that Ray operator creates + odh-ray-cluster-service: deployment-name-head-svc + spec: + to: + kind: Service + name: deployment-name-head-svc + port: + targetPort: dashboard - replicas: 1 generictemplate: apiVersion: networking.k8s.io/v1 kind: Ingress metadata: - name: rayclient-deployment-name + name: rayclient-deployment-ingress namespace: default annotations: annotations-example:annotations-example @@ -330,6 +346,24 @@ spec: path: '' pathType: ImplementationSpecific host: rayclient-raytest. 
+ - replicas: 1 + generictemplate: + apiVersion: route.openshift.io/v1 + kind: Route + metadata: + name: rayclient-deployment-route + namespace: default + labels: + # allows me to return name of service that Ray operator creates + odh-ray-cluster-service: deployment-name-head-svc + spec: + port: + targetPort: client + tls: + termination: passthrough + to: + kind: Service + name: deployment-name-head-svc - replicas: 1 generictemplate: apiVersion: v1 diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index cd5f29ca..d09e4911 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -69,8 +69,49 @@ def is_openshift_cluster(): return False +def update_dashboard_route(route_item, cluster_name, namespace): + metadata = route_item.get("generictemplate", {}).get("metadata") + metadata["name"] = gen_dashboard_ingress_name(cluster_name) + metadata["namespace"] = namespace + metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" + spec = route_item.get("generictemplate", {}).get("spec") + spec["to"]["name"] = f"{cluster_name}-head-svc" + + +# ToDo: refactor the update_x_route() functions +def update_rayclient_route(route_item, cluster_name, namespace): + metadata = route_item.get("generictemplate", {}).get("metadata") + metadata["name"] = f"rayclient-{cluster_name}" + metadata["namespace"] = namespace + metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" + spec = route_item.get("generictemplate", {}).get("spec") + spec["to"]["name"] = f"{cluster_name}-head-svc" + + +def update_dashboard_exposure( + ingress_item, route_item, cluster_name, namespace, ingress_options, ingress_domain +): + if is_openshift_cluster(): + update_dashboard_route(route_item, cluster_name, namespace) + else: + update_dashboard_ingress( + ingress_item, cluster_name, namespace, ingress_options, ingress_domain + ) + + +def update_rayclient_exposure( + client_route_item, client_ingress_item, cluster_name, namespace, ingress_domain +): + if is_openshift_cluster(): + update_rayclient_route(client_route_item, cluster_name, namespace) + else: + update_rayclient_ingress( + client_ingress_item, cluster_name, namespace, ingress_domain + ) + + def update_dashboard_ingress( - ingress_item, cluster_name, namespace, ingress_options, domain_name + ingress_item, cluster_name, namespace, ingress_options, ingress_domain ): # pragma: no cover metadata = ingress_item.get("generictemplate", {}).get("metadata") spec = ingress_item.get("generictemplate", {}).get("spec") @@ -118,26 +159,24 @@ def update_dashboard_ingress( "name" ] = f"{cluster_name}-head-svc" else: - if is_openshift_cluster(): - spec["ingressClassName"] = "openshift-default" - else: - spec["ingressClassName"] = "nginx" - - metadata["name"] = f"ray-dashboard-{cluster_name}" + spec["ingressClassName"] = "nginx" + metadata["name"] = gen_dashboard_ingress_name(cluster_name) metadata["namespace"] = namespace spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ "name" ] = f"{cluster_name}-head-svc" - if domain_name is None: - raise ValueError("domain_name is invalid. Please specify an ingress domain") + if ingress_domain is None: + raise ValueError( + "ingress_domain is invalid. 
Please specify an ingress domain" + ) else: - domain = domain_name + domain = ingress_domain del metadata["annotations"] spec["rules"][0]["host"] = f"ray-dashboard-{cluster_name}-{namespace}.{domain}" def update_rayclient_ingress( - ingress_item, cluster_name, namespace, domain_name + ingress_item, cluster_name, namespace, ingress_domain ): # pragma: no cover metadata = ingress_item.get("generictemplate", {}).get("metadata") spec = ingress_item.get("generictemplate", {}).get("spec") @@ -149,27 +188,19 @@ def update_rayclient_ingress( "name" ] = f"{cluster_name}-head-svc" - if domain_name is not None: - if is_openshift_cluster(): - ingressClassName = "openshift-default" - annotations = { - "nginx.ingress.kubernetes.io/rewrite-target": "/", - "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "route.openshift.io/termination": "passthrough", - } - else: - ingressClassName = "nginx" - annotations = { - "nginx.ingress.kubernetes.io/rewrite-target": "/", - "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "nginx.ingress.kubernetes.io/ssl-passthrough": "true", - } + if ingress_domain is not None: + ingressClassName = "nginx" + annotations = { + "nginx.ingress.kubernetes.io/rewrite-target": "/", + "nginx.ingress.kubernetes.io/ssl-redirect": "true", + "nginx.ingress.kubernetes.io/ssl-passthrough": "true", + } else: - raise ValueError("domain_name is invalid. Please specify a domain") + raise ValueError("ingress_domain is invalid. Please specify a domain") metadata["annotations"] = annotations spec["ingressClassName"] = ingressClassName - spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{domain_name}" + spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{ingress_domain}" def update_names(yaml, item, appwrapper_name, cluster_name, namespace): @@ -372,9 +403,10 @@ def update_ca_secret(ca_secret_item, cluster_name, namespace): data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) -def enable_local_interactive(resources, cluster_name, namespace, domain_name): - rayclient_ingress_item = resources["resources"].get("GenericItems")[2] - ca_secret_item = resources["resources"].get("GenericItems")[3] +def enable_local_interactive(resources, cluster_name, namespace, ingress_domain): + rayclient_ingress_item = resources["resources"].get("GenericItems")[3] + rayclient_route_item = resources["resources"].get("GenericItems")[4] + ca_secret_item = resources["resources"].get("GenericItems")[5] item = resources["resources"].get("GenericItems")[0] update_ca_secret(ca_secret_item, cluster_name, namespace) # update_ca_secret_volumes @@ -398,15 +430,21 @@ def enable_local_interactive(resources, cluster_name, namespace, domain_name): command = command.replace("deployment-name", cluster_name) - if domain_name is None: + if ingress_domain is None: raise ValueError( - "domain_name is invalid. For Kubernetes Clusters please specify an ingress domain" + "ingress_domain is invalid. 
For creating the client route/ingress please specify an ingress domain" ) else: - domain = domain_name + domain = ingress_domain command = command.replace("server-name", domain) - update_rayclient_ingress(rayclient_ingress_item, cluster_name, namespace, domain) + update_rayclient_exposure( + rayclient_route_item, + rayclient_ingress_item, + cluster_name, + namespace, + ingress_domain, + ) item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" @@ -449,7 +487,9 @@ def disable_raycluster_tls(resources): updated_items = [] for i in resources["GenericItems"][:]: - if "rayclient-deployment-name" in i["generictemplate"]["metadata"]["name"]: + if "rayclient-deployment-ingress" in i["generictemplate"]["metadata"]["name"]: + continue + if "rayclient-deployment-route" in i["generictemplate"]["metadata"]["name"]: continue if "ca-secret-deployment-name" in i["generictemplate"]["metadata"]["name"]: continue @@ -458,6 +498,26 @@ def disable_raycluster_tls(resources): resources["GenericItems"] = updated_items +def delete_route_or_ingress(resources): + if is_openshift_cluster(): + client_to_remove_name = "rayclient-deployment-ingress" + dashboard_to_remove_name = "ray-dashboard-deployment-ingress" + else: + client_to_remove_name = "rayclient-deployment-route" + dashboard_to_remove_name = "ray-dashboard-deployment-route" + + updated_items = [] + for i in resources["GenericItems"][:]: + if dashboard_to_remove_name in i["generictemplate"]["metadata"]["name"]: + continue + elif client_to_remove_name in i["generictemplate"]["metadata"]["name"]: + continue + + updated_items.append(i) + + resources["GenericItems"] = updated_items + + def write_user_appwrapper(user_yaml, output_file_name): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -583,7 +643,7 @@ def generate_appwrapper( dispatch_priority: str, priority_val: int, openshift_oauth: bool, - domain_name: str, + ingress_domain: str, ingress_options: dict, ): user_yaml = read_template(template) @@ -591,6 +651,7 @@ def generate_appwrapper( resources = user_yaml.get("spec", "resources") item = resources["resources"].get("GenericItems")[0] ingress_item = resources["resources"].get("GenericItems")[1] + route_item = resources["resources"].get("GenericItems")[2] update_names(user_yaml, item, appwrapper_name, cluster_name, namespace) update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) @@ -623,14 +684,23 @@ def generate_appwrapper( head_memory, head_gpus, ) - update_dashboard_ingress( - ingress_item, cluster_name, namespace, ingress_options, domain_name + update_dashboard_exposure( + ingress_item, + route_item, + cluster_name, + namespace, + ingress_options, + ingress_domain, ) if local_interactive: - enable_local_interactive(resources, cluster_name, namespace, domain_name) + enable_local_interactive( + resources["resources"], cluster_name, namespace, ingress_domain + ) else: disable_raycluster_tls(resources["resources"]) + delete_route_or_ingress(resources["resources"]) + if openshift_oauth: enable_openshift_oauth(user_yaml, cluster_name, namespace) diff --git a/tests/unit_test.py b/tests/unit_test.py index 574898a1..50b0b4d9 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -261,26 +261,6 @@ def test_config_creation(): assert config.local_interactive == False -def sample_no_routes(): - api_versions = client.V1APIGroupList( - api_version="v1", - groups=[ - { - "name": "route.openshift.io", - "preferred_version": { 
- "group_version": "route.openshift.io/v1", - "version": "v1", - }, - "versions": [ - {"group_version": "route.openshift.io/v1", "version": "v1"} - ], - } - ], - ) - - return api_versions - - def test_cluster_creation(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") cluster = createClusterWithConfig(mocker) @@ -358,7 +338,7 @@ def test_default_cluster_creation(mocker): default_config = ClusterConfiguration( name="unit-test-default-cluster", image="quay.io/project-codeflare/ray:latest-py39-cu118", - domain_name="apps.cluster.awsroute.org", + ingress_domain="apps.cluster.awsroute.org", ) cluster = Cluster(default_config) @@ -403,6 +383,14 @@ def arg_check_apply_effect(group, version, namespace, plural, body, *args): for resource in yamls: if resource["kind"] == "RayCluster": assert body == resource + elif plural == "ingresses": + assert group == "networking.k8s.io" + assert version == "v1" + with open(f"{aw_dir}unit-test-cluster-ray.yaml") as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + for resource in yamls: + if resource["kind"] == "Ingress": + assert body == resource elif plural == "routes": assert group == "route.openshift.io" assert version == "v1" @@ -787,7 +775,7 @@ def test_ray_details(mocker, capsys): name="raytest2", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", - domain_name="apps.cluster.awsroute.org", + ingress_domain="apps.cluster.awsroute.org", ) ) captured = capsys.readouterr() @@ -1799,11 +1787,9 @@ def test_get_cluster(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", side_effect=get_ray_obj, ) - mocker.patch( - "codeflare_sdk.cluster.cluster._extract_domain_name", - return_value="apps.cluster.awsroute.org", + cluster = get_cluster( + cluster_name="quicktest", ingress_domain="apps.cluster.awsroute.org" ) - cluster = get_cluster("quicktest") cluster_config = cluster.config assert cluster_config.name == "quicktest" and cluster_config.namespace == "ns" assert ( @@ -1814,7 +1800,7 @@ def test_get_cluster(mocker): assert cluster_config.min_memory == 2 and cluster_config.max_memory == 2 assert cluster_config.num_gpus == 0 assert cluster_config.instascale - assert cluster_config.domain_name == "apps.cluster.awsroute.org" + assert cluster_config.ingress_domain == "apps.cluster.awsroute.org" assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -1926,7 +1912,7 @@ def test_cluster_status(mocker): name="test", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", - domain_name="apps.cluster.awsroute.org", + ingress_domain="apps.cluster.awsroute.org", ) ) mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) @@ -2020,7 +2006,7 @@ def test_wait_ready(mocker, capsys): name="test", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", - domain_name="apps.cluster.awsroute.org", + ingress_domain="apps.cluster.awsroute.org", ) ) try: @@ -2566,6 +2552,7 @@ def test_enable_local_interactive(mocker): cluster_name = "test-enable-local" namespace = "default" ingress_domain = "mytest.domain" + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=False ) @@ -2599,7 +2586,7 @@ def test_enable_local_interactive(mocker): worker_group_spec = aw_spec["resources"]["GenericItems"][0]["generictemplate"][ "spec" ]["workerGroupSpecs"] - ca_secret = 
aw_spec["resources"]["GenericItems"][3]["generictemplate"] + ca_secret = aw_spec["resources"]["GenericItems"][5]["generictemplate"] # At a minimal, make sure the following items are presented in the appwrapper spec.resources. # 1. headgroup has the initContainers command to generated TLS cert from the mounted CA cert. # Note: In this particular command, the DNS.5 in [alt_name] must match the exposed local_client_url: rayclient-{cluster_name}.{namespace}.{ingress_domain} @@ -2655,7 +2642,7 @@ def test_enable_local_interactive(mocker): assert ca_secret["metadata"]["namespace"] == namespace # 5. Rayclient ingress - Kind - rayclient_ingress = aw_spec["resources"]["GenericItems"][2]["generictemplate"] + rayclient_ingress = aw_spec["resources"]["GenericItems"][3]["generictemplate"] paths = [ { "backend": { @@ -2681,47 +2668,6 @@ def test_enable_local_interactive(mocker): "host": f"rayclient-{cluster_name}-{namespace}.{ingress_domain}", "http": {"paths": paths}, } - # 5.1 Rayclient ingress - OCP - user_yaml = read_template(template) - aw_spec = user_yaml.get("spec", None) - cluster_name = "test-ocp-enable-local" - namespace = "default" - ocp_cluster_domain = {"spec": {"domain": "mytest.ocp.domain"}} - ingress_domain = ocp_cluster_domain["spec"]["domain"] - mocker.patch( - "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=True - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value=ocp_cluster_domain, - ) - paths = [ - { - "backend": { - "service": { - "name": f"{cluster_name}-head-svc", - "port": {"number": 10001}, - } - }, - "path": "", - "pathType": "ImplementationSpecific", - } - ] - enable_local_interactive(aw_spec, cluster_name, namespace, ingress_domain) - rayclient_ocp_ingress = aw_spec["resources"]["GenericItems"][2]["generictemplate"] - assert rayclient_ocp_ingress["kind"] == "Ingress" - assert rayclient_ocp_ingress["metadata"]["annotations"] == { - "nginx.ingress.kubernetes.io/rewrite-target": "/", - "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "route.openshift.io/termination": "passthrough", - } - assert rayclient_ocp_ingress["metadata"]["name"] == f"rayclient-{cluster_name}" - assert rayclient_ocp_ingress["metadata"]["namespace"] == namespace - assert rayclient_ocp_ingress["spec"]["ingressClassName"] == "openshift-default" - assert rayclient_ocp_ingress["spec"]["rules"][0] == { - "host": f"rayclient-{cluster_name}-{namespace}.{ingress_domain}", - "http": {"paths": paths}, - } def test_create_openshift_oauth(mocker: MockerFixture): @@ -2851,7 +2797,7 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): "test_cluster", openshift_oauth=True, image="quay.io/project-codeflare/ray:latest-py39-cu118", - domain_name="apps.cluster.awsroute.org", + ingress_domain="apps.cluster.awsroute.org", ) ) user_yaml = write_user_appwrapper.call_args.args[0] diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 801c988c..85f1a76d 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -46,7 +46,7 @@ def createClusterConfig(): instascale=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], - domain_name="apps.cluster.awsroute.org", + ingress_domain="apps.cluster.awsroute.org", image="quay.io/project-codeflare/ray:latest-py39-cu118", ) return config From 27908c2b7cd1903ca6b7dd9ace346cee1a5bccbb Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 28 Nov 2023 16:41:06 +0000 Subject: [PATCH 086/496] updated resources reference in 
local_interactive --- src/codeflare_sdk/utils/generate_yaml.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index d09e4911..95c17cc2 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -693,9 +693,7 @@ def generate_appwrapper( ingress_domain, ) if local_interactive: - enable_local_interactive( - resources["resources"], cluster_name, namespace, ingress_domain - ) + enable_local_interactive(resources, cluster_name, namespace, ingress_domain) else: disable_raycluster_tls(resources["resources"]) From dce93f7e45ec62c5002864d4594ddbffe9cae7f7 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 20 Dec 2023 15:14:00 +0000 Subject: [PATCH 087/496] Removed get cluster fix --- src/codeflare_sdk/cluster/cluster.py | 9 +++------ tests/unit_test.py | 7 ++++--- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 2493df03..11cf5fdb 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -492,7 +492,7 @@ def torchx_config( to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc, mcad=True, ingress_domain=None): + def from_k8_cluster_object(rc, mcad=True): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -532,7 +532,6 @@ def from_k8_cluster_object(rc, mcad=True, ingress_domain=None): ]["image"], local_interactive=local_interactive, mcad=mcad, - ingress_domain=ingress_domain, ) return Cluster(cluster_config) @@ -670,7 +669,7 @@ def get_current_namespace(): # pragma: no cover return None -def get_cluster(cluster_name: str, namespace: str = "default", ingress_domain=None): +def get_cluster(cluster_name: str, namespace: str = "default"): try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -686,9 +685,7 @@ def get_cluster(cluster_name: str, namespace: str = "default", ingress_domain=No for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: mcad = _check_aw_exists(cluster_name, namespace) - return Cluster.from_k8_cluster_object( - rc, mcad=mcad, ingress_domain=ingress_domain - ) + return Cluster.from_k8_cluster_object(rc, mcad=mcad) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" ) diff --git a/tests/unit_test.py b/tests/unit_test.py index 50b0b4d9..7ad0d08d 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1787,9 +1787,11 @@ def test_get_cluster(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", side_effect=get_ray_obj, ) - cluster = get_cluster( - cluster_name="quicktest", ingress_domain="apps.cluster.awsroute.org" + mocker.patch( + "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", + return_value=True, ) + cluster = get_cluster(cluster_name="quicktest") cluster_config = cluster.config assert cluster_config.name == "quicktest" and cluster_config.namespace == "ns" assert ( @@ -1800,7 +1802,6 @@ def test_get_cluster(mocker): assert cluster_config.min_memory == 2 and cluster_config.max_memory == 2 assert cluster_config.num_gpus == 0 assert cluster_config.instascale - assert cluster_config.ingress_domain == "apps.cluster.awsroute.org" assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" From 
6490f945845d937064ee4503452f545df358ddd1 Mon Sep 17 00:00:00 2001 From: Antonin Stefanutti Date: Thu, 21 Dec 2023 16:26:16 +0100 Subject: [PATCH 088/496] Add ChristianZaccaria as reviewer --- OWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/OWNERS b/OWNERS index 22fbcf24..a15a75dc 100644 --- a/OWNERS +++ b/OWNERS @@ -7,6 +7,7 @@ approvers: reviewers: - anishasthana - Bobbins228 + - ChristianZaccaria - dimakis - Fiona-Waters - kpostoffice From f509ae154b46c77daa21cba9fda8d950ca0ee71e Mon Sep 17 00:00:00 2001 From: Antonin Stefanutti Date: Thu, 21 Dec 2023 16:23:29 +0100 Subject: [PATCH 089/496] Add Fiona-Waters as approver --- OWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/OWNERS b/OWNERS index a15a75dc..6ef2c689 100644 --- a/OWNERS +++ b/OWNERS @@ -1,6 +1,7 @@ approvers: - Bobbins228 - dimakis + - Fiona-Waters - kpostoffice - maxusmusti - MichaelClifford From d6eb184efaf458c20a9788e172a4afed1f1ffa69 Mon Sep 17 00:00:00 2001 From: Kevin Date: Mon, 8 Jan 2024 16:45:43 -0500 Subject: [PATCH 090/496] remove auto add issue workflow Signed-off-by: Kevin --- .github/workflows/auto-add-issues.yaml | 14 -------------- 1 file changed, 14 deletions(-) delete mode 100644 .github/workflows/auto-add-issues.yaml diff --git a/.github/workflows/auto-add-issues.yaml b/.github/workflows/auto-add-issues.yaml deleted file mode 100644 index 868b33c5..00000000 --- a/.github/workflows/auto-add-issues.yaml +++ /dev/null @@ -1,14 +0,0 @@ -name: Add new issues to CodeFlare Sprint Board -on: - issues: - types: - - opened -jobs: - add-to-project: - name: Add issue to project - runs-on: ubuntu-latest - steps: - - uses: actions/add-to-project@v0.5.0 - with: - project-url: https://github.com/orgs/project-codeflare/projects/8 - github-token: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} From 96e4ec4167031466f59fcecf2561488eda6a3ff0 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Mon, 14 Aug 2023 13:16:55 +0100 Subject: [PATCH 091/496] WIP - e2e --- .github/actions/kind/action.yml | 74 ++ .github/resources-kind/kind.yaml | 31 + .github/workflows/e2e_tests.yaml | 123 +++ Dockerfile | 23 + Makefile | 386 +++++++++ go.mod | 70 ++ go.sum | 803 ++++++++++++++++++ .../templates/base-template.yaml | 16 +- .../codeflare_sdk-0.0.0.dev0-py3-none-any.whl | Bin 0 -> 39252 bytes tests/e2e/install-codeflare-sdk.sh | 23 + tests/e2e/kind.sh | 129 +++ tests/e2e/mnist.py | 160 ++++ tests/e2e/mnist_pip_requirements.txt | 3 + tests/e2e/mnist_raycluster_sdk.py | 91 ++ tests/e2e/mnist_raycluster_sdk_test.go | 207 +++++ tests/e2e/setup.sh | 58 ++ tests/e2e/support.go | 44 + tmp.yaml | 37 + tmpclusterrole.yaml | 9 + tmpclusterrolebinding.yaml | 12 + tmpingressesrole.yaml | 8 + tmpingressesroleb.yaml | 12 + 22 files changed, 2311 insertions(+), 8 deletions(-) create mode 100644 .github/actions/kind/action.yml create mode 100644 .github/resources-kind/kind.yaml create mode 100644 .github/workflows/e2e_tests.yaml create mode 100644 Dockerfile create mode 100644 Makefile create mode 100644 go.mod create mode 100644 go.sum create mode 100644 tests/e2e/codeflare_sdk-0.0.0.dev0-py3-none-any.whl create mode 100644 tests/e2e/install-codeflare-sdk.sh create mode 100755 tests/e2e/kind.sh create mode 100644 tests/e2e/mnist.py create mode 100644 tests/e2e/mnist_pip_requirements.txt create mode 100644 tests/e2e/mnist_raycluster_sdk.py create mode 100644 tests/e2e/mnist_raycluster_sdk_test.go create mode 100755 tests/e2e/setup.sh create mode 100644 tests/e2e/support.go create mode 100644 tmp.yaml create mode 100644 
tmpclusterrole.yaml create mode 100644 tmpclusterrolebinding.yaml create mode 100644 tmpingressesrole.yaml create mode 100644 tmpingressesroleb.yaml diff --git a/.github/actions/kind/action.yml b/.github/actions/kind/action.yml new file mode 100644 index 00000000..f76e60af --- /dev/null +++ b/.github/actions/kind/action.yml @@ -0,0 +1,74 @@ +name: "Set up KinD" +description: "Step to start and configure KinD cluster" + +inputs: + kind-node-hostname: + description: "Hostname of the main kind node" + required: false + default: kind + +runs: + using: "composite" + steps: + - name: Init directories + shell: bash + run: | + TEMP_DIR="$(pwd)/tmp" + mkdir -p "${TEMP_DIR}" + echo "TEMP_DIR=${TEMP_DIR}" >> $GITHUB_ENV + + mkdir -p "$(pwd)/bin" + echo "$(pwd)/bin" >> $GITHUB_PATH + + - name: Container image registry + shell: bash + run: | + podman run -d -p 5000:5000 --name registry registry:2.8.1 + + export REGISTRY_ADDRESS=$(hostname -i):5000 + echo "REGISTRY_ADDRESS=${REGISTRY_ADDRESS}" >> $GITHUB_ENV + echo "Container image registry started at ${REGISTRY_ADDRESS}" + + KIND_CONFIG_FILE=${{ env.TEMP_DIR }}/kind.yaml + echo "KIND_CONFIG_FILE=${KIND_CONFIG_FILE}" >> $GITHUB_ENV + envsubst < .github/resources-kind/kind.yaml > ${KIND_CONFIG_FILE} + + sudo --preserve-env=REGISTRY_ADDRESS sh -c 'cat > /etc/containers/registries.conf.d/local.conf <> $GITHUB_ENV + echo "CLUSTER_HOSTNAME=${{ inputs.kind-node-hostname }}" >> $GITHUB_ENV diff --git a/.github/resources-kind/kind.yaml b/.github/resources-kind/kind.yaml new file mode 100644 index 00000000..4546589b --- /dev/null +++ b/.github/resources-kind/kind.yaml @@ -0,0 +1,31 @@ +# --------------------------------------------------------------------------- +# Copyright 2023. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- + +kind: Cluster +apiVersion: kind.x-k8s.io/v1alpha4 +nodes: + - role: control-plane + image: kindest/node:v1.25.3@sha256:f52781bc0d7a19fb6c405c2af83abfeb311f130707a0e219175677e366cc45d1 + kubeadmConfigPatches: + - | + kind: InitConfiguration + nodeRegistration: + kubeletExtraArgs: + node-labels: "ingress-ready=true" +containerdConfigPatches: + - |- + [plugins."io.containerd.grpc.v1.cri".registry.mirrors."${REGISTRY_ADDRESS}"] + endpoint = ["http://${REGISTRY_ADDRESS}"] diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml new file mode 100644 index 00000000..42463583 --- /dev/null +++ b/.github/workflows/e2e_tests.yaml @@ -0,0 +1,123 @@ +name: e2e + +on: + pull_request: + branches: + - main + - 'release-*' + paths-ignore: + - 'docs/**' + - '**.adoc' + - '**.md' + - 'LICENSE' + push: + branches: + - main + - 'release-*' + paths-ignore: + - 'docs/**' + - '**.adoc' + - '**.md' + - 'LICENSE' + +concurrency: + group: ${{ github.head_ref }}-${{ github.workflow }} + cancel-in-progress: true + +jobs: + kubernetes: + + runs-on: ubuntu-20.04-4core + + steps: + - name: Cleanup + run: | + ls -lart + echo "Initial status:" + df -h + + echo "Cleaning up resources:" + sudo swapoff -a + sudo rm -f /swapfile + sudo apt clean + sudo rm -rf /usr/share/dotnet + sudo rm -rf /opt/ghc + sudo rm -rf "/usr/local/share/boost" + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + docker rmi $(docker image ls -aq) + + echo "Final status:" + df -h + + - name: Checkout code + uses: actions/checkout@v2 + with: + submodules: recursive + + - name: Checkout CodeFlare operator repository + uses: actions/checkout@v3 + with: + repository: project-codeflare/codeflare-operator + path: codeflare-operator + + - name: Set Go + uses: actions/setup-go@v3 + with: + go-version: v1.19 + + - name: Set up gotestfmt + uses: gotesttools/gotestfmt-action@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup and start KinD cluster + uses: ./.github/actions/kind + + - name: Deploy CodeFlare stack + id: deploy + run: | + cd codeflare-operator + echo Deploying CodeFlare operator + IMG="${REGISTRY_ADDRESS}"/codeflare-operator + make image-push -e IMG="${IMG}" + make deploy -e IMG="${IMG}" -e ENV="e2e" + kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators codeflare-operator-manager + + echo Setting up CodeFlare stack + make setup-e2e + make defaults manifests fmt vet + + cd .. 
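
The step that follows runs the Go suite under tests/e2e, which exercises the SDK-driven test added in this patch (tests/e2e/mnist_raycluster_sdk.py alongside mnist_raycluster_sdk_test.go). As a rough sketch of what that flow looks like from the SDK side — the ClusterConfiguration field names mirror those asserted in tests/unit_test.py, while the import paths, the up()/wait_ready()/details()/down() lifecycle calls, and all concrete values here are assumptions, not the script's literal contents:

# Minimal sketch of the cluster lifecycle an SDK e2e script drives.
# Field names follow tests/unit_test.py; calls and values are assumed, not copied.
from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration

cluster = Cluster(ClusterConfiguration(
    name="mnist",                 # hypothetical cluster/AppWrapper name
    namespace="default",
    num_workers=1,
    min_memory=2,                 # values mirror the unit-test expectations
    max_memory=2,
    num_gpus=0,
    image="quay.io/project-codeflare/ray:latest-py39-cu118",
    ingress_domain="kind",        # hypothetical; must resolve to the KinD ingress
))

cluster.up()           # submit the AppWrapper so MCAD creates the RayCluster
cluster.wait_ready()   # block until the RayCluster reports a ready status
cluster.details()      # print head/worker resources and the dashboard endpoint
# ...submit and wait for the MNIST training job here...
cluster.down()         # delete the AppWrapper / RayCluster again

The ingress-ready=true node label set in kind.yaml above is what lets an ingress controller be scheduled on the KinD node, so that whatever ingress_domain the SDK test uses can actually be served.
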
+ + - name: Run e2e tests + run: | + export CODEFLARE_TEST_TIMEOUT_SHORT=1m + export CODEFLARE_TEST_TIMEOUT_MEDIUM=5m + export CODEFLARE_TEST_TIMEOUT_LONG=10m + + export CODEFLARE_TEST_OUTPUT_DIR=${{ env.TEMP_DIR }} + echo "CODEFLARE_TEST_OUTPUT_DIR=${CODEFLARE_TEST_OUTPUT_DIR}" >> $GITHUB_ENV + + set -euo pipefail + go test -timeout 30m -v ./tests/e2e -json 2>&1 | tee ${CODEFLARE_TEST_OUTPUT_DIR}/gotest.log | gotestfmt + + - name: Print CodeFlare operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing CodeFlare operator logs" + kubectl logs -n openshift-operators --tail -1 -l app.kubernetes.io/name=codeflare-operator | tee ${CODEFLARE_TEST_OUTPUT_DIR}/codeflare-operator.log + + - name: Print KubeRay operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing KubeRay operator logs" + kubectl logs -n ray-system --tail -1 -l app.kubernetes.io/name=kuberay | tee ${CODEFLARE_TEST_OUTPUT_DIR}/kuberay.log + + - name: Upload logs + uses: actions/upload-artifact@v3 + if: always() && steps.deploy.outcome == 'success' + with: + name: logs + retention-days: 10 + path: | + ${{ env.CODEFLARE_TEST_OUTPUT_DIR }}/**/*.log diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..2ea7fead --- /dev/null +++ b/Dockerfile @@ -0,0 +1,23 @@ +# Build the manager binary +FROM registry.access.redhat.com/ubi8/go-toolset:1.19.10-10 as builder + +WORKDIR /workspace +# Copy the Go Modules manifests +COPY go.mod go.mod +COPY go.sum go.sum +RUN go mod download + +# Copy the go source +COPY main.go main.go +COPY pkg/ pkg/ + +# Build +USER root +RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -a -o manager main.go + +FROM registry.access.redhat.com/ubi8/ubi-minimal:8.7 +WORKDIR / +COPY --from=builder /workspace/manager . + +USER 65532:65532 +ENTRYPOINT ["/manager"] diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..c0d41155 --- /dev/null +++ b/Makefile @@ -0,0 +1,386 @@ +# VERSION defines the project version for the bundle. +# Update this value when you upgrade the version of your project. +# To re-generate a bundle for another specific version without changing the standard setup, you can: +# - use the VERSION as arg of the bundle target (e.g make bundle VERSION=v0.0.2) +# - use environment variables to overwrite this value (e.g export VERSION=v0.0.2) +# best if we could detect this. If we cannot, we need to document it somewhere. 
+# then we can add a patch in the `PHONY: bundle` +# BUNDLE_VERSION is declared as bundle versioning doesn't use semver + +PREVIOUS_VERSION ?= v0.0.0-dev +VERSION ?= v0.0.0-dev +BUNDLE_VERSION ?= $(VERSION:v%=%) + +# INSTASCALE_VERSION defines the default version of the InstaScale controller +INSTASCALE_VERSION ?= v0.0.9 +INSTASCALE_REPO ?= github.com/project-codeflare/instascale + +# MCAD_VERSION defines the default version of the MCAD controller +MCAD_VERSION ?= 725a614debe3d34d1547c1659ef5ad49f8f6c5df +MCAD_REPO ?= github.com/project-codeflare/multi-cluster-app-dispatcher +# Upstream MCAD is currently only creating release tags of the form `vX.Y.Z` (i.e the version) +MCAD_CRD ?= ${MCAD_REPO}/config/crd?ref=${MCAD_VERSION} + +# KUBERAY_VERSION defines the default version of the KubeRay operator (used for testing) +KUBERAY_VERSION ?= v0.6.0 + +# RAY_VERSION defines the default version of Ray (used for testing) +RAY_VERSION ?= 2.5.0 + +# CODEFLARE_SDK_VERSION defines the default version of the CodeFlare SDK +CODEFLARE_SDK_VERSION ?= 0.8.0 + +# OPERATORS_REPO_ORG points to GitHub repository organization where bundle PR is opened against +# OPERATORS_REPO_FORK_ORG points to GitHub repository fork organization where bundle build is pushed to +OPERATORS_REPO_ORG ?= redhat-openshift-ecosystem +OPERATORS_REPO_FORK_ORG ?= project-codeflare + +# CHANNELS define the bundle channels used in the bundle. +# Add a new line here if you would like to change its default config. (E.g CHANNELS = "candidate,fast,stable") +# To re-generate a bundle for other specific channels without changing the standard setup, you can: +# - use the CHANNELS as arg of the bundle target (e.g make bundle CHANNELS=candidate,fast,stable) +# - use environment variables to overwrite this value (e.g export CHANNELS="candidate,fast,stable") +ifneq ($(origin CHANNELS), undefined) +BUNDLE_CHANNELS := --channels=$(CHANNELS) +endif + +# DEFAULT_CHANNEL defines the default channel used in the bundle. +# Add a new line here if you would like to change its default config. (E.g DEFAULT_CHANNEL = "stable") +# To re-generate a bundle for any other default channel without changing the default setup, you can: +# - use the DEFAULT_CHANNEL as arg of the bundle target (e.g make bundle DEFAULT_CHANNEL=stable) +# - use environment variables to overwrite this value (e.g export DEFAULT_CHANNEL="stable") +ifneq ($(origin DEFAULT_CHANNEL), undefined) +BUNDLE_DEFAULT_CHANNEL := --default-channel=$(DEFAULT_CHANNEL) +endif +BUNDLE_METADATA_OPTS ?= $(BUNDLE_CHANNELS) $(BUNDLE_DEFAULT_CHANNEL) + +# IMAGE_ORG_BASE defines the base container registry and organization for container images. +IMAGE_ORG_BASE ?= quay.io/project-codeflare + +# IMAGE_TAG_BASE defines the docker.io namespace and part of the image name for remote images. +# This variable is used to construct full image tags for bundle and catalog images. +# +# For example, running 'make bundle-build bundle-push catalog-build catalog-push' will build and push both +# codeflare.dev/codeflare-operator-bundle:$VERSION and codeflare.dev/codeflare-operator-catalog:$VERSION. +IMAGE_TAG_BASE ?= $(IMAGE_ORG_BASE)/codeflare-operator + +# RAY_IMAGE defines the default container image for Ray (used for testing) +RAY_IMAGE ?= rayproject/ray:$(RAY_VERSION) + +# BUNDLE_IMG defines the image:tag used for the bundle. +# You can use it as an arg. 
(E.g make bundle-build BUNDLE_IMG=/:) +BUNDLE_IMG ?= $(IMAGE_TAG_BASE)-bundle:$(VERSION) + +# BUNDLE_GEN_FLAGS are the flags passed to the operator-sdk generate bundle command +BUNDLE_GEN_FLAGS ?= -q --overwrite --version $(BUNDLE_VERSION) $(BUNDLE_METADATA_OPTS) + +# USE_IMAGE_DIGESTS defines if images are resolved via tags or digests +# You can enable this value if you would like to use SHA Based Digests +# To enable set flag to true +USE_IMAGE_DIGESTS ?= false +ifeq ($(USE_IMAGE_DIGESTS), true) + BUNDLE_GEN_FLAGS += --use-image-digests +endif + +# Image URL to use all building/pushing image targets +IMG ?= ${IMAGE_TAG_BASE}:${VERSION} +# ENVTEST_K8S_VERSION refers to the version of kubebuilder assets to be downloaded by envtest binary. +ENVTEST_K8S_VERSION = 1.24.2 + +# Get the currently used golang install path (in GOPATH/bin, unless GOBIN is set) +ifeq (,$(shell go env GOBIN)) +GOBIN=$(shell go env GOPATH)/bin +else +GOBIN=$(shell go env GOBIN) +endif + +# Setting SHELL to bash allows bash commands to be executed by recipes. +# Options are set to exit when a recipe line exits non-zero or a piped command fails. +SHELL = /usr/bin/env bash -o pipefail +.SHELLFLAGS = -ec + +.PHONY: all +all: build + +##@ General + +# The help target prints out all targets with their descriptions organized +# beneath their categories. The categories are represented by '##@' and the +# target descriptions by '##'. The awk commands is responsible for reading the +# entire set of makefiles included in this invocation, looking for lines of the +# file as xyz: ## something, and then pretty-format the target and help. Then, +# if there's a line with ##@ something, that gets pretty-printed as a category. +# More info on the usage of ANSI control characters for terminal formatting: +# https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters +# More info on the awk command: +# http://linuxcommand.org/lc3_adv_awk.php + +.PHONY: help +help: ## Display this help. + @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) + +##@ Development + +DEFAULTS_TEST_FILE := tests/support/defaults.go + +.PHONY: defaults +defaults: + $(info Regenerating $(DEFAULTS_TEST_FILE)) + @echo "package support" > $(DEFAULTS_TEST_FILE) + @echo "" >> $(DEFAULTS_TEST_FILE) + @echo "// ***********************" >> $(DEFAULTS_TEST_FILE) + @echo "// DO NOT EDIT THIS FILE" >> $(DEFAULTS_TEST_FILE) + @echo "// ***********************" >> $(DEFAULTS_TEST_FILE) + @echo "" >> $(DEFAULTS_TEST_FILE) + @echo "const (" >> $(DEFAULTS_TEST_FILE) + @echo " CodeFlareSDKVersion = \"$(CODEFLARE_SDK_VERSION)\"" >> $(DEFAULTS_TEST_FILE) + @echo " RayVersion = \"$(RAY_VERSION)\"" >> $(DEFAULTS_TEST_FILE) + @echo " RayImage = \"$(RAY_IMAGE)\"" >> $(DEFAULTS_TEST_FILE) + @echo "" >> $(DEFAULTS_TEST_FILE) + @echo ")" >> $(DEFAULTS_TEST_FILE) + @echo "" >> $(DEFAULTS_TEST_FILE) + + gofmt -w $(DEFAULTS_TEST_FILE) + +.PHONY: manifests +manifests: controller-gen ## Generate RBAC objects. + $(CONTROLLER_GEN) rbac:roleName=manager-role webhook paths="./..." + +.PHONY: fmt +fmt: ## Run go fmt against code. + go fmt ./... + +.PHONY: vet +vet: ## Run go vet against code. + go vet ./... + + +##@ Build + +.PHONY: modules +modules: ## Update Go dependencies. 
+ go get $(MCAD_REPO)@$(MCAD_VERSION) + go get $(INSTASCALE_REPO)@$(INSTASCALE_VERSION) + go get github.com/ray-project/kuberay/ray-operator + go mod tidy + +.PHONY: build +build: modules defaults fmt vet ## Build manager binary. + go build -o bin/manager main.go + +.PHONY: run +run: modules defaults manifests fmt vet ## Run a controller from your host. + go run ./main.go + +.PHONY: image-build +image-build: test-unit ## Build container image with the manager. + podman build -t ${IMG} . + +.PHONY: image-push +image-push: image-build ## Push container image with the manager. + podman push ${IMG} + +##@ Deployment + +ifndef ignore-not-found + ignore-not-found = false +endif + +.PHONY: install +install: manifests kustomize ## Install CRDs into the K8s cluster specified in ~/.kube/config. + $(SED) -i -E "s|(- )\${MCAD_REPO}.*|\1\${MCAD_CRD}|" config/crd/mcad/kustomization.yaml + $(KUSTOMIZE) build config/crd | kubectl apply -f - + git restore config/* + +.PHONY: uninstall +uninstall: manifests kustomize ## Uninstall CRDs from the K8s cluster specified in ~/.kube/config. Call with ignore-not-found=true to ignore resource not found errors during deletion. + $(SED) -i -E "s|(- )\${MCAD_REPO}.*|\1\${MCAD_CRD}|" config/crd/mcad/kustomization.yaml + $(KUSTOMIZE) build config/crd | kubectl delete --ignore-not-found=$(ignore-not-found) -f - + git restore config/* + +.PHONY: deploy +deploy: manifests kustomize ## Deploy controller to the K8s cluster specified in ~/.kube/config. + $(SED) -i -E "s|(- )\${MCAD_REPO}.*|\1\${MCAD_CRD}|" config/crd/mcad/kustomization.yaml + cd config/manager && $(KUSTOMIZE) edit set image controller=${IMG} + $(KUSTOMIZE) build config/default | kubectl apply -f - + git restore config/* + +.PHONY: undeploy +undeploy: ## Undeploy controller from the K8s cluster specified in ~/.kube/config. Call with ignore-not-found=true to ignore resource not found errors during deletion. + $(SED) -i -E "s|(- )\${MCAD_REPO}.*|\1\${MCAD_CRD}|" config/crd/mcad/kustomization.yaml + $(KUSTOMIZE) build config/default | kubectl delete --ignore-not-found=$(ignore-not-found) -f - + git restore config/* + +##@ Build Dependencies + +## Location to install dependencies to +LOCALBIN ?= $(shell pwd)/bin +$(LOCALBIN): + mkdir -p $(LOCALBIN) + +## Tool Binaries +KUSTOMIZE ?= $(LOCALBIN)/kustomize +CONTROLLER_GEN ?= $(LOCALBIN)/controller-gen +ENVTEST ?= $(LOCALBIN)/setup-envtest +OPENSHIFT-GOIMPORTS ?= $(LOCALBIN)/openshift-goimports +OPERATOR_SDK ?= $(LOCALBIN)/operator-sdk +GH_CLI ?= $(LOCALBIN)/gh +SED ?= /usr/bin/sed + +## Tool Versions +KUSTOMIZE_VERSION ?= v4.5.4 +CODEGEN_VERSION ?= v0.27.2 +CONTROLLER_TOOLS_VERSION ?= v0.9.2 +OPERATOR_SDK_VERSION ?= v1.27.0 +GH_CLI_VERSION ?= 2.30.0 + +KUSTOMIZE_INSTALL_SCRIPT ?= "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" +.PHONY: kustomize +kustomize: $(KUSTOMIZE) ## Download kustomize locally if necessary. 
+$(KUSTOMIZE): $(LOCALBIN) + test -s $(LOCALBIN)/kustomize || { curl -s $(KUSTOMIZE_INSTALL_SCRIPT) | bash -s -- $(subst v,,$(KUSTOMIZE_VERSION)) $(LOCALBIN); } + +GH_CLI_DL_URL := https://github.com/cli/cli/releases/download/v$(GH_CLI_VERSION) +GH_CLI_DL_FILENAME := gh_$(GH_CLI_VERSION)_$(shell go env GOOS)_$(shell go env GOARCH) +.PHONY: install-gh-cli +install-gh-cli: $(GH_CLI) +$(GH_CLI): $(LOCALBIN) + curl -L $(GH_CLI_DL_URL)/$(GH_CLI_DL_FILENAME).tar.gz --output $(GH_CLI_DL_FILENAME).tar.gz + tar -xvzf $(GH_CLI_DL_FILENAME).tar.gz + cp $(GH_CLI_DL_FILENAME)/bin/gh $(GH_CLI) + rm -rf $(GH_CLI_DL_FILENAME) + rm $(GH_CLI_DL_FILENAME).tar.gz + +.PHONY: controller-gen +controller-gen: $(CONTROLLER_GEN) ## Download controller-gen locally if necessary. +$(CONTROLLER_GEN): $(LOCALBIN) + test -s $(LOCALBIN)/controller-gen || GOBIN=$(LOCALBIN) go install sigs.k8s.io/controller-tools/cmd/controller-gen@$(CONTROLLER_TOOLS_VERSION) + +.PHONY: envtest +envtest: $(ENVTEST) ## Download envtest-setup locally if necessary. +$(ENVTEST): $(LOCALBIN) + test -s $(LOCALBIN)/setup-envtest || GOBIN=$(LOCALBIN) go install sigs.k8s.io/controller-runtime/tools/setup-envtest@latest + +.PHONY: openshift-goimports +openshift-goimports: $(OPENSHIFT-GOIMPORTS) ## Download openshift-goimports locally if necessary. +$(OPENSHIFT-GOIMPORTS): $(LOCALBIN) + test -s $(LOCALBIN)/openshift-goimports || GOBIN=$(LOCALBIN) go install github.com/openshift-eng/openshift-goimports@latest + +OPERATOR_SDK_DL_URL := https://github.com/operator-framework/operator-sdk/releases/download/$(OPERATOR_SDK_VERSION) +.PHONY: install-operator-sdk +install-operator-sdk: $(OPERATOR_SDK) ## Download fixed version operator-sdk binary for consist outcome +$(OPERATOR_SDK): $(LOCALBIN) + curl -L $(OPERATOR_SDK_DL_URL)/operator-sdk_$(shell go env GOOS)_$(shell go env GOARCH) --output $(LOCALBIN)/operator-sdk + chmod +x $(OPERATOR_SDK) + +.PHONY: validate-bundle +validate-bundle: install-operator-sdk + $(OPERATOR_SDK) bundle validate ./bundle --select-optional suite=operatorframework + +.PHONY: bundle +bundle: defaults manifests kustomize install-operator-sdk ## Generate bundle manifests and metadata, then validate generated files. + $(OPERATOR_SDK) generate kustomize manifests -q + $(SED) -i -E "s|(- )\${MCAD_REPO}.*|\1\${MCAD_CRD}|" config/crd/mcad/kustomization.yaml + cd config/manager && $(KUSTOMIZE) edit set image controller=$(IMG) + cd config/manifests && $(KUSTOMIZE) edit add patch --patch '[{"op":"add", "path":"/metadata/annotations/containerImage", "value": "$(IMG)" }]' --kind ClusterServiceVersion + cd config/manifests && $(KUSTOMIZE) edit add patch --patch '[{"op":"add", "path":"/spec/replaces", "value": "codeflare-operator.$(PREVIOUS_VERSION)" }]' --kind ClusterServiceVersion + $(KUSTOMIZE) build config/manifests | $(OPERATOR_SDK) generate bundle $(BUNDLE_GEN_FLAGS) + $(MAKE) validate-bundle + git restore config/* + +.PHONY: bundle-build +bundle-build: bundle ## Build the bundle image. + podman build -f bundle.Dockerfile -t $(BUNDLE_IMG) . + +.PHONY: bundle-push +bundle-push: ## Push the bundle image. 
+ podman push $(BUNDLE_IMG) $(BUNDLE_PUSH_OPT) + +.PHONY: openshift-community-operator-release +openshift-community-operator-release: install-gh-cli bundle ## build bundle and create PR in OpenShift community operators repository + git clone https://x-access-token:$(GH_TOKEN)@github.com/$(OPERATORS_REPO_FORK_ORG)/community-operators-prod.git + cd community-operators-prod && git remote add upstream https://github.com/$(OPERATORS_REPO_ORG)/community-operators-prod.git && git pull upstream main && git push origin main + cp -r bundle community-operators-prod/operators/codeflare-operator/$(BUNDLE_VERSION) + cd community-operators-prod && git checkout -b codeflare-release-$(BUNDLE_VERSION) && git add operators/codeflare-operator/$(BUNDLE_VERSION)/* && git commit -m "add bundle manifests codeflare version $(BUNDLE_VERSION)" --signoff && git push origin codeflare-release-$(BUNDLE_VERSION) + gh pr create --repo $(OPERATORS_REPO_ORG)/community-operators-prod --title "CodeFlare $(BUNDLE_VERSION)" --body "New release of codeflare operator" --head $(OPERATORS_REPO_FORK_ORG):codeflare-release-$(BUNDLE_VERSION) --base main + rm -rf community-operators-prod + +.PHONY: opm +OPM = ./bin/opm +opm: ## Download opm locally if necessary. +ifeq (,$(wildcard $(OPM))) +ifeq (,$(shell which opm 2>/dev/null)) + @{ \ + set -e ;\ + mkdir -p $(dir $(OPM)) ;\ + OS=$(shell go env GOOS) && ARCH=$(shell go env GOARCH) && \ + curl -sSLo $(OPM) https://github.com/operator-framework/operator-registry/releases/download/v1.23.0/$${OS}-$${ARCH}-opm ;\ + chmod +x $(OPM) ;\ + } +else +OPM = $(shell which opm) +endif +endif + +# A comma-separated list of bundle images (e.g. make catalog-build BUNDLE_IMGS=example.com/operator-bundle:v0.1.0,example.com/operator-bundle:v0.2.0). +# These images MUST exist in a registry and be pull-able. +BUNDLE_IMGS ?= $(BUNDLE_IMG) + +# The image tag given to the resulting catalog image (e.g. make catalog-build CATALOG_IMG=example.com/operator-catalog:v0.2.0). +CATALOG_IMG ?= $(IMAGE_TAG_BASE)-catalog:$(VERSION) + +# Set CATALOG_BASE_IMG to an existing catalog image tag to add $BUNDLE_IMGS to that image. +ifneq ($(origin CATALOG_BASE_IMG), undefined) +FROM_INDEX_OPT := --from-index $(CATALOG_BASE_IMG) +endif + +# Build a catalog image by adding bundle images to an empty catalog using the operator package manager tool, 'opm'. +# This recipe invokes 'opm' in 'semver' bundle add mode. For more information on add modes, see: +# https://github.com/operator-framework/community-operators/blob/7f1438c/docs/packaging-operator.md#updating-your-existing-operator +.PHONY: catalog-build +catalog-build: opm ## Build a catalog image. + $(OPM) index add --container-tool podman --mode semver --tag $(CATALOG_IMG) --bundles $(BUNDLE_IMGS) $(FROM_INDEX_OPT) + +# Build a catalog image by adding bundle images to existing catalog using the operator package manager tool, 'opm'. +.PHONY: catalog-build-from-index +catalog-build-from-index: opm ## Build a catalog image. + mkdir catalog + $(OPM) render $(CATALOG_BASE_IMG) -o yaml > catalog/bundles.yaml + $(OPM) render $(BUNDLE_IMG) $(OPM_BUNDLE_OPT) > catalog/codeflare-operator-bundle.yaml + $(SED) -i -E "s/(.*)(- name: codeflare-operator.$(PREVIOUS_VERSION).*)/\1- name: codeflare-operator.$(VERSION)\n replaces: codeflare-operator.$(PREVIOUS_VERSION)\n\2/" catalog/bundles.yaml + $(OPM) validate catalog + $(OPM) generate dockerfile catalog + podman build . -f catalog.Dockerfile -t $(CATALOG_IMG) + +# Push the catalog image. 
+.PHONY: catalog-push +catalog-push: ## Push a catalog image. + podman push $(CATALOG_IMG) $(CATALOG_PUSH_OPT) + +.PHONY: test-unit +test-unit: defaults manifests fmt vet envtest ## Run unit tests. + KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test $(go list ./... | grep -v /test/) -coverprofile cover.out + +.PHONY: test-e2e +test-e2e: ## Run e2e tests. + go test -timeout 30m -v ./tests/e2e -run TestMNISTRayClusterSDK + +.PHONY: kind-e2e +kind-e2e: ## Set up e2e KinD cluster. + tests/e2e/kind.sh + +.PHONY: setup-e2e +setup-e2e: ## Set up e2e tests. + KUBERAY_VERSION=$(KUBERAY_VERSION) tests/e2e/setup.sh + +.PHONY: imports +imports: openshift-goimports ## Organize imports in go files using openshift-goimports. Example: make imports + $(OPENSHIFT-GOIMPORTS) + +.PHONY: verify-imports +verify-imports: openshift-goimports ## Run import verifications. + ./hack/verify-imports.sh $(OPENSHIFT-GOIMPORTS) + +.PHONY: scorecard-bundle +scorecard-bundle: install-operator-sdk ## Run scorecard tests on bundle image. + $(OPERATOR_SDK) scorecard bundle \ No newline at end of file diff --git a/go.mod b/go.mod new file mode 100644 index 00000000..76ca0575 --- /dev/null +++ b/go.mod @@ -0,0 +1,70 @@ +module github.com/project-codeflare/codeflare-sdk + +go 1.19 + +require ( + github.com/onsi/gomega v1.27.10 + github.com/project-codeflare/codeflare-common v0.0.0-20231110155354-042fb171fcdb + github.com/project-codeflare/multi-cluster-app-dispatcher v1.37.0 + github.com/ray-project/kuberay/ray-operator v1.0.0 + k8s.io/api v0.26.3 + k8s.io/apimachinery v0.26.3 +) + +require ( + github.com/aymerick/douceur v0.2.0 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/cenkalti/backoff/v4 v4.1.3 // indirect + github.com/cespare/xxhash/v2 v2.1.2 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/emicklei/go-restful/v3 v3.10.1 // indirect + github.com/go-logr/logr v1.2.4 // indirect + github.com/go-openapi/jsonpointer v0.19.6 // indirect + github.com/go-openapi/jsonreference v0.20.1 // indirect + github.com/go-openapi/swag v0.22.3 // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/golang-jwt/jwt/v4 v4.4.1 // indirect + github.com/golang/glog v1.0.0 // indirect + github.com/golang/protobuf v1.5.3 // indirect + github.com/google/gnostic v0.6.9 // indirect + github.com/google/go-cmp v0.5.9 // indirect + github.com/google/gofuzz v1.2.0 // indirect + github.com/google/uuid v1.3.0 // indirect + github.com/gorilla/css v1.0.0 // indirect + github.com/imdario/mergo v0.3.12 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/mailru/easyjson v0.7.7 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.2 // indirect + github.com/microcosm-cc/bluemonday v1.0.18 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + 
github.com/openshift-online/ocm-sdk-go v0.1.368 // indirect + github.com/openshift/api v0.0.0-20230213134911-7ba313770556 // indirect + github.com/openshift/client-go v0.0.0-20221019143426-16aed247da5c // indirect + github.com/prometheus/client_golang v1.14.0 // indirect + github.com/prometheus/client_model v0.3.0 // indirect + github.com/prometheus/common v0.37.0 // indirect + github.com/prometheus/procfs v0.8.0 // indirect + github.com/spf13/pflag v1.0.5 // indirect + golang.org/x/net v0.17.0 // indirect + golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect + golang.org/x/sys v0.13.0 // indirect + golang.org/x/term v0.13.0 // indirect + golang.org/x/text v0.13.0 // indirect + golang.org/x/time v0.3.0 // indirect + google.golang.org/appengine v1.6.7 // indirect + google.golang.org/protobuf v1.28.1 // indirect + gopkg.in/inf.v0 v0.9.1 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect + k8s.io/client-go v0.26.3 // indirect + k8s.io/klog/v2 v2.100.1 // indirect + k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f // indirect + k8s.io/utils v0.0.0-20230505201702-9f6742963106 // indirect + sigs.k8s.io/controller-runtime v0.14.6 // indirect + sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect + sigs.k8s.io/structured-merge-diff/v4 v4.2.3 // indirect + sigs.k8s.io/yaml v1.3.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 00000000..c9b425cb --- /dev/null +++ b/go.sum @@ -0,0 +1,803 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore 
v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= +github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= +github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= +github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2 
h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= +github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= +github.com/emicklei/go-restful/v3 v3.10.1 h1:rc42Y5YTp7Am7CS630D7JmhRjq4UlEUuEKfrDac4bSQ= +github.com/emicklei/go-restful/v3 v3.10.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/evanphx/json-patch v4.12.0+incompatible h1:4onqiflcdA9EOZ4RxV643DvftH5pOlLGNtQ5lPWQu84= +github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww= +github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= +github.com/flowstack/go-jsonschema v0.1.1/go.mod h1:yL7fNggx1o8rm9RlgXv7hTBWxdBM0rVwpMwimd3F3N0= +github.com/fsnotify/fsnotify v1.4.7/go.mod 
h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= +github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= +github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= +github.com/go-openapi/jsonreference v0.20.1 h1:FBLnyygC4/IZZr893oiomc9XaghoveYTrLC1F86HID8= +github.com/go-openapi/jsonreference v0.20.1/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= +github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= +github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= +github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang-jwt/jwt/v4 v4.4.1 h1:pC5DB52sCeK48Wlb9oPcdhnjkz1TKt1D/P7WKJ0kUcQ= +github.com/golang-jwt/jwt/v4 v4.4.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= 
+github.com/golang/glog v1.0.0 h1:nfP3RFugxnNRyKgeWd4oI1nYvXpxrx8ck8ZrcizshdQ= +github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/gnostic v0.6.9 
h1:ZK/5VhkoX835RikCHpSUJV9a+S3e1zLh59YnyWeBW+0= +github.com/google/gnostic v0.6.9/go.mod h1:Nm8234We1lq6iB9OmlgNv3nH91XLLVZHCDayfA3xq+E= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= +github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 h1:K6RDEckDVWvDI9JAJYCmNdQXq6neHJOYx3V6jnqNEec= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod 
h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= +github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= +github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/itchyny/gojq v0.12.7 h1:hYPTpeWfrJ1OT+2j6cvBScbhl0TkdwGM4bc66onUSOQ= +github.com/itchyny/gojq v0.12.7/go.mod h1:ZdvNHVlzPgUf8pgjnuDTmGfHA/21KoutQUJ3An/xNuw= +github.com/itchyny/timefmt-go v0.1.3 h1:7M3LGVDsqcd0VZH2U+x393obrzZisp7C0uEe921iRkU= +github.com/itchyny/timefmt-go v0.1.3/go.mod h1:0osSSCQSASBJMsIZnhAaF1C2fCBTJZXrnj37mG8/c+A= +github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= +github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= +github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= +github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= +github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= +github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= +github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= +github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= +github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= +github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= +github.com/jackc/pgconn v1.12.0 h1:/RvQ24k3TnNdfBSW0ou9EOi5jx2cX7zfE8n2nLKuiP0= +github.com/jackc/pgconn v1.12.0/go.mod h1:ZkhRC59Llhrq3oSfrikvwQ5NaxYExr6twkdkMLaKono= +github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= +github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= +github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= +github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= +github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod 
h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= +github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= +github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= +github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= +github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgproto3/v2 v2.3.0 h1:brH0pCGBDkBW07HWlN/oSBXrmo3WB0UvZd1pIuDcL8Y= +github.com/jackc/pgproto3/v2 v2.3.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg= +github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= +github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= +github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= +github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= +github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= +github.com/jackc/pgtype v1.11.0 h1:u4uiGPz/1hryuXzyaBhSk6dnIyyG2683olG2OV+UUgs= +github.com/jackc/pgtype v1.11.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= +github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= +github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= +github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= +github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= +github.com/jackc/pgx/v4 v4.16.0 h1:4k1tROTJctHotannFYzu77dY3bgtMRymQP7tXQjqpPk= +github.com/jackc/pgx/v4 v4.16.0/go.mod h1:N0A9sFdWzkw/Jy1lwoiB64F2+ugFZi987zRxcPez/wI= +github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jackc/puddle v1.2.1/go.mod 
h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.5/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= 
+github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/matttproud/golang_protobuf_extensions v1.0.2 h1:hAHbPm5IJGijwng3PWk09JkG9WeqChjprR5s9bBZ+OM= +github.com/matttproud/golang_protobuf_extensions v1.0.2/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/microcosm-cc/bluemonday v1.0.18 h1:6HcxvXDAi3ARt3slx6nTesbvorIc3QeTzBNRvWktHBo= +github.com/microcosm-cc/bluemonday v1.0.18/go.mod h1:Z0r70sCuXHig8YpBzCc5eGHAap2K7e/u082ZUpDRRqM= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo/v2 
v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= +github.com/onsi/ginkgo/v2 v2.1.4/go.mod h1:um6tUpWM/cxCK3/FK8BXqEiUMUwRgSM4JXG47RKZmLU= +github.com/onsi/ginkgo/v2 v2.11.0 h1:WgqUCUt/lT6yXoQ8Wef0fsNn5cAuMK7+KT9UFRz2tcU= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= +github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= +github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M= +github.com/openshift-online/ocm-sdk-go v0.1.368 h1:qP+gkChV8WDwwpkUw1xUyjTXKdvrwyd70Gff2GMUSeU= +github.com/openshift-online/ocm-sdk-go v0.1.368/go.mod h1:KYOw8kAKAHyPrJcQoVR82CneQ4ofC02Na4cXXaTq4Nw= +github.com/openshift/api v0.0.0-20230213134911-7ba313770556 h1:7W2fOhJicyEff24VaF7ASNzPtYvr+iSCVft4SIBAzaE= +github.com/openshift/api v0.0.0-20230213134911-7ba313770556/go.mod h1:aQ6LDasvHMvHZXqLHnX2GRmnfTWCF/iIwz8EMTTIE9A= +github.com/openshift/client-go v0.0.0-20221019143426-16aed247da5c h1:CV76yFOTXmq9VciBR3Bve5ZWzSxdft7gaMVB3kS0rwg= +github.com/openshift/client-go v0.0.0-20221019143426-16aed247da5c/go.mod h1:lFMO8mLHXWFzSdYvGNo8ivF9SfF6zInA8ZGw4phRnUE= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/project-codeflare/codeflare-common v0.0.0-20231110155354-042fb171fcdb h1:L2Gdr2SlvshDKZY2KK6507AwzQ1NSfRbMQuz5dOsYNM= +github.com/project-codeflare/codeflare-common v0.0.0-20231110155354-042fb171fcdb/go.mod h1:zdi2GCYJX+QyxFWyCLMoTme3NMz/aucWDJWMqKfigxk= +github.com/project-codeflare/multi-cluster-app-dispatcher v1.37.0 h1:oyhdLdc4BgA4zcH1zlRrSrYpzuVxV5QLDbyIXrwnQqs= +github.com/project-codeflare/multi-cluster-app-dispatcher v1.37.0/go.mod h1:Yge6GRNpO9YIDfeL+XOcCE9xbmfCTD5C1h5dlW87mxQ= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_golang v1.14.0 h1:nJdhIvne2eSX/XRAFV9PcvFFRbrjbcTUj0VP62TMhnw= +github.com/prometheus/client_golang v1.14.0/go.mod h1:8vpkKitgIVNcqrRBWh1C4TIUQgYNtG/XQE4E/Zae36Y= 
+github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4= +github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.37.0 h1:ccBbHCgIiT9uSoFY0vX8H3zsNR5eLt17/RQLUvn8pXE= +github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo= +github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= +github.com/ray-project/kuberay/ray-operator v1.0.0 h1:i69nvbV7az2FG41VHQgxrmhD+SUl8ca+ek4RPbSE2Q0= +github.com/ray-project/kuberay/ray-operator v1.0.0/go.mod h1:7C7ebIkxtkmOX8w1iiLrKM1j4hkZs/Guzm3WdePk/yg= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= +github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= +github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= 
+github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= +github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= +github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= +github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.uber.org/atomic v1.3.2/go.mod 
h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220427172511-eb4f295cb31f/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image 
v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= +golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= 
+golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 h1:OSnWWcOd/CtWQC2cYSBgbTSJv3ciqd8r54ySIW2y3RE= +golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= +golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= +golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= +golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= +golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod 
h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod 
h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= +golang.org/x/tools v0.9.3 h1:Gn1I8+64MsuTb/HpH+LmQtNas23LhUVr3rYZ0eKuaMM= +golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= 
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod 
h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= +google.golang.org/protobuf v1.28.1/go.mod 
h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= +gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +k8s.io/api v0.26.3 h1:emf74GIQMTik01Aum9dPP0gAypL8JTLl/lHa4V9RFSU= +k8s.io/api v0.26.3/go.mod h1:PXsqwPMXBSBcL1lJ9CYDKy7kIReUydukS5JiRlxC3qE= +k8s.io/apimachinery v0.26.3 h1:dQx6PNETJ7nODU3XPtrwkfuubs6w7sX0M8n61zHIV/k= +k8s.io/apimachinery v0.26.3/go.mod h1:ats7nN1LExKHvJ9TmwootT00Yz05MuYqPXEXaVeOy5I= +k8s.io/client-go v0.26.3 h1:k1UY+KXfkxV2ScEL3gilKcF7761xkYsSD6BC9szIu8s= +k8s.io/client-go v0.26.3/go.mod 
h1:ZPNu9lm8/dbRIPAgteN30RSXea6vrCpFvq+MateTUuQ= +k8s.io/klog/v2 v2.100.1 h1:7WCHKK6K8fNhTqfBhISHQ97KrnJNFZMcQvKp7gP/tmg= +k8s.io/klog/v2 v2.100.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= +k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f h1:2kWPakN3i/k81b0gvD5C5FJ2kxm1WrQFanWchyKuqGg= +k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f/go.mod h1:byini6yhqGC14c3ebc/QwanvYwhuMWF6yz2F8uwW8eg= +k8s.io/utils v0.0.0-20230505201702-9f6742963106 h1:EObNQ3TW2D+WptiYXlApGNLVy0zm/JIBVY9i+M4wpAU= +k8s.io/utils v0.0.0-20230505201702-9f6742963106/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +sigs.k8s.io/controller-runtime v0.14.6 h1:oxstGVvXGNnMvY7TAESYk+lzr6S3V5VFxQ6d92KcwQA= +sigs.k8s.io/controller-runtime v0.14.6/go.mod h1:WqIdsAY6JBsjfc/CqO0CORmNtoCtE4S6qbPc9s68h+0= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE= +sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E= +sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 8e6fd0e9..8f4575a0 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -16,21 +16,21 @@ spec: custompodresources: - replicas: 1 requests: - cpu: 2 - memory: 8G + cpu: 1 + memory: 4G nvidia.com/gpu: 0 limits: - cpu: 2 - memory: 8G + cpu: 1 + memory: 4G nvidia.com/gpu: 0 - replicas: 3 requests: - cpu: 2 - memory: 12G + cpu: 1 + memory: 4G nvidia.com/gpu: 1 limits: - cpu: 2 - memory: 12G + cpu: 1 + memory: 4G nvidia.com/gpu: 1 generictemplate: # This config demonstrates KubeRay's Ray autoscaler integration. diff --git a/tests/e2e/codeflare_sdk-0.0.0.dev0-py3-none-any.whl b/tests/e2e/codeflare_sdk-0.0.0.dev0-py3-none-any.whl new file mode 100644 index 0000000000000000000000000000000000000000..da85a840077f75398ff1b2a1d190bdf882c681e1 GIT binary patch literal 39252 zcmaI6Q;;S=w1@XTC0s{GeW*{K!f2*m3nK{7L*xB6B#mt7$(9p`>%GJ=2!O;s8 zNa?>#{I4?qnP_V3=HhDZ{67^?xW#HQxUScr|l_oF?0V5uARwJuA{n_f3cM7t!kI#YFZOy#( z;Evidrfqtu9CsRJ1X%i%Kmqr;!zIwTiYD&-*|C(Md-vvi-^V|Xseah}e6cs>hnvvd z0Y;p!9uA|6_uu$Ey{0AIyKA5K+nd0sd$dTTeHb{;kmTea%5?KqsKg-P8WYrJFi8_> zas&0g`VVbJHqT_P2q%pqXLW2?@}L*=L9!Gb3(m5JIuhi{XroSSVT?VYjakmjTh->! 
za~{$*jC)tC;XaWDtK!l=%41%b)#9A6!5^B64LkIg3d*B7d*L$*HP#9&;YsUgCRjFC zODW>E>4vHU4N9m`R*RI0HZQKb9$3;v_N_%XFa8!^*M^RGs%i&|`crl3r$0D@%JE(!qRuz;UuQ&g^xg`KgBx#3eY+kbDK_TQL4(F+FrH^$V??WwD| z%YWk@ty7NaqXrm24*>iOB)Pc30wAcTe;w5Y_iITr`p9w}o{d&x0|0uUKNHM;Q)H;C z-2Xs%*f|=bN>nd&EQ(8xte*w|c;N#8FkVQgGb{jz>YRQ#_G^5XXz1Z;7a%Nv8nxNt zKWG= timeout: + raise TimeoutError(f"job has timed out after waiting {timeout}s") + sleep(5) + time += 5 + +print(f"Job has completed: {status.state}") + +print(job.logs()) + +cluster.down() + +if not status.state == AppState.SUCCEEDED: + exit(1) +else: + exit(0) diff --git a/tests/e2e/mnist_raycluster_sdk_test.go b/tests/e2e/mnist_raycluster_sdk_test.go new file mode 100644 index 00000000..cf4080ae --- /dev/null +++ b/tests/e2e/mnist_raycluster_sdk_test.go @@ -0,0 +1,207 @@ +/* +Copyright 2023. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package e2e + +import ( + "testing" + + . "github.com/onsi/gomega" + . "github.com/project-codeflare/codeflare-common/support" + mcadv1beta1 "github.com/project-codeflare/multi-cluster-app-dispatcher/pkg/apis/controller/v1beta1" + rayv1 "github.com/ray-project/kuberay/ray-operator/apis/ray/v1" + + batchv1 "k8s.io/api/batch/v1" + corev1 "k8s.io/api/core/v1" + rbacv1 "k8s.io/api/rbac/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +// Creates a Ray cluster, and trains the MNIST dataset using the CodeFlare SDK. +// Asserts successful completion of the training job. +// +// This covers the installation of the CodeFlare SDK, as well as the RBAC required +// for the SDK to successfully perform requests to the cluster, on behalf of the +// impersonated user. 
+func TestMNISTRayClusterSDK(t *testing.T) { + test := With(t) + test.T().Parallel() + + // Create a namespace + namespace := test.NewTestNamespace() + + // Test configuration + config := CreateConfigMap(test, namespace.Name, map[string][]byte{ + // SDK script + "mnist_raycluster_sdk.py": ReadFile(test, "mnist_raycluster_sdk.py"), + // pip requirements + "requirements.txt": ReadFile(test, "mnist_pip_requirements.txt"), + // MNIST training script + "mnist.py": ReadFile(test, "mnist.py"), + // SDK Wheel File + "codeflare_sdk-0.0.0.dev0-py3-none-any.whl": ReadWhlFile(test, "codeflare_sdk-0.0.0.dev0-py3-none-any.whl"), + // codeflare-sdk installation script + "install-codeflare-sdk.sh": ReadFile(test, "install-codeflare-sdk.sh"), + }) + + // Create RBAC, retrieve token for user with limited rights + policyRules := []rbacv1.PolicyRule{ + { + Verbs: []string{"get", "create", "delete", "list", "patch", "update"}, + APIGroups: []string{mcadv1beta1.GroupName}, + Resources: []string{"appwrappers"}, + }, + { + Verbs: []string{"get", "list"}, + APIGroups: []string{rayv1.GroupVersion.Group}, + Resources: []string{"rayclusters", "rayclusters/status"}, + }, + { + Verbs: []string{"get", "list"}, + APIGroups: []string{"route.openshift.io"}, + Resources: []string{"routes"}, + }, + { + Verbs: []string{"get", "list"}, + APIGroups: []string{"networking.k8s.io"}, + Resources: []string{"ingresses"}, + }, + } + + // Create cluster wide RBAC, required for SDK OpenShift check + // TODO reevaluate once SDK change OpenShift detection logic + clusterPolicyRules := []rbacv1.PolicyRule{ + { + Verbs: []string{"get", "list"}, + APIGroups: []string{"config.openshift.io"}, + Resources: []string{"ingresses"}, + ResourceNames: []string{"cluster"}, + }, + } + + sa := CreateServiceAccount(test, namespace.Name) + role := CreateRole(test, namespace.Name, policyRules) + CreateRoleBinding(test, namespace.Name, sa, role) + clusterRole := CreateClusterRole(test, clusterPolicyRules) + CreateClusterRoleBinding(test, sa, clusterRole) + + job := &batchv1.Job{ + TypeMeta: metav1.TypeMeta{ + APIVersion: batchv1.SchemeGroupVersion.String(), + Kind: "Job", + }, + ObjectMeta: metav1.ObjectMeta{ + Name: "sdk", + Namespace: namespace.Name, + }, + Spec: batchv1.JobSpec{ + Completions: Ptr(int32(1)), + Parallelism: Ptr(int32(1)), + BackoffLimit: Ptr(int32(0)), + Template: corev1.PodTemplateSpec{ + Spec: corev1.PodSpec{ + Containers: []corev1.Container{ + { + Name: "test", + // FIXME: switch to base Python image once the dependency on OpenShift CLI is removed + // See https://github.com/project-codeflare/codeflare-sdk/pull/146 + Image: "quay.io/opendatahub/notebooks:jupyter-minimal-ubi8-python-3.8-4c8f26e", + Env: []corev1.EnvVar{ + {Name: "PYTHONUSERBASE", Value: "/workdir"}, + {Name: "RAY_IMAGE", Value: GetRayImage()}, + }, + Command: []string{"/bin/sh", "-c", "cp /test/* . && chmod +x install-codeflare-sdk.sh && ./install-codeflare-sdk.sh && python mnist_raycluster_sdk.py" + " " + namespace.Name}, + // Command: []string{"/bin/sh", "-c", "pip install /test/codeflare_sdk-0.0.0.dev0-py3-none-any.whl && cp /test/* . 
&& python mnist_raycluster_sdk.py" + " " + namespace.Name}, + VolumeMounts: []corev1.VolumeMount{ + { + Name: "test", + MountPath: "/test", + }, + { + Name: "workdir", + MountPath: "/workdir", + }, + }, + WorkingDir: "/workdir", + SecurityContext: &corev1.SecurityContext{ + AllowPrivilegeEscalation: Ptr(false), + SeccompProfile: &corev1.SeccompProfile{ + Type: "RuntimeDefault", + }, + Capabilities: &corev1.Capabilities{ + Drop: []corev1.Capability{"ALL"}, + }, + RunAsNonRoot: Ptr(true), + }, + }, + }, + Volumes: []corev1.Volume{ + { + Name: "test", + VolumeSource: corev1.VolumeSource{ + ConfigMap: &corev1.ConfigMapVolumeSource{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: config.Name, + }, + }, + }, + }, + { + Name: "workdir", + VolumeSource: corev1.VolumeSource{ + EmptyDir: &corev1.EmptyDirVolumeSource{}, + }, + }, + }, + RestartPolicy: corev1.RestartPolicyNever, + ServiceAccountName: sa.Name, + }, + }, + }, + } + if GetClusterType(test) == KindCluster { + // Take first KinD node and redirect pod hostname requests there + node := GetNodes(test)[0] + hostname := GetClusterHostname(test) + IP := GetNodeInternalIP(test, node) + + test.T().Logf("Setting KinD cluster hostname '%s' to node IP '%s' for SDK pod", hostname, IP) + job.Spec.Template.Spec.HostAliases = []corev1.HostAlias{ + { + IP: IP, + Hostnames: []string{hostname}, + }, + } + + // Propagate hostname into Python code as env variable + hostnameEnvVar := corev1.EnvVar{Name: "CLUSTER_HOSTNAME", Value: hostname} + job.Spec.Template.Spec.Containers[0].Env = append(job.Spec.Template.Spec.Containers[0].Env, hostnameEnvVar) + } + job, err := test.Client().Core().BatchV1().Jobs(namespace.Name).Create(test.Ctx(), job, metav1.CreateOptions{}) + test.Expect(err).NotTo(HaveOccurred()) + test.T().Logf("Created Job %s/%s successfully", job.Namespace, job.Name) + + test.T().Logf("Waiting for Job %s/%s to complete", job.Namespace, job.Name) + test.Eventually(Job(test, job.Namespace, job.Name), TestTimeoutLong).Should( + Or( + WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue)), + WithTransform(ConditionStatus(batchv1.JobFailed), Equal(corev1.ConditionTrue)), + )) + + // Assert the job has completed successfully + test.Expect(GetJob(test, job.Namespace, job.Name)). + To(WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue))) +} diff --git a/tests/e2e/setup.sh b/tests/e2e/setup.sh new file mode 100755 index 00000000..d5c99cbf --- /dev/null +++ b/tests/e2e/setup.sh @@ -0,0 +1,58 @@ +#!/bin/bash + +# Copyright 2022 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -euo pipefail +: "${KUBERAY_VERSION}" + +echo Deploying KubeRay "${KUBERAY_VERSION}" +kubectl apply --server-side -k "github.com/ray-project/kuberay/ray-operator/config/default?ref=${KUBERAY_VERSION}&timeout=180s" + +cat < Date: Tue, 14 Nov 2023 17:45:59 +0000 Subject: [PATCH 092/496] Fix pre-commit and remove redundant files --- .github/workflows/e2e_tests.yaml | 2 +- tests/e2e/install-codeflare-sdk.sh | 2 +- tests/e2e/kind.sh | 129 ----------------------------- tests/e2e/mnist_raycluster_sdk.py | 6 +- 4 files changed, 5 insertions(+), 134 deletions(-) delete mode 100755 tests/e2e/kind.sh diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 42463583..24c22f65 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -27,7 +27,7 @@ concurrency: jobs: kubernetes: - runs-on: ubuntu-20.04-4core + runs-on: ubuntu-20.04 steps: - name: Cleanup diff --git a/tests/e2e/install-codeflare-sdk.sh b/tests/e2e/install-codeflare-sdk.sh index d7a1b253..e30f7ca5 100644 --- a/tests/e2e/install-codeflare-sdk.sh +++ b/tests/e2e/install-codeflare-sdk.sh @@ -20,4 +20,4 @@ ls # poetry install --with test,docs # Return to the previous directory -# cd .. \ No newline at end of file +# cd .. diff --git a/tests/e2e/kind.sh b/tests/e2e/kind.sh deleted file mode 100755 index 0b936f99..00000000 --- a/tests/e2e/kind.sh +++ /dev/null @@ -1,129 +0,0 @@ -# #!/bin/bash - -# # Copyright 2022 IBM, Red Hat -# # -# # Licensed under the Apache License, Version 2.0 (the "License"); -# # you may not use this file except in compliance with the License. -# # You may obtain a copy of the License at -# # -# # http://www.apache.org/licenses/LICENSE-2.0 -# # -# # Unless required by applicable law or agreed to in writing, software -# # distributed under the License is distributed on an "AS IS" BASIS, -# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# # See the License for the specific language governing permissions and -# # limitations under the License. -# set -euo pipefail -# : "${INGRESS_NGINX_VERSION:=controller-v1.6.4}" - -# echo "Creating KinD cluster" -# cat < Date: Tue, 14 Nov 2023 17:58:42 +0000 Subject: [PATCH 093/496] Fix Action --- .github/workflows/e2e_tests.yaml | 1 - Makefile | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 24c22f65..6bfc1fe8 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -85,7 +85,6 @@ jobs: echo Setting up CodeFlare stack make setup-e2e - make defaults manifests fmt vet cd .. diff --git a/Makefile b/Makefile index c0d41155..6b1f2eaa 100644 --- a/Makefile +++ b/Makefile @@ -383,4 +383,4 @@ verify-imports: openshift-goimports ## Run import verifications. .PHONY: scorecard-bundle scorecard-bundle: install-operator-sdk ## Run scorecard tests on bundle image. 
- $(OPERATOR_SDK) scorecard bundle \ No newline at end of file + $(OPERATOR_SDK) scorecard bundle From a6a6b311c0fc44aa76154855f1f17d72650b3e4c Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 14 Nov 2023 18:24:16 +0000 Subject: [PATCH 094/496] Remove redundant files --- Dockerfile | 23 -- Makefile | 386 ------------------ .../codeflare_sdk-0.0.0.dev0-py3-none-any.whl | Bin 39252 -> 39456 bytes tests/e2e/mnist.py | 2 - tests/e2e/setup.sh | 58 --- tmp.yaml | 37 -- tmpclusterrole.yaml | 9 - tmpclusterrolebinding.yaml | 12 - tmpingressesrole.yaml | 8 - tmpingressesroleb.yaml | 12 - 10 files changed, 547 deletions(-) delete mode 100644 Dockerfile delete mode 100644 Makefile delete mode 100755 tests/e2e/setup.sh delete mode 100644 tmp.yaml delete mode 100644 tmpclusterrole.yaml delete mode 100644 tmpclusterrolebinding.yaml delete mode 100644 tmpingressesrole.yaml delete mode 100644 tmpingressesroleb.yaml diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 2ea7fead..00000000 --- a/Dockerfile +++ /dev/null @@ -1,23 +0,0 @@ -# Build the manager binary -FROM registry.access.redhat.com/ubi8/go-toolset:1.19.10-10 as builder - -WORKDIR /workspace -# Copy the Go Modules manifests -COPY go.mod go.mod -COPY go.sum go.sum -RUN go mod download - -# Copy the go source -COPY main.go main.go -COPY pkg/ pkg/ - -# Build -USER root -RUN CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -a -o manager main.go - -FROM registry.access.redhat.com/ubi8/ubi-minimal:8.7 -WORKDIR / -COPY --from=builder /workspace/manager . - -USER 65532:65532 -ENTRYPOINT ["/manager"] diff --git a/Makefile b/Makefile deleted file mode 100644 index 6b1f2eaa..00000000 --- a/Makefile +++ /dev/null @@ -1,386 +0,0 @@ -# VERSION defines the project version for the bundle. -# Update this value when you upgrade the version of your project. -# To re-generate a bundle for another specific version without changing the standard setup, you can: -# - use the VERSION as arg of the bundle target (e.g make bundle VERSION=v0.0.2) -# - use environment variables to overwrite this value (e.g export VERSION=v0.0.2) -# best if we could detect this. If we cannot, we need to document it somewhere. 
-# then we can add a patch in the `PHONY: bundle` -# BUNDLE_VERSION is declared as bundle versioning doesn't use semver - -PREVIOUS_VERSION ?= v0.0.0-dev -VERSION ?= v0.0.0-dev -BUNDLE_VERSION ?= $(VERSION:v%=%) - -# INSTASCALE_VERSION defines the default version of the InstaScale controller -INSTASCALE_VERSION ?= v0.0.9 -INSTASCALE_REPO ?= github.com/project-codeflare/instascale - -# MCAD_VERSION defines the default version of the MCAD controller -MCAD_VERSION ?= 725a614debe3d34d1547c1659ef5ad49f8f6c5df -MCAD_REPO ?= github.com/project-codeflare/multi-cluster-app-dispatcher -# Upstream MCAD is currently only creating release tags of the form `vX.Y.Z` (i.e the version) -MCAD_CRD ?= ${MCAD_REPO}/config/crd?ref=${MCAD_VERSION} - -# KUBERAY_VERSION defines the default version of the KubeRay operator (used for testing) -KUBERAY_VERSION ?= v0.6.0 - -# RAY_VERSION defines the default version of Ray (used for testing) -RAY_VERSION ?= 2.5.0 - -# CODEFLARE_SDK_VERSION defines the default version of the CodeFlare SDK -CODEFLARE_SDK_VERSION ?= 0.8.0 - -# OPERATORS_REPO_ORG points to GitHub repository organization where bundle PR is opened against -# OPERATORS_REPO_FORK_ORG points to GitHub repository fork organization where bundle build is pushed to -OPERATORS_REPO_ORG ?= redhat-openshift-ecosystem -OPERATORS_REPO_FORK_ORG ?= project-codeflare - -# CHANNELS define the bundle channels used in the bundle. -# Add a new line here if you would like to change its default config. (E.g CHANNELS = "candidate,fast,stable") -# To re-generate a bundle for other specific channels without changing the standard setup, you can: -# - use the CHANNELS as arg of the bundle target (e.g make bundle CHANNELS=candidate,fast,stable) -# - use environment variables to overwrite this value (e.g export CHANNELS="candidate,fast,stable") -ifneq ($(origin CHANNELS), undefined) -BUNDLE_CHANNELS := --channels=$(CHANNELS) -endif - -# DEFAULT_CHANNEL defines the default channel used in the bundle. -# Add a new line here if you would like to change its default config. (E.g DEFAULT_CHANNEL = "stable") -# To re-generate a bundle for any other default channel without changing the default setup, you can: -# - use the DEFAULT_CHANNEL as arg of the bundle target (e.g make bundle DEFAULT_CHANNEL=stable) -# - use environment variables to overwrite this value (e.g export DEFAULT_CHANNEL="stable") -ifneq ($(origin DEFAULT_CHANNEL), undefined) -BUNDLE_DEFAULT_CHANNEL := --default-channel=$(DEFAULT_CHANNEL) -endif -BUNDLE_METADATA_OPTS ?= $(BUNDLE_CHANNELS) $(BUNDLE_DEFAULT_CHANNEL) - -# IMAGE_ORG_BASE defines the base container registry and organization for container images. -IMAGE_ORG_BASE ?= quay.io/project-codeflare - -# IMAGE_TAG_BASE defines the docker.io namespace and part of the image name for remote images. -# This variable is used to construct full image tags for bundle and catalog images. -# -# For example, running 'make bundle-build bundle-push catalog-build catalog-push' will build and push both -# codeflare.dev/codeflare-operator-bundle:$VERSION and codeflare.dev/codeflare-operator-catalog:$VERSION. -IMAGE_TAG_BASE ?= $(IMAGE_ORG_BASE)/codeflare-operator - -# RAY_IMAGE defines the default container image for Ray (used for testing) -RAY_IMAGE ?= rayproject/ray:$(RAY_VERSION) - -# BUNDLE_IMG defines the image:tag used for the bundle. -# You can use it as an arg. 
(E.g make bundle-build BUNDLE_IMG=/:) -BUNDLE_IMG ?= $(IMAGE_TAG_BASE)-bundle:$(VERSION) - -# BUNDLE_GEN_FLAGS are the flags passed to the operator-sdk generate bundle command -BUNDLE_GEN_FLAGS ?= -q --overwrite --version $(BUNDLE_VERSION) $(BUNDLE_METADATA_OPTS) - -# USE_IMAGE_DIGESTS defines if images are resolved via tags or digests -# You can enable this value if you would like to use SHA Based Digests -# To enable set flag to true -USE_IMAGE_DIGESTS ?= false -ifeq ($(USE_IMAGE_DIGESTS), true) - BUNDLE_GEN_FLAGS += --use-image-digests -endif - -# Image URL to use all building/pushing image targets -IMG ?= ${IMAGE_TAG_BASE}:${VERSION} -# ENVTEST_K8S_VERSION refers to the version of kubebuilder assets to be downloaded by envtest binary. -ENVTEST_K8S_VERSION = 1.24.2 - -# Get the currently used golang install path (in GOPATH/bin, unless GOBIN is set) -ifeq (,$(shell go env GOBIN)) -GOBIN=$(shell go env GOPATH)/bin -else -GOBIN=$(shell go env GOBIN) -endif - -# Setting SHELL to bash allows bash commands to be executed by recipes. -# Options are set to exit when a recipe line exits non-zero or a piped command fails. -SHELL = /usr/bin/env bash -o pipefail -.SHELLFLAGS = -ec - -.PHONY: all -all: build - -##@ General - -# The help target prints out all targets with their descriptions organized -# beneath their categories. The categories are represented by '##@' and the -# target descriptions by '##'. The awk commands is responsible for reading the -# entire set of makefiles included in this invocation, looking for lines of the -# file as xyz: ## something, and then pretty-format the target and help. Then, -# if there's a line with ##@ something, that gets pretty-printed as a category. -# More info on the usage of ANSI control characters for terminal formatting: -# https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters -# More info on the awk command: -# http://linuxcommand.org/lc3_adv_awk.php - -.PHONY: help -help: ## Display this help. - @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) - -##@ Development - -DEFAULTS_TEST_FILE := tests/support/defaults.go - -.PHONY: defaults -defaults: - $(info Regenerating $(DEFAULTS_TEST_FILE)) - @echo "package support" > $(DEFAULTS_TEST_FILE) - @echo "" >> $(DEFAULTS_TEST_FILE) - @echo "// ***********************" >> $(DEFAULTS_TEST_FILE) - @echo "// DO NOT EDIT THIS FILE" >> $(DEFAULTS_TEST_FILE) - @echo "// ***********************" >> $(DEFAULTS_TEST_FILE) - @echo "" >> $(DEFAULTS_TEST_FILE) - @echo "const (" >> $(DEFAULTS_TEST_FILE) - @echo " CodeFlareSDKVersion = \"$(CODEFLARE_SDK_VERSION)\"" >> $(DEFAULTS_TEST_FILE) - @echo " RayVersion = \"$(RAY_VERSION)\"" >> $(DEFAULTS_TEST_FILE) - @echo " RayImage = \"$(RAY_IMAGE)\"" >> $(DEFAULTS_TEST_FILE) - @echo "" >> $(DEFAULTS_TEST_FILE) - @echo ")" >> $(DEFAULTS_TEST_FILE) - @echo "" >> $(DEFAULTS_TEST_FILE) - - gofmt -w $(DEFAULTS_TEST_FILE) - -.PHONY: manifests -manifests: controller-gen ## Generate RBAC objects. - $(CONTROLLER_GEN) rbac:roleName=manager-role webhook paths="./..." - -.PHONY: fmt -fmt: ## Run go fmt against code. - go fmt ./... - -.PHONY: vet -vet: ## Run go vet against code. - go vet ./... - - -##@ Build - -.PHONY: modules -modules: ## Update Go dependencies. 
- go get $(MCAD_REPO)@$(MCAD_VERSION) - go get $(INSTASCALE_REPO)@$(INSTASCALE_VERSION) - go get github.com/ray-project/kuberay/ray-operator - go mod tidy - -.PHONY: build -build: modules defaults fmt vet ## Build manager binary. - go build -o bin/manager main.go - -.PHONY: run -run: modules defaults manifests fmt vet ## Run a controller from your host. - go run ./main.go - -.PHONY: image-build -image-build: test-unit ## Build container image with the manager. - podman build -t ${IMG} . - -.PHONY: image-push -image-push: image-build ## Push container image with the manager. - podman push ${IMG} - -##@ Deployment - -ifndef ignore-not-found - ignore-not-found = false -endif - -.PHONY: install -install: manifests kustomize ## Install CRDs into the K8s cluster specified in ~/.kube/config. - $(SED) -i -E "s|(- )\${MCAD_REPO}.*|\1\${MCAD_CRD}|" config/crd/mcad/kustomization.yaml - $(KUSTOMIZE) build config/crd | kubectl apply -f - - git restore config/* - -.PHONY: uninstall -uninstall: manifests kustomize ## Uninstall CRDs from the K8s cluster specified in ~/.kube/config. Call with ignore-not-found=true to ignore resource not found errors during deletion. - $(SED) -i -E "s|(- )\${MCAD_REPO}.*|\1\${MCAD_CRD}|" config/crd/mcad/kustomization.yaml - $(KUSTOMIZE) build config/crd | kubectl delete --ignore-not-found=$(ignore-not-found) -f - - git restore config/* - -.PHONY: deploy -deploy: manifests kustomize ## Deploy controller to the K8s cluster specified in ~/.kube/config. - $(SED) -i -E "s|(- )\${MCAD_REPO}.*|\1\${MCAD_CRD}|" config/crd/mcad/kustomization.yaml - cd config/manager && $(KUSTOMIZE) edit set image controller=${IMG} - $(KUSTOMIZE) build config/default | kubectl apply -f - - git restore config/* - -.PHONY: undeploy -undeploy: ## Undeploy controller from the K8s cluster specified in ~/.kube/config. Call with ignore-not-found=true to ignore resource not found errors during deletion. - $(SED) -i -E "s|(- )\${MCAD_REPO}.*|\1\${MCAD_CRD}|" config/crd/mcad/kustomization.yaml - $(KUSTOMIZE) build config/default | kubectl delete --ignore-not-found=$(ignore-not-found) -f - - git restore config/* - -##@ Build Dependencies - -## Location to install dependencies to -LOCALBIN ?= $(shell pwd)/bin -$(LOCALBIN): - mkdir -p $(LOCALBIN) - -## Tool Binaries -KUSTOMIZE ?= $(LOCALBIN)/kustomize -CONTROLLER_GEN ?= $(LOCALBIN)/controller-gen -ENVTEST ?= $(LOCALBIN)/setup-envtest -OPENSHIFT-GOIMPORTS ?= $(LOCALBIN)/openshift-goimports -OPERATOR_SDK ?= $(LOCALBIN)/operator-sdk -GH_CLI ?= $(LOCALBIN)/gh -SED ?= /usr/bin/sed - -## Tool Versions -KUSTOMIZE_VERSION ?= v4.5.4 -CODEGEN_VERSION ?= v0.27.2 -CONTROLLER_TOOLS_VERSION ?= v0.9.2 -OPERATOR_SDK_VERSION ?= v1.27.0 -GH_CLI_VERSION ?= 2.30.0 - -KUSTOMIZE_INSTALL_SCRIPT ?= "https://raw.githubusercontent.com/kubernetes-sigs/kustomize/master/hack/install_kustomize.sh" -.PHONY: kustomize -kustomize: $(KUSTOMIZE) ## Download kustomize locally if necessary. 
-$(KUSTOMIZE): $(LOCALBIN) - test -s $(LOCALBIN)/kustomize || { curl -s $(KUSTOMIZE_INSTALL_SCRIPT) | bash -s -- $(subst v,,$(KUSTOMIZE_VERSION)) $(LOCALBIN); } - -GH_CLI_DL_URL := https://github.com/cli/cli/releases/download/v$(GH_CLI_VERSION) -GH_CLI_DL_FILENAME := gh_$(GH_CLI_VERSION)_$(shell go env GOOS)_$(shell go env GOARCH) -.PHONY: install-gh-cli -install-gh-cli: $(GH_CLI) -$(GH_CLI): $(LOCALBIN) - curl -L $(GH_CLI_DL_URL)/$(GH_CLI_DL_FILENAME).tar.gz --output $(GH_CLI_DL_FILENAME).tar.gz - tar -xvzf $(GH_CLI_DL_FILENAME).tar.gz - cp $(GH_CLI_DL_FILENAME)/bin/gh $(GH_CLI) - rm -rf $(GH_CLI_DL_FILENAME) - rm $(GH_CLI_DL_FILENAME).tar.gz - -.PHONY: controller-gen -controller-gen: $(CONTROLLER_GEN) ## Download controller-gen locally if necessary. -$(CONTROLLER_GEN): $(LOCALBIN) - test -s $(LOCALBIN)/controller-gen || GOBIN=$(LOCALBIN) go install sigs.k8s.io/controller-tools/cmd/controller-gen@$(CONTROLLER_TOOLS_VERSION) - -.PHONY: envtest -envtest: $(ENVTEST) ## Download envtest-setup locally if necessary. -$(ENVTEST): $(LOCALBIN) - test -s $(LOCALBIN)/setup-envtest || GOBIN=$(LOCALBIN) go install sigs.k8s.io/controller-runtime/tools/setup-envtest@latest - -.PHONY: openshift-goimports -openshift-goimports: $(OPENSHIFT-GOIMPORTS) ## Download openshift-goimports locally if necessary. -$(OPENSHIFT-GOIMPORTS): $(LOCALBIN) - test -s $(LOCALBIN)/openshift-goimports || GOBIN=$(LOCALBIN) go install github.com/openshift-eng/openshift-goimports@latest - -OPERATOR_SDK_DL_URL := https://github.com/operator-framework/operator-sdk/releases/download/$(OPERATOR_SDK_VERSION) -.PHONY: install-operator-sdk -install-operator-sdk: $(OPERATOR_SDK) ## Download fixed version operator-sdk binary for consist outcome -$(OPERATOR_SDK): $(LOCALBIN) - curl -L $(OPERATOR_SDK_DL_URL)/operator-sdk_$(shell go env GOOS)_$(shell go env GOARCH) --output $(LOCALBIN)/operator-sdk - chmod +x $(OPERATOR_SDK) - -.PHONY: validate-bundle -validate-bundle: install-operator-sdk - $(OPERATOR_SDK) bundle validate ./bundle --select-optional suite=operatorframework - -.PHONY: bundle -bundle: defaults manifests kustomize install-operator-sdk ## Generate bundle manifests and metadata, then validate generated files. - $(OPERATOR_SDK) generate kustomize manifests -q - $(SED) -i -E "s|(- )\${MCAD_REPO}.*|\1\${MCAD_CRD}|" config/crd/mcad/kustomization.yaml - cd config/manager && $(KUSTOMIZE) edit set image controller=$(IMG) - cd config/manifests && $(KUSTOMIZE) edit add patch --patch '[{"op":"add", "path":"/metadata/annotations/containerImage", "value": "$(IMG)" }]' --kind ClusterServiceVersion - cd config/manifests && $(KUSTOMIZE) edit add patch --patch '[{"op":"add", "path":"/spec/replaces", "value": "codeflare-operator.$(PREVIOUS_VERSION)" }]' --kind ClusterServiceVersion - $(KUSTOMIZE) build config/manifests | $(OPERATOR_SDK) generate bundle $(BUNDLE_GEN_FLAGS) - $(MAKE) validate-bundle - git restore config/* - -.PHONY: bundle-build -bundle-build: bundle ## Build the bundle image. - podman build -f bundle.Dockerfile -t $(BUNDLE_IMG) . - -.PHONY: bundle-push -bundle-push: ## Push the bundle image. 
- podman push $(BUNDLE_IMG) $(BUNDLE_PUSH_OPT) - -.PHONY: openshift-community-operator-release -openshift-community-operator-release: install-gh-cli bundle ## build bundle and create PR in OpenShift community operators repository - git clone https://x-access-token:$(GH_TOKEN)@github.com/$(OPERATORS_REPO_FORK_ORG)/community-operators-prod.git - cd community-operators-prod && git remote add upstream https://github.com/$(OPERATORS_REPO_ORG)/community-operators-prod.git && git pull upstream main && git push origin main - cp -r bundle community-operators-prod/operators/codeflare-operator/$(BUNDLE_VERSION) - cd community-operators-prod && git checkout -b codeflare-release-$(BUNDLE_VERSION) && git add operators/codeflare-operator/$(BUNDLE_VERSION)/* && git commit -m "add bundle manifests codeflare version $(BUNDLE_VERSION)" --signoff && git push origin codeflare-release-$(BUNDLE_VERSION) - gh pr create --repo $(OPERATORS_REPO_ORG)/community-operators-prod --title "CodeFlare $(BUNDLE_VERSION)" --body "New release of codeflare operator" --head $(OPERATORS_REPO_FORK_ORG):codeflare-release-$(BUNDLE_VERSION) --base main - rm -rf community-operators-prod - -.PHONY: opm -OPM = ./bin/opm -opm: ## Download opm locally if necessary. -ifeq (,$(wildcard $(OPM))) -ifeq (,$(shell which opm 2>/dev/null)) - @{ \ - set -e ;\ - mkdir -p $(dir $(OPM)) ;\ - OS=$(shell go env GOOS) && ARCH=$(shell go env GOARCH) && \ - curl -sSLo $(OPM) https://github.com/operator-framework/operator-registry/releases/download/v1.23.0/$${OS}-$${ARCH}-opm ;\ - chmod +x $(OPM) ;\ - } -else -OPM = $(shell which opm) -endif -endif - -# A comma-separated list of bundle images (e.g. make catalog-build BUNDLE_IMGS=example.com/operator-bundle:v0.1.0,example.com/operator-bundle:v0.2.0). -# These images MUST exist in a registry and be pull-able. -BUNDLE_IMGS ?= $(BUNDLE_IMG) - -# The image tag given to the resulting catalog image (e.g. make catalog-build CATALOG_IMG=example.com/operator-catalog:v0.2.0). -CATALOG_IMG ?= $(IMAGE_TAG_BASE)-catalog:$(VERSION) - -# Set CATALOG_BASE_IMG to an existing catalog image tag to add $BUNDLE_IMGS to that image. -ifneq ($(origin CATALOG_BASE_IMG), undefined) -FROM_INDEX_OPT := --from-index $(CATALOG_BASE_IMG) -endif - -# Build a catalog image by adding bundle images to an empty catalog using the operator package manager tool, 'opm'. -# This recipe invokes 'opm' in 'semver' bundle add mode. For more information on add modes, see: -# https://github.com/operator-framework/community-operators/blob/7f1438c/docs/packaging-operator.md#updating-your-existing-operator -.PHONY: catalog-build -catalog-build: opm ## Build a catalog image. - $(OPM) index add --container-tool podman --mode semver --tag $(CATALOG_IMG) --bundles $(BUNDLE_IMGS) $(FROM_INDEX_OPT) - -# Build a catalog image by adding bundle images to existing catalog using the operator package manager tool, 'opm'. -.PHONY: catalog-build-from-index -catalog-build-from-index: opm ## Build a catalog image. - mkdir catalog - $(OPM) render $(CATALOG_BASE_IMG) -o yaml > catalog/bundles.yaml - $(OPM) render $(BUNDLE_IMG) $(OPM_BUNDLE_OPT) > catalog/codeflare-operator-bundle.yaml - $(SED) -i -E "s/(.*)(- name: codeflare-operator.$(PREVIOUS_VERSION).*)/\1- name: codeflare-operator.$(VERSION)\n replaces: codeflare-operator.$(PREVIOUS_VERSION)\n\2/" catalog/bundles.yaml - $(OPM) validate catalog - $(OPM) generate dockerfile catalog - podman build . -f catalog.Dockerfile -t $(CATALOG_IMG) - -# Push the catalog image. 
-.PHONY: catalog-push -catalog-push: ## Push a catalog image. - podman push $(CATALOG_IMG) $(CATALOG_PUSH_OPT) - -.PHONY: test-unit -test-unit: defaults manifests fmt vet envtest ## Run unit tests. - KUBEBUILDER_ASSETS="$(shell $(ENVTEST) use $(ENVTEST_K8S_VERSION) --bin-dir $(LOCALBIN) -p path)" go test $(go list ./... | grep -v /test/) -coverprofile cover.out - -.PHONY: test-e2e -test-e2e: ## Run e2e tests. - go test -timeout 30m -v ./tests/e2e -run TestMNISTRayClusterSDK - -.PHONY: kind-e2e -kind-e2e: ## Set up e2e KinD cluster. - tests/e2e/kind.sh - -.PHONY: setup-e2e -setup-e2e: ## Set up e2e tests. - KUBERAY_VERSION=$(KUBERAY_VERSION) tests/e2e/setup.sh - -.PHONY: imports -imports: openshift-goimports ## Organize imports in go files using openshift-goimports. Example: make imports - $(OPENSHIFT-GOIMPORTS) - -.PHONY: verify-imports -verify-imports: openshift-goimports ## Run import verifications. - ./hack/verify-imports.sh $(OPENSHIFT-GOIMPORTS) - -.PHONY: scorecard-bundle -scorecard-bundle: install-operator-sdk ## Run scorecard tests on bundle image. - $(OPERATOR_SDK) scorecard bundle diff --git a/tests/e2e/codeflare_sdk-0.0.0.dev0-py3-none-any.whl b/tests/e2e/codeflare_sdk-0.0.0.dev0-py3-none-any.whl index da85a840077f75398ff1b2a1d190bdf882c681e1..872bc4edb183aaa421edc093759008693ddbb814 100644 GIT binary patch delta 14320 zcmY+rb8sb0(C8i8wr$(ColQ2_*mh2|u}_?hu`y4aY;4=MZQbX6tL|6#o2seqndzSS zuV&;#*u+J3%%E-9F8PEd zzjd-V!Y?1sSfW`^=1NBa1I|QJ*v9hxe4hJ^NJTv!?ZsHuAZ=#p+wRxy=j-bmK0TtY zUwwYzIh*YC-2IvM=j-M^{79jnyX%k<_o35(1ET2 zFF;>E(P=+`V*nwEE?VXr9&O1&HtjSE%R;d|93rxc{GRD=SYXCn1+~6->0!fIBPc_o zVQdZhTm~!(e834p%4fb*P4M>{wtsf^W29Jya7=SG%F~lhrS=~V`~8N0o8vx{me-G` zv!knnqli5jO+JPPAR9U5?^z4(8Q(@48vrz%@;sp!r>bsomAtQ^CY_z4l()8(gU&|0 zyyn!pwt;34$9Purlw1oty7FAW2k!&~Dhog&^hr5SKe1AV_l<7c9(VSf31bAp;aoz_>ZOX5pU*Z16oPb|z z&?Pt!_sNtCTJBksjr23x8=j}k)Gdd^)re#fS|DRfBysv%AvWK55{6M zndUDDqjnja2OYY$AcoyXlRV8vgeKi66_R3j6-10AmD2$6#nQm92D>kC2r$|5;$>3- zK|4m;iaiQ-9F};r#{*cMXxJdkkSwad>77^>#wimM3R6GbJ#wvhzsYA3=223nLxfF< zpjfn4r8@P&WL*tnXk3;p+a%y%Y8{q=6;t`BjS~Kfhz5U|bE@VoSSnFr%c?8jQXIAW z0U#1o8Be$3fDkz*Ym~zZKz|fNzK78g5x+_Era`@%Mi$lG)nD$xsT=`Jmh{Pt#mw11 zjun*khfK*gzavTO(~Tw<Qb$D%>M5jn$C#H$H6-PQG4O;koAHCiDfY6!6QSU#lrhD&10<|P<~6~SGZzTa zat#;KYcST*IpVTYDE^!)OY=ElstUBqIxA`}G0mYM1&d7B#rR;5=JZz! 
[GIT binary patch payload omitted: base85-encoded delta 14320 / delta 14094 for tests/e2e/codeflare_sdk-0.0.0.dev0-py3-none-any.whl — not human-readable]
z+cmA8s->lJNxz&HGqT3E2zCewutnkW_)DVi@tJnd4^w8tcU4(oL2g0dWhY7}v#IN{ zbluVwzI>g>sHS1cY6(Jiuj)NYxaT~4EfYG%A`$SxgCjPM*%H9u9m3gQ3ta@qxZJSk z8KBT!7T0NYiA_&UhrYe1d9}w8NkkKLk{i3Xieo04&@}7U3hHF)KE&F%r#OMZ!R0(5 zX=*=Tm;$D&G{O~OtLNM;4aru!e9O=bc8Voda9B1(iwLQZ5!!TrUZD3kK{7ieNq ziW=ykXyQ&PW}@a_xJ1NMT2Rox|8wP2iShrB$^rsH{{L6_@4WI4koo_TU=k}+nSWs> zZl@}P_L=_gvfB(bQ7erWRL16iZgv_MsESJ>a~wtDR~j=YOz{7pN;(T@P0asbO*#ht k|60p`;_VS62nfypRjKHgWbA)QbcwJT$dJzo|8L~~0U2c(VgLXD diff --git a/tests/e2e/mnist.py b/tests/e2e/mnist.py index 244c84d2..a9958965 100644 --- a/tests/e2e/mnist.py +++ b/tests/e2e/mnist.py @@ -35,7 +35,6 @@ class LitMNIST(LightningModule): def __init__(self, data_dir=PATH_DATASETS, hidden_size=64, learning_rate=2e-4): - super().__init__() # Set our init args as class attributes @@ -116,7 +115,6 @@ def prepare_data(self): MNIST(self.data_dir, train=False, download=True) def setup(self, stage=None): - # Assign train/val datasets for use in dataloaders if stage == "fit" or stage is None: mnist_full = MNIST(self.data_dir, train=True, transform=self.transform) diff --git a/tests/e2e/setup.sh b/tests/e2e/setup.sh deleted file mode 100755 index d5c99cbf..00000000 --- a/tests/e2e/setup.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash - -# Copyright 2022 IBM, Red Hat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -euo pipefail -: "${KUBERAY_VERSION}" - -echo Deploying KubeRay "${KUBERAY_VERSION}" -kubectl apply --server-side -k "github.com/ray-project/kuberay/ray-operator/config/default?ref=${KUBERAY_VERSION}&timeout=180s" - -cat < Date: Wed, 22 Nov 2023 19:57:54 +0000 Subject: [PATCH 095/496] Reflect Poetry changes --- .github/workflows/e2e_tests.yaml | 2 +- copyscript.sh | 30 +++++++++++ tests/e2e/install-codeflare-sdk.sh | 50 ++++++++++++------ tests/e2e/mnist_raycluster_sdk.py | 2 +- tests/e2e/mnist_raycluster_sdk_test.go | 73 ++++++++++++++++++-------- 5 files changed, 116 insertions(+), 41 deletions(-) create mode 100755 copyscript.sh diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 6bfc1fe8..983fed4a 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -92,7 +92,7 @@ jobs: run: | export CODEFLARE_TEST_TIMEOUT_SHORT=1m export CODEFLARE_TEST_TIMEOUT_MEDIUM=5m - export CODEFLARE_TEST_TIMEOUT_LONG=10m + export CODEFLARE_TEST_TIMEOUT_LONG=15m export CODEFLARE_TEST_OUTPUT_DIR=${{ env.TEMP_DIR }} echo "CODEFLARE_TEST_OUTPUT_DIR=${CODEFLARE_TEST_OUTPUT_DIR}" >> $GITHUB_ENV diff --git a/copyscript.sh b/copyscript.sh new file mode 100755 index 00000000..cfbf30b9 --- /dev/null +++ b/copyscript.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +# command to be ran on local host and not in the pod +# kubectl apply -f temp-pod.yaml +# kubectl cp ./ default/temp-pod4:/mnt + + + +echo "starting" +namespace=$1 +# podname=$(kubectl get pods -n "${namespace}" -o custom-columns=:metadata.name | grep "^sdk") +# kubectl cp ./ ${namespace}/${podname}:/codeflare-sdk + +# kubectl cp ./ ${namespace}/temp-pod:/mnt + +sleep 60 + +# Get the name of the pod starting with 'sdk' and its status +read podname podstatus <<< $(kubectl get pods -n "${namespace}" -o custom-columns=:metadata.name,:status.phase | grep "^sdk" | awk '{print $1, $2}') + +echo "$podname, $podstatus, $namespace" + +# Check if the pod is found and is in 'Running' status +if [[ -n "$podname" && "$podstatus" == "Running" ]]; then + echo "Pod ${podname} is running. Proceeding to copy files." + kubectl cp ../.././ "${namespace}/${podname}:/codeflare-sdk" +else + echo "Pod not found or not running." + exit 1 +fi diff --git a/tests/e2e/install-codeflare-sdk.sh b/tests/e2e/install-codeflare-sdk.sh index e30f7ca5..0c574d4a 100644 --- a/tests/e2e/install-codeflare-sdk.sh +++ b/tests/e2e/install-codeflare-sdk.sh @@ -1,23 +1,39 @@ #!/bin/bash +# Generate zip file beforehand +# tar -czvf root-dir.tar.gz --exclude='.git' --exclude='.github' --exclude='.pytest_cache' ./ + +# Create temp-pod to contain the zip file +# kubectl apply -f temp-pod.yaml + +# Copy the zip file to the temp-pod:/mnt +# kubectl cp root-dir.tar.gz temp-pod:/mnt + +# Run a shell in the temp-pod +# kubectl exec -it temp-pod -- /bin/sh + +# Unzip the zip file +# tar -xzvf /mnt/root-dir.tar.gz -C /mnt + +# Not necessary as the PVC is mounted to /codeflare-sdk +# mv /mnt/* /codeflare-sdk/ + +## Copy files from temp-pod /mnt into the codeflare-sdk volume mount in this other pod. +# kubectl cp default/temp-pod:/mnt codeflare-sdk + + +cd .. 
+ # Install Poetry and configure virtualenvs pip install poetry poetry config virtualenvs.create false -# Clone the CodeFlare SDK repository -echo "first ls" -ls -pip uninstall codeflare-sdk -echo "install codeflare sdk" -pip install codeflare_sdk-0.0.0.dev0-py3-none-any.whl -echo "second ls" -ls -# git clone --branch main https://github.com/project-codeflare/codeflare-sdk.git -# cd codeflare-sdk - -# # Lock dependencies and install them -# poetry lock --no-update -# poetry install --with test,docs - -# Return to the previous directory -# cd .. +cd codeflare-sdk + +# Lock dependencies and install them +poetry lock --no-update +poetry install --with test,docs + +# Return to the workdir +cd .. +cd workdir diff --git a/tests/e2e/mnist_raycluster_sdk.py b/tests/e2e/mnist_raycluster_sdk.py index acfac720..067993a3 100644 --- a/tests/e2e/mnist_raycluster_sdk.py +++ b/tests/e2e/mnist_raycluster_sdk.py @@ -67,7 +67,7 @@ done = False time = 0 -timeout = 300 +timeout = 500 while not done: status = job.status() if is_terminal(status.state): diff --git a/tests/e2e/mnist_raycluster_sdk_test.go b/tests/e2e/mnist_raycluster_sdk_test.go index cf4080ae..e6049242 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.go +++ b/tests/e2e/mnist_raycluster_sdk_test.go @@ -17,6 +17,8 @@ limitations under the License. package e2e import ( + "bytes" + "os/exec" "testing" . "github.com/onsi/gomega" @@ -51,8 +53,6 @@ func TestMNISTRayClusterSDK(t *testing.T) { "requirements.txt": ReadFile(test, "mnist_pip_requirements.txt"), // MNIST training script "mnist.py": ReadFile(test, "mnist.py"), - // SDK Wheel File - "codeflare_sdk-0.0.0.dev0-py3-none-any.whl": ReadWhlFile(test, "codeflare_sdk-0.0.0.dev0-py3-none-any.whl"), // codeflare-sdk installation script "install-codeflare-sdk.sh": ReadFile(test, "install-codeflare-sdk.sh"), }) @@ -113,6 +113,30 @@ func TestMNISTRayClusterSDK(t *testing.T) { BackoffLimit: Ptr(int32(0)), Template: corev1.PodTemplateSpec{ Spec: corev1.PodSpec{ + Volumes: []corev1.Volume{ + { + Name: "test", + VolumeSource: corev1.VolumeSource{ + ConfigMap: &corev1.ConfigMapVolumeSource{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: config.Name, + }, + }, + }, + }, + { + Name: "codeflare-sdk", + VolumeSource: corev1.VolumeSource{ + EmptyDir: &corev1.EmptyDirVolumeSource{}, + }, + }, + { + Name: "workdir", + VolumeSource: corev1.VolumeSource{ + EmptyDir: &corev1.EmptyDirVolumeSource{}, + }, + }, + }, Containers: []corev1.Container{ { Name: "test", @@ -123,13 +147,20 @@ func TestMNISTRayClusterSDK(t *testing.T) { {Name: "PYTHONUSERBASE", Value: "/workdir"}, {Name: "RAY_IMAGE", Value: GetRayImage()}, }, - Command: []string{"/bin/sh", "-c", "cp /test/* . && chmod +x install-codeflare-sdk.sh && ./install-codeflare-sdk.sh && python mnist_raycluster_sdk.py" + " " + namespace.Name}, - // Command: []string{"/bin/sh", "-c", "pip install /test/codeflare_sdk-0.0.0.dev0-py3-none-any.whl && cp /test/* . && python mnist_raycluster_sdk.py" + " " + namespace.Name}, + Command: []string{ + "/bin/sh", "-c", + "while [ ! -f /codeflare-sdk/pyproject.toml ]; do sleep 1; done; " + + "cp /test/* . 
&& chmod +x install-codeflare-sdk.sh && ./install-codeflare-sdk.sh && python mnist_raycluster_sdk.py " + namespace.Name, + }, VolumeMounts: []corev1.VolumeMount{ { Name: "test", MountPath: "/test", }, + { + Name: "codeflare-sdk", + MountPath: "/codeflare-sdk", + }, { Name: "workdir", MountPath: "/workdir", @@ -148,24 +179,6 @@ func TestMNISTRayClusterSDK(t *testing.T) { }, }, }, - Volumes: []corev1.Volume{ - { - Name: "test", - VolumeSource: corev1.VolumeSource{ - ConfigMap: &corev1.ConfigMapVolumeSource{ - LocalObjectReference: corev1.LocalObjectReference{ - Name: config.Name, - }, - }, - }, - }, - { - Name: "workdir", - VolumeSource: corev1.VolumeSource{ - EmptyDir: &corev1.EmptyDirVolumeSource{}, - }, - }, - }, RestartPolicy: corev1.RestartPolicyNever, ServiceAccountName: sa.Name, }, @@ -194,6 +207,22 @@ func TestMNISTRayClusterSDK(t *testing.T) { test.Expect(err).NotTo(HaveOccurred()) test.T().Logf("Created Job %s/%s successfully", job.Namespace, job.Name) + go func() { + scriptName := "../.././copyscript.sh" + cmd := exec.Command(scriptName, namespace.Name) + + var stdoutBuf, stderrBuf bytes.Buffer + cmd.Stdout = &stdoutBuf + cmd.Stderr = &stderrBuf + + // Run the script + if err := cmd.Run(); err != nil { + t.Logf("STDOUT: %s", stdoutBuf.String()) + t.Logf("STDERR: %s", stderrBuf.String()) + t.Logf("Failed to run the script: %v", err) + } + }() + test.T().Logf("Waiting for Job %s/%s to complete", job.Namespace, job.Name) test.Eventually(Job(test, job.Namespace, job.Name), TestTimeoutLong).Should( Or( From a404ff697d844360f91277ac80362cf148db05f9 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 24 Nov 2023 10:23:05 +0000 Subject: [PATCH 096/496] Cleanup code --- .github/workflows/e2e_tests.yaml | 2 -- copyscript.sh | 30 -------------------------- tests/e2e/install-codeflare-sdk.sh | 22 ------------------- tests/e2e/mnist_raycluster_sdk.py | 3 +-- tests/e2e/mnist_raycluster_sdk_test.go | 4 ++-- tests/e2e/sdk-to-pod.sh | 18 ++++++++++++++++ tests/e2e/support.go | 9 -------- 7 files changed, 21 insertions(+), 67 deletions(-) delete mode 100755 copyscript.sh create mode 100755 tests/e2e/sdk-to-pod.sh diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 983fed4a..0f1aeeed 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -35,7 +35,6 @@ jobs: ls -lart echo "Initial status:" df -h - echo "Cleaning up resources:" sudo swapoff -a sudo rm -f /swapfile @@ -45,7 +44,6 @@ jobs: sudo rm -rf "/usr/local/share/boost" sudo rm -rf "$AGENT_TOOLSDIRECTORY" docker rmi $(docker image ls -aq) - echo "Final status:" df -h diff --git a/copyscript.sh b/copyscript.sh deleted file mode 100755 index cfbf30b9..00000000 --- a/copyscript.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -# command to be ran on local host and not in the pod -# kubectl apply -f temp-pod.yaml -# kubectl cp ./ default/temp-pod4:/mnt - - - -echo "starting" -namespace=$1 -# podname=$(kubectl get pods -n "${namespace}" -o custom-columns=:metadata.name | grep "^sdk") -# kubectl cp ./ ${namespace}/${podname}:/codeflare-sdk - -# kubectl cp ./ ${namespace}/temp-pod:/mnt - -sleep 60 - -# Get the name of the pod starting with 'sdk' and its status -read podname podstatus <<< $(kubectl get pods -n "${namespace}" -o custom-columns=:metadata.name,:status.phase | grep "^sdk" | awk '{print $1, $2}') - -echo "$podname, $podstatus, $namespace" - -# Check if the pod is found and is in 'Running' status -if [[ -n "$podname" && "$podstatus" == "Running" ]]; then - echo "Pod 
${podname} is running. Proceeding to copy files." - kubectl cp ../.././ "${namespace}/${podname}:/codeflare-sdk" -else - echo "Pod not found or not running." - exit 1 -fi diff --git a/tests/e2e/install-codeflare-sdk.sh b/tests/e2e/install-codeflare-sdk.sh index 0c574d4a..e7808582 100644 --- a/tests/e2e/install-codeflare-sdk.sh +++ b/tests/e2e/install-codeflare-sdk.sh @@ -1,27 +1,5 @@ #!/bin/bash -# Generate zip file beforehand -# tar -czvf root-dir.tar.gz --exclude='.git' --exclude='.github' --exclude='.pytest_cache' ./ - -# Create temp-pod to contain the zip file -# kubectl apply -f temp-pod.yaml - -# Copy the zip file to the temp-pod:/mnt -# kubectl cp root-dir.tar.gz temp-pod:/mnt - -# Run a shell in the temp-pod -# kubectl exec -it temp-pod -- /bin/sh - -# Unzip the zip file -# tar -xzvf /mnt/root-dir.tar.gz -C /mnt - -# Not necessary as the PVC is mounted to /codeflare-sdk -# mv /mnt/* /codeflare-sdk/ - -## Copy files from temp-pod /mnt into the codeflare-sdk volume mount in this other pod. -# kubectl cp default/temp-pod:/mnt codeflare-sdk - - cd .. # Install Poetry and configure virtualenvs diff --git a/tests/e2e/mnist_raycluster_sdk.py b/tests/e2e/mnist_raycluster_sdk.py index 067993a3..d46c6e4c 100644 --- a/tests/e2e/mnist_raycluster_sdk.py +++ b/tests/e2e/mnist_raycluster_sdk.py @@ -62,12 +62,11 @@ script="mnist.py", scheduler_args={"requirements": "requirements.txt"}, ) -print("Submitting Job OSAOUBDAUDDAOUBDOUBUBAD") job = jobdef.submit(cluster) done = False time = 0 -timeout = 500 +timeout = 900 while not done: status = job.status() if is_terminal(status.state): diff --git a/tests/e2e/mnist_raycluster_sdk_test.go b/tests/e2e/mnist_raycluster_sdk_test.go index e6049242..baaeb893 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.go +++ b/tests/e2e/mnist_raycluster_sdk_test.go @@ -208,14 +208,14 @@ func TestMNISTRayClusterSDK(t *testing.T) { test.T().Logf("Created Job %s/%s successfully", job.Namespace, job.Name) go func() { - scriptName := "../.././copyscript.sh" + scriptName := "./sdk-to-pod.sh" cmd := exec.Command(scriptName, namespace.Name) var stdoutBuf, stderrBuf bytes.Buffer cmd.Stdout = &stdoutBuf cmd.Stderr = &stderrBuf - // Run the script + // Run the script to copy the SDK to the pod if err := cmd.Run(); err != nil { t.Logf("STDOUT: %s", stdoutBuf.String()) t.Logf("STDERR: %s", stderrBuf.String()) diff --git a/tests/e2e/sdk-to-pod.sh b/tests/e2e/sdk-to-pod.sh new file mode 100755 index 00000000..7001b88f --- /dev/null +++ b/tests/e2e/sdk-to-pod.sh @@ -0,0 +1,18 @@ +#!/bin/bash +echo "Starting the script to copy the SDK to the pod" +namespace=$1 + +# Get the name of the pod starting with 'sdk' and its status +while : ; do + read podname podstatus <<< $(kubectl get pods -n "${namespace}" -o custom-columns=:metadata.name,:status.phase | grep "^sdk" | awk '{print $1, $2}') + echo "$podname, $podstatus, $namespace" + # Check if the pod is found and is in 'Running' status + if [[ -n "$podname" && "$podstatus" == "Running" ]]; then + echo "Pod ${podname} is running. Proceeding to copy files." + kubectl cp ../.././ "${namespace}/${podname}:/codeflare-sdk" + break + else + echo "Waiting for pod to be in Running state in namespace ${namespace}..." 
+ sleep 5 + fi +done diff --git a/tests/e2e/support.go b/tests/e2e/support.go index 547ed32c..cc55beeb 100644 --- a/tests/e2e/support.go +++ b/tests/e2e/support.go @@ -19,8 +19,6 @@ package e2e import ( "embed" - "io/ioutil" - "github.com/onsi/gomega" "github.com/project-codeflare/codeflare-common/support" @@ -35,10 +33,3 @@ func ReadFile(t support.Test, fileName string) []byte { t.Expect(err).NotTo(gomega.HaveOccurred()) return file } - -func ReadWhlFile(t support.Test, fileName string) []byte { - t.T().Helper() - file, err := ioutil.ReadFile(fileName) - t.Expect(err).NotTo(gomega.HaveOccurred()) - return file -} From d93f565e1a6e3e9983666c67b15d79970931e126 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 15 Dec 2023 15:28:45 +0000 Subject: [PATCH 097/496] Use common workflow to setup and start KinD --- .github/actions/kind/action.yml | 74 ------------------ .github/resources-kind/kind.yaml | 31 -------- .github/workflows/e2e_tests.yaml | 9 ++- .../codeflare_sdk-0.0.0.dev0-py3-none-any.whl | Bin 39456 -> 0 bytes 4 files changed, 8 insertions(+), 106 deletions(-) delete mode 100644 .github/actions/kind/action.yml delete mode 100644 .github/resources-kind/kind.yaml delete mode 100644 tests/e2e/codeflare_sdk-0.0.0.dev0-py3-none-any.whl diff --git a/.github/actions/kind/action.yml b/.github/actions/kind/action.yml deleted file mode 100644 index f76e60af..00000000 --- a/.github/actions/kind/action.yml +++ /dev/null @@ -1,74 +0,0 @@ -name: "Set up KinD" -description: "Step to start and configure KinD cluster" - -inputs: - kind-node-hostname: - description: "Hostname of the main kind node" - required: false - default: kind - -runs: - using: "composite" - steps: - - name: Init directories - shell: bash - run: | - TEMP_DIR="$(pwd)/tmp" - mkdir -p "${TEMP_DIR}" - echo "TEMP_DIR=${TEMP_DIR}" >> $GITHUB_ENV - - mkdir -p "$(pwd)/bin" - echo "$(pwd)/bin" >> $GITHUB_PATH - - - name: Container image registry - shell: bash - run: | - podman run -d -p 5000:5000 --name registry registry:2.8.1 - - export REGISTRY_ADDRESS=$(hostname -i):5000 - echo "REGISTRY_ADDRESS=${REGISTRY_ADDRESS}" >> $GITHUB_ENV - echo "Container image registry started at ${REGISTRY_ADDRESS}" - - KIND_CONFIG_FILE=${{ env.TEMP_DIR }}/kind.yaml - echo "KIND_CONFIG_FILE=${KIND_CONFIG_FILE}" >> $GITHUB_ENV - envsubst < .github/resources-kind/kind.yaml > ${KIND_CONFIG_FILE} - - sudo --preserve-env=REGISTRY_ADDRESS sh -c 'cat > /etc/containers/registries.conf.d/local.conf <> $GITHUB_ENV - echo "CLUSTER_HOSTNAME=${{ inputs.kind-node-hostname }}" >> $GITHUB_ENV diff --git a/.github/resources-kind/kind.yaml b/.github/resources-kind/kind.yaml deleted file mode 100644 index 4546589b..00000000 --- a/.github/resources-kind/kind.yaml +++ /dev/null @@ -1,31 +0,0 @@ -# --------------------------------------------------------------------------- -# Copyright 2023. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# --------------------------------------------------------------------------- - -kind: Cluster -apiVersion: kind.x-k8s.io/v1alpha4 -nodes: - - role: control-plane - image: kindest/node:v1.25.3@sha256:f52781bc0d7a19fb6c405c2af83abfeb311f130707a0e219175677e366cc45d1 - kubeadmConfigPatches: - - | - kind: InitConfiguration - nodeRegistration: - kubeletExtraArgs: - node-labels: "ingress-ready=true" -containerdConfigPatches: - - |- - [plugins."io.containerd.grpc.v1.cri".registry.mirrors."${REGISTRY_ADDRESS}"] - endpoint = ["http://${REGISTRY_ADDRESS}"] diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 0f1aeeed..c2da71c8 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -52,6 +52,13 @@ jobs: with: submodules: recursive + - name: Checkout common repo code + uses: actions/checkout@v3 + with: + repository: 'project-codeflare/codeflare-common' + ref: 'main' + path: 'common' + - name: Checkout CodeFlare operator repository uses: actions/checkout@v3 with: @@ -69,7 +76,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} - name: Setup and start KinD cluster - uses: ./.github/actions/kind + uses: ./common/github-actions/kind - name: Deploy CodeFlare stack id: deploy diff --git a/tests/e2e/codeflare_sdk-0.0.0.dev0-py3-none-any.whl b/tests/e2e/codeflare_sdk-0.0.0.dev0-py3-none-any.whl deleted file mode 100644 index 872bc4edb183aaa421edc093759008693ddbb814..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 39456 zcmaI6Q;;a(wlvtbZQHhO+qUi9#%|lTZQHhO+ugJ8#LPsT_~+b*RTcHHR@F9K;Z^;P50kN1YiRI;QrSEhORCa|K2{Vxov;QhV*Auzu&69 zEP6w#{xwBdi({R}>dLMmyjvGV)Nql>_K_%(E8*{|@7b;&v+x8O>H0cK71x+HS^m5A>$*gj} z+k%}$lz9R`RlS}`7mE;+!T`>f$YNNjl{H{OgWa^SstQ=aeRpT{I$OkSR-o07v#$j(0o z?62TSkK_3q#uej^OnejK%i<9lqOK|6Uj3D zH9ooz%?8%bBrb3#bwX#=EEuxD7qkJ=WUUKM()wCrq>3n`jw~VcUBPwfPK;X>rjT;d zsxkFUB0VV7>C&S`O2#1AB6j_wZ@5CA^zmBE2A$C|>U$9;F20Y5d{6YmRZwT-U)&Se9su&rmkLxk zM$YjQ7l!3NQvy9P9`q z))>Z>O!D(>M+Tt00K_M3nDNzJ(XQP0uklOdm;#+8c}D>!crKEsFF-7CMfp(k)cM(}Dy8D4?P_n#;&tJLXckcjl%Y|dulvq7%M(nq1jH4&x^_?6gdD4x3 z`_USDOaiBQVgWLl`1q!bjp^+s(8Ww;Ubh}#iz|k_$IMZ@%OV>W z(1nE@h+B+qhB@Q>`NVF+Bdvz1^gWOTm$@El9gs$HU{m1Wu?VR_02eVcIaso1&56ZL zF;6u}h;1y6{q0V8u&G^L)bwrjXSk7*Ka3=0-e}<$wb>!5uz!Sp-MxLJ7HjAc!VjUc z`}PzfHbYmX-~u1K7yAw=X%23yDMaK*WjQsxbAmqBlI)V02B?9&_GBUxUwt@&2_=e( z(S%)Q5w@5?7@eAP55H8COFWM|Jz+QEx;s#uv-JJpy`s7DP=#Sy1JmD8khf$2$SoZNU@}wC65fkcJPfQ%h8gysre;H4UI2ycM|gA z#OrhWWjYh||HO8){}e61!5d)eLvx6ny}=|#Go=vt0=zvUb&B97lCROLPd~L)h!znu z>a;`hE~^a*nw!C-Fo=i?&^II+4_nCsiZPQae@T3(QjO%<1NUuuw`E)4O4|wr4pKzLP<@ zfgm@Ho|MJ0KP^FfunJsQ2B86PdyB_>o6yokzgt&N6d)h>Yv}2R7D|=x$9)2xKu~z? zVGiqMrgh(d)c3-2U^!^F|0dL{?`ihzJV>WdBW!~F`9zpUOcJZ?9Cn7LX6gq#tT|`{ zcCKhDA(L2s0oOMtl*v|J+O7HVtMKZ%R-HBfyPhtPRpVTmtp;kRRN?`nmFy6jZ;3!p zm$h-t(PIAM&XCAv*1ICGi?gloMlh)P$k8dBIyC5HZBVk`=1axxN^zNWZ&`P5C&y0Y zTLu4P4lbDSs}ZehZBhwqeKbc#8^_B3-Az2w2^Dee9Q%svQg#90G80Yje**nsvkblQ zl6%35s=(h)2Gv-j7>_tU|hyE}V| zZ<}An7zShQ>h@I6B67-C{D&WN2}fI(&W~$rVG(B0nlV?ICAC@d;D&!;6Q@}|sLrThw5$z* z8#an4!Q8^wi5QV;!e1kl?^ifV%@(81{v5aZo-q{jQ2U)O%nKN?hBb;FE$lWQcTdPf zcz58oQ4l_+!)#A;he>0U480^GK~S@31{w{SSTl2gTt!PdP#v(_JagDDv&q|EK}5EO z;4E7K3E*NesD9QW9^t-J3}|2-0h&jmLzy86RCa538Dx??1pN#ICrb=X<_3C;11nLRbc0e_Ryfj)! 
zvk|H|s5L3VYfMW(^}pD3YQS?$tfF^XIgC_TmFP4DtgP))KE{vG7hHmhM>rY+{X;URl9Rd4K&|j?Sdhh@d)LdZtP^S0D z5_XGuoCDPk4d9RsT#>PA$19}Wgx}CpcPrx+Oo2DNPiRVv#`y!GJ}eM zW*>cbwdb(aOcO}+CXYSo5)&*Ui|dvbW(fO}pzQ{2CfR)2j05G=RhD}h*?`Mdc=QG{ zW{RymP|5?0;uZX1T5sNjM6}ul3p^B5D3J(?q)v)pl+r%B=Tmnqf;{&ayzKNZ@(haT z3nZzOa$4N<8ik63x9Ey?em3iPthaI}ND)K8p4o(CY@i3^#!GxuwLt|fJxYMBaq9!c zt*3B?M#!z$&qT_JQW=XEJsO?B>)^2C{SQk4HBwJxrxPA{=w~pJdrTiE8b4YwH%}E< zt~xHbrJ{{zlqwifXWblwTU^Jy7A>QX zHD>*sYCyU)!evPM*xdyBs$*V<>fa%kif?I%zD~@-L)nu;4@mn!*@<_H_}O+x;Y3DPMgoH z@oxE+0O~`c%a#v~XXc9wA2scvRcs}9E9j)@b*xt1bu)AO30!_=?(Uv1ZG*kZyI#~B zv%5;+qSm%5xps>udQ)%W+VX50HX85vQl30N!TYg}cP6=%S3|C!4ti#BfwM`ha;7hN zm<~qn@+;Bb@-G><68`AJngL%4eQW>Y!AG?B@}COtO0~$#s@(rvd+MqK@EZS4fH8gm z00jSa?fpOJx_^7`n%CNSQ!HWkoto?pJh7RDT|MRMotV2o2ThPqg}p1_HM z2n>)0ub`do_w_R86`F)}@~=B}S%avNnOBE*hqsrPSLp1RqGt8wmD_xR{cG=cO4iTa zV_McC<~yS;_$=7$*rknBB$%GdaMVhXu7Zs zNQ4zr@s#rn6jSMrP@wQC!bh6YV86753Q|qMlH-Pn#-=o-hKUW(OEG{5z#)4e5zoaE z1@4d!RG-Yu=Wu~E-pH0}xR)2TO4TeTo5O~l?McsRv)kwM`SJDPap=CZW>4*-CQAv~ z-+5EkIj=@COHc^eMQj0PdG(?yNiS_>3TtT*4^?wpwXGOQ<(W-YEoFbE$&8j6yN;EE z6cPAvHfn(0g+P%o^rI&tB7PjbEWIeekBxd3n0AOlPO3ldHF< zho@;wA8%2(SwCX_>>S)!NKidN_9tOg;vavLP@f`ZlMT>fG@!=>;w2TAjOj+oIn^z< zb6V2YW87+Jf-sdPJu}!CGvKpn0I3|>MmF1Z?AS67hVTLl2T1!8&($V{4HtI$l;bMk z9@$Btuu4N(GnX7x%z^krHxjKR_TEKHZn8>IHr+l90@@~vOhC23SyOd+N_?D+CLfnN z2$S-daZBliE=pJP$C@-Jmv{q=k8|%L7%do-n5Jtgpgpd*9TW*0EfK0|X9H57{*MAX zW--39pB#1*)fE_ncsfhq22dFRJeU*Lb`sl`A`URq?F6#YCZyU`R5nD=+WbgHPi4Xs zOFptx-DF^*(e)-=7?JD-pdY#hP6hBou4988cXkH3CQ!R@D}iU8&f{X&j+j66_GAn| zMlhz;zm)cLOOwQLu?5NBF0MJ|?7xI_v5Rm?vw^&ZI3RQ?>mps60OC$skz|gmX6-@{ zV70cZi4`+>@Qp$~($HF^wAodQrgW9?;APbn5J`5bgMXUv;KkgWi+lqmXe{84D+VK| z@?3RSa5xQGwzcY=l`=>kZc15)XR^7F=~AcDmec36>?(-sk7*L_Lc;OuQ+218Bve3f z^Wm`JDlfACk%k{2)*7TtOx}Ja4owVzT}pPT_5I2>oprPqX+*@ZhE$SAq;auGLws&9 z6754R7+;equdYuTOP-(wymdkxM3PEUpdd*i0!ThAV&?z}!{Cp}P)Q76-4~mia#RAAj)->5Ntvv(@ z*&IrLcFGtj&Rpq6e$?*)ZQCtFCDl-U8Fbjp2vAz6thhvuF_@!*r9Kg;d{qbv`QVU! 
zHwwh)Hb{oQVZ@!sLu}Q^HS}{oRWufV$T_6P{%A#@4(XqfsG zFuVbjbXP(@%nMO-fpBZAtn;Vih=O+{dRR$eto^2>pTD~B3gW|`Kp-^33qxZCrD-8l zJ4H{G+VTLTl4aJC+uqa*&ISCG5i;K3%!A_}`@8QCOrKfU`xFz@|)h z@<|H^eb4Q$vx&SgyzjDEEBC41EACzGr5G|vXAaAzuQW=kn-9*fRnjIA?&EG^9ju-h z8LO`pu-p(voxt*XNHKZetqE2o5iEAH+|FI}Hxx;2Qo?zH1#Q&m_NUyE`*6wAcs^;t zt8}*LZOSm}N`-+X@jC2kw}8nMVR8W-Odez3mH})O~5*FM%?IuNmb%J(Z~};WcATrB$M|r@frS^ zTlQB4;3YW1j7#02TLiAt>zn2_He(6xoh8Rxh{wZmNvfcnd88YyDX1!*BcEWIv{eMJ7$GG|ii zzSmUm+U}zGrXiMtR+p|B!R?Yct+3TdTICsn(4JuDh1~lW4E7bOo_ZxoBUHISG*z`T zhSeLsqDd~bYZ$OyC7Kb)zsIs1VL|qSML6Ia71Je<*j+&GRBW_>y565Xiw4=&ykn2n zFt{L`VUZ9H$Dlia)TJ15A->ul8`7 zNdUlyMxcP&zN2ue5?QX)&3d{m48m+p5dzwAv-9e(3JPGa?TrRTT^2&F$tZ+T@fO$E zqc^7sNwoI}MFPF3#*;?2S?2nU$P@k%k-X`}^6QM`mML72gv%Z!~Pq-%@tB&p2Ce zo(kB;pry*;TQV$P`T2Z}S*^FUOL5_sY9~w0K&S$%f>G{twd~8@@>jQMQr(niE|t{O zwPt(oy`2n)RLuZf&gUWRLah%%mFV8)e9-tMP_!`zUDdpc9WPzJiic)3J3iFU#m1U|A8Te==h_Uaxud|6&fo!RY_zulNkd$igp}U=x#RCVcc*BV#SM(f&he8TW1AB#?>M=PMTO|*!Au7%2tBfbpKnd_z2 ze&qrdN=5#-+<}@(-SrAc7pp~lFV{d#B=+%t7MGGlypjn76=OyGPS_okiyU;CN z)k;S(kzH+5&q@CH9yPZBsTirCz?*LtsJw+splS(S;aGsY59WEWBQ&gq7C{97_aEZTI`cS(l0m2UB5c{Ny~k>&De zn!=Rv&HjRn5N{==VZrWp4idwE6?p|1$k}|Ydaivob$-2*c%!Xt(3xZ7otYAqly|z71)oi zLJE~ygF?uOQ=}tXnuD2j3*j6qFC0W|LYz7!L=sslxiSf_B<1~PWA4mO7xyK$ihg_U zeK4&UCIly90UehUP0SM3*)9x?{7~K-Fm?<<$B8)b3+gCU4Cs5Ee-~6mb!H!OZZ9u4 zH}9vn#(4LJxV_)c@$>lc_hw-7gZJ@#8w%4ti?XX{vwV zYFbDVf0%PEoV@<+RQO>cUW+f|0H+s4?{vYH6Qi|p4H0PhV09iFjE3DBRP=Eg0LBNP zN@58s8DRh|t;h-GY7Ccd7|Veo#`om-`uz>jN3Xs7P7X*Vf7-I(S7H(Rq67b%4K_>Y zmt&o$$3_?WUK|$0N7s9PD>-vE)5$c9^s0oidoicmbuHZ;q7#~1D2wdA;VFQ@-oP+1 ziiW(z`cV-`G7F|xsBGlA5ka&~we?Nk65Wq5EFaA9l8o%rF80Ie ziPFS4ZPRME0WI*U{ZutDjHOt0TS(hRkU3Q$6HHD!`~852ugs`=Nagj;E_I z7{7rsU+ijEV!-6Z^~ZZup0l<&Gv)zh$1!Y&y7NGb(3+A=yO}SX#T0OAB5n)L5i$cr z3|e^YNz9^JLIdZ#QS*8#v@NeU1!{49QCg}lBlb-Lji-MzHt_*#O)N5sy)htShGz3& z4y%|LD)k|?+tcLyEQ|0;wFyCXBH(moNv_`^~g2!}_?jE^ob;Au&cV4AX zq`3CHpu@2PgfdHiGSA=p*3ZxWB4Nb0&$+>5#Hdq{rHMk(RQj49#^2) ziRJ7G&%cIct4-gVEm@H9SBM7D6v;pXaLNSsz4A#QVcViP!-JJJV{v`vc=>KH;xGqM zb$%x{xuRnW;`7VojwF-lR=EPX5>0}&U*oKi_`pX4@4f(!Km?n3Q! 
zYEpU>CTSBODvo+eAGKT_Z!cd>^dMsh>4%slLEXIc_P2}BPilHB?u%{YrSzc7wQ!8c?CVhCNk0S za%kLB0vV;y+cC$Hf|M{Ri>XALK6u!ma8D?<94D0<1&6f}uc~~!Oa@ImpCHkd(=VO% z-jS}fs$~da>rJI#!q!}dK2i{T79=N1aYJP|MzZ@-{@rX3fY;s3>Q}pD~lkMe@wc&GwG1Y_g-6Dt&iQrZF0NQk@OK z=rUX)ztNO;K4js#8>n=%A$*t%BHQ8KK1n=-Qv>OAS6?KLJE z0qqj{c-e23SskKksBpMu5CozIMNkI+I6A+bYtIoFsh))QE2yRiXIIHgZj-ILm#wmg zv&m-3x$477TbQNnOe?wXK=FL)^oP7#dxVrJ(GI~0hJqAb)$wMq8XKv&@UioG8&oiU zU40@daZU_U%o;^py_=~FC{e$uODcrOh12V5Vs$-Y{-7=vj~$#@8k3FVSF?d~Kx;rL zX+PenrWX`z;mGi4>TIz!Z%G2h6@EXBWhB7M{)BTxyqp~V+F3sdsB)*5BCNWZV;c>P z9Qs7)L|R~k+{&MGw*y&~9~v@@VF@M?u5a#@;!*7aIY z!qr!p#-7@3Y`lW|NFZ79yjzXrxEi?>KV>?&4~FS62($9yn3CTLe#o25xWPiZ0z|`L)GPc` zb~8Me{noTTQ@^#(PrJ=ayB)N!djF2z=<+&lIC@$?mBS%ez4<=bO^#9Y2i1Aq8Ag>a z){F|Di%NfK=%etfjLePN9vm2T6U@5U9aNXN!}E2a$4t>K5tfl`0*lRqIFI9o!>NWp zS@$6m$i9D{ZYokvwy&b%t|dBH2H1G!E!w*v^F{a=9ByO>%2vm(K5JoJ*-2UtWqeW7 zoW5g1LixmS@H&b`d5eU3+Fk1-%9-<#1{{q#?|9kJm;=Xx-z4+d6Fuh*5>t zefKFB$7p;nAQGzA(S174Xnja^gYKZJf3i4ien(g9c3HQc98T!=+*`^qs4D+im#z5R zx?6(CYOx;=EupVbt9P>L7~Bcr<}cu?wVY7h1a;JwZ| z+qNBlxe-2g(=R|<=uNCtNWd;I4Kiv$gH1Xgh;CW!o9;F+@fh&ny&&X7xA>2U8c!l~ z`c@cLS0YXbH4K%_z5~Ddx>zMwnksK5O1$Gf3*PMvp89_I%_9MO=Zsdlugq*?xdAlI zbyn6OTJ;hiZ&DXed@hS_^jFQp%i&Lr3gnnA5?$G~?Xzf7#83thRfVdc1KUmGx7;)y zjgbv%1~YThPblTl?51W<5_{th3!{P?aFg?sv8dP)QzI?Pp(}Qby{N?P{cRqfKkb-z zA=SdRwOicf2l8Z(=NE`3ex=T{+B4j@RY3cX)0rv;hmr@?R)o5IFM4EORI-xlQ{>vu z7E&D-izxcSTTxEj?l>e@6vLNQ_apExh3E{6=ALViL|5#Ld~=wo13h=H{N`@S{H~za zK@Tfl^kh#sOwvLvdaQf1U$5;<^V3Dl%=SQ@E-~XDNJL|VmDdo*5my<4;669wRaTpS z+uN@;Wx0IiBQ25XSGeXKc!W#2+>;n^U3{$|KU-ad@MVkeuF~vK zDTUSg;;&H4f7X(SrP2g0A`!7CocI_AbcvWR^N?`@Z?3GW@8dSjfQ!- zIy@Ifo79hD<=l?9n8bx}IbCmYFfyKNL9#A6=+(8TZS0-{8|?VJ+B$Ze9o=?;MX?t& z-51lqYoi<#j=9bNCkw%G=q~uB4soeAz6B5&?%J!KzdkNF)qMP}ai}-$B^2SS`ud5? zgN#;zMKj@%d1nL*V8g<%2zysKi5874WE)?HTg;rCb!E4_?Q$6NsV7i16@i=+-_n)b z_Kj9k4!Fb?l8OW=%S2yTV^=Y}(f@IJ4KU``0a+ZWZs7Lk9R_A?zs+iNdJPjJdz7jg z52;%IlQ}$nm-ELwv&AcHlba0b>S`Owr!h)-2nQ-}A?@chQnP#)0H=1zaK<*$pdXu{ z9*OWh9bd{>k(usSzhYh6?Rzbj)`DN`A;d?ZL!#=N8Y7jL#Ev_~t~YYx(e|NybSORV z!{5i!ks|&?9a+iost%xKX-{V1HOXbrEOm|15x8u5SHU#eEN}pp# z-vb0=E>SsWuAR*?b1E-nSc5Nu ze!>6Km1!qkQOXAf02l%P-^^%Zdpk2r^M9sHvFfJXAsd44sya+kDu~0;(puQSFpz7P zX4QPP?%EC=nkeDif`pNDJQclKaKY~mAt|R@0ubN@_#)2C@l<+btwDK)++#xnLucl+ zf$Q%hNK4uvxmjW585G|mOfg|VBBxay>Og@ZChp)Qh@Yr!TLOdh&E5v}(4exx*cv;}hUWceBE^wXo&d)*H27Se&iqbL*$7|YLbTZs-lZe1vV>6HKj zsI{P_L9v)u;{>oBYpYHQGDAfj?V^O9lU>ybxHn{|h8@bCxrSCiJCGyo)Z6fk`3>wiA< zjpxu-VvBGWfI=BwoF&5{=qv@jCYKsf(nYwI64zW_P5b>a3kW?qecZ%T z%%O#Rg3Nees=?NNi&{If;6bHnpkNGW>Q*P_WDClS#?qw0sS5-kXCn>rRGQ?H8c!vU z^~_V%Ya4e(f})&qDa>(&;)Xw(iUF}W6g5l}s*Jd6u?c$QVYsL-+`vp#g-T1e4`rDpmYe0d^^oj7S@D@7gUv=vNi*`jaJL& ziESepNuy!6xp))I-<Y*`8WS^Dk9OuKL1-6-n7s?S!hcIvzRbBMrl0y?c4AhG zjGNCs|B5X#B{B-OfER;c{xKPN0es!_KlSbK{73an9+q_)K;mRNwfz^#wfP6`YSrD? 
z4KW12SAE97RFMOR7`9|7)pEEuFST&V_xVNM{AS+1lPg_wcU=5mU-MNkV0W%^C6D^o z%yoOSo^(dvw6TZ0H#>WSCqtINv&B?E8ipWmv#y@iFid$x3m~fae4s{g2&t5X!e*E~ zeU%Jpz`}iS4|80v>hjkgcq5K6C@41oLRb~7h(PPL2OPj$Wg1uqz;c?2EarHkrL8O7 zv=kEwlvvsj85m-Fu^-axmQ%kuvMWDtgx&$OCO(DyZDzI|@Ym|g(iA;y;wtXXZ0NU0PGpGpC!wrfYl@p0 z`}j($<)LZ?OEOpDUw&y0^iDLrBgi@yubT+?T_0#JUwp1>Sa@aztGQ{_ z`;C?%ST0CfTz{OZ-)d6_e7iCtcgt#eJ6t>UDklSPikv$AzLO0js-`k8i8TU@J1Vy% zxyMK7S5`4Cs~Tmsw7Ahrf}m))z{0L3Djv&=y$awbSq*A!q3Aj)PdU8(0!ry6>4Ddk zSiVoQmlGMgpER2RfdTB%{RdFV4tBWM39X zvwyV4NyG+JWi*EM1BApTDQEOY*drlcM+u>yM6)g&=>uVog*YmaAYw86ohQ#^x}i9u z1mVCx+&v`RB<0=z1u%DDwIG5xWZD;T^&8N6JG5pT5DE{F*^8EaL`1u`q zPUF7o1Ti@Q;zNE0q{_90&<_d<^ao}s>WftpE!+R68pi;$(6OeEj>p+agy4} z%hQpqJ6%t&Z}ry1=|1L*wVMMQ5vnE7@=TaY;g3Q>KJGbAw3}>sejm|1)OZq@O+SzZ zI=nM~5s3_+Lcbv=NimTblDl%CmvbCaeVVCX2}QOjXHY+YnT&|omnMToI0@N0nW!Y_ z`^k2t^X=37J0-=}Xt+)Z#dxgPQ55Jp#L<{cW3gXL)LPY%H)w8l-HW~^B4>ER|_XIj*hPB~_36H=Z)dpPhaHn)dMZQT$x#v>@cBoaX zBO(6?o|{pXz{ADA<|a&JXwVIJ$3=ELg*1pyBOTXDRme0 zm!)JD=L3L1B=(&oL;rVy_?=aIUJ(@z5+I3 z%%bPp2rRP^^-TJ2_Q+eroxnmIYN8|EgblIyc{ac|ZIw=1;rl-t^s zdjtyoEjFN^g8&eZo+k-Ay5|u7``(Ms6IG!!b3e-_&Zq zOQZoq>6xp$65&yM&S`(H-*MOob!>7-<r)6RnD%^wxOOLDzEq`UHfmCT>$~Cc9q<=kYGq~ z1`KR>SF=E<6=%k^inV^36_@JnK}y4&v4|VhTqFHnTl-$MFTF_M^!!nF7AR~cbhL>( zz@M;d5~hm~OaD!P*Fz)Nh)?lkzzUGXSWT)wwe_cAa;dj zE{=nA5*{L9OcT_4QSL zUBJ(MEfvhgtRjHAX3ZhGH=AImx{5knrz)MgQd3kERXXT0qY89!Lq(WjXJO8F}3SBlZ2_^m~+J`0Gu|Fn(rDeu5n>wP&ib>sVr1c;io%RE@r)Fzsa z6ok)#bEJN~R7c*L&wl$tJKtSd{Ggg@PREpDw({*=sVL$H^sLpRdgqegUcpzrzDGgH zF;jJO+~LWOuSAPG!e6I6a9@ja)*8-eZI#u$pYX`JO4Z01 zy#+m#ezf4)L%EPE+_dXXUKm%GEYm+X6MI~yQg2?3_9PA*llB5P5UQTB)$Qj*Z#q5+ zT8MDtt!4FDGO#@3zUY?NBU@}tfdM1QF z$%UKcn)`T#)rW@}O}tICPtl7^tcoT|wS~F)A9}{+J5nQ2kNE1FX&~JK_96rjK|~Fg z(<|*yN|^E#SO(n%c!Tns9Qge7#s1w5;-*5xla*A$EM?M`UuXA_4*C1IgbdnLC=+Zo z;rFK4%ZnT@QEZOo8W{vBNx&N;h`ikpfy=8`)4(BP)C;@|7clomicVE2W{DxWgEOE( zF~9-31E0f+RapE)3jRDepG6i`*&`78uo}EJ^z~!!|9+%C+Lgt??gN6I=x<&C)-^nX zro>1eW99@G0Bszu_?Pf6ZjT~WGeXcjit37O+{QMv>Sq(=Vx(#U~=-`SaES^NNWUbzc z{|v0trVQjf0LZsf7l0F`QtM7kW%Ho!BSV?{AofSJWldE6ERO_Qmsx+NQB5Nk2T9 zFQTavS-(huJ6H@Y$g3s@2umph=~d=%W1uQ%dj?U7tXhDI-6VZ~TPKG$5SO#yC zA!}WE=x0h?zE07ZrkUq43 zR&_)f?X@Z6OKMI(?>OHy|4jOPp9?M$LV5zsgURFIR zyg}oJ!EJZQS%}6eI^fYtOL;n&(wi0WAF5Q$`SI@xKpW$Wros|WC51=Qa^i4S4h8}6 zyL+2D_FQ0V($s-#syDv82$zU&y8KN0gwUh}TK6VEv>)SW%cQV5A*C=85k6REEqNtN zkEH&d3K@8{Ib&m3n0H#R00atz>nhw~P3!NQDlc(}I3kM1qtJEGwoz6|JFwM)ohFcZ zO~sD@j>P=%$14>9nW@lC6-80-B)4Hxu=Mo!R!YQs$qbJmGn?kPodOYsObXcwsyoZW z_K1NbU*4Mo%m>c1nqv?)yKRNgmVV5NjhA#yU82G8xuWE@LoLpPgmAZN~)ge#xjkJ74UE)lOhw7(yffe-4CTxWUam!i)aDwVfuFMa2x^ zNWD@bYy%Zs6%87a*eSaC1TklArR7U|V}DZwo-hWO`XEG@gi8!)oC%FlsoD#4gJk)x zjIat4Wefx4#@i;S5=en=n64|X#k>^wG70NET?lyAaAODi#q=?e;oZo&=}lXA(aQe7 zP0?No!~JMtosLt!S>P{tXw8U?f;neQPDtoFXOI9*wTHzy^dk?4dQH`_aK-N1BkRrhb{?;N)mB1DgCGIK3QXiFf>srk&Ps*0$S$ma zS1a*@ob#Z7>R1U0l&={9M=V7Q*_maw_(ATB6CHf015)okuG3mb>*FhRD93VDLm*rW z%x4}%;RdyhyA(iCB@_=&QhrN=fI*;vciHB^ za}~8HQJ5)oXt#^jt^u9__J=(S=3dVsPQgKH;>{*yB)=q<5dd3ZhoaxFqH085WF@qp zu3)_2OF^3&Pf)onVl88z#FTG)8;ufToNpL-dU(t*P}u|-7gs@;Na)BaDaA%$eRdH= z(5!C9t{)-*>l%BVuHjkB5xVmh(Qi+M8S_eI`@4K_VXXoW;8=(A`je6Jp z%=dLLZxYF9Y>T02aDuj)Hw4z|*(L>HYH|7yiX&x0;rk9#Nt8;&_K9ISt=sxT^H?6v z$!N5^6~Rx=$%iHOpyK^b%7c^EF3fO+Q`F{GPqRYqI z@8M#v%Xmxsj6cNc^;&>DsCKyOPng#bGY+{+~Z@A3nc#&+D#zVv!su#0~}`KVs-k;+p?*We|vAo|bN(%{llz z-%-T`fvp-=e{K_7kM47>(@@KJL=H?a6^1S`0Eyq2U1Ia($Ezr?yEaAUPV$)|R3)>t zekkn(CtQE`*;iz-k4|%7zrd2jx%F@LfQuh8TLV}zV@DIM{XGMM+3R5-j$}Nn>DTJU zPXfBRrVonb>xYu39a7I558~(zmdeQ!zG;KaFJn zj<|bqRXTjlGiRCbz@NGnjc4oUCB{LSuuJI zQ8G8BuISIJQ0F$iF_OU%er4O>&J0wCk{b-@`SWe^Wj}IGHulS*mnW_H|&+hI5V2PicD? 
z*GCSa(tz3Jo-0nRqT*R&Q;F5GwM>ilXS|aw$37Fl5!b$SVgVe0(D)5o4Jx10H)8i`6$Ap8~ zen3Us;Prno_D;c~1>KhBwryKy+qP{RXWO=I+qP}nwryK={;Gbtefw5)tcZMDkC{1h zjQNcjo?MED{~g#a)1rGF(`yi8a`S`7q#Nyn1b<(m5suM4K2Bz8UI*_wbmQACwO9x0 z@RulA+#HvHDbZh_#isQo^h3B{wL8Q3wQ`5ZyD*?b)B2Yme%#i3$}J?~QV zoKo<%j;=-8XpRTm9BI^I*d{MR0kSiZ(R-8>;BjM_VRDL5?`H6y+9*hrRt--Aa{3)A z+WMza8MSl%4Swew()g*PHd61|dAsJQ2)=}`;G{?l z%Z9ncC33oP#U6S&y(~xp5A_0xnx>HC;fiGpI0uPNX1r`|w%WkZri(i7V*t!TOYJU! zecZXdelM0Y5`M}bJ-be8Lfd%7;zG&ObU=izEnY<+fny;m~)LCGR}c{f#o|`Xbof z3Y~tcP>O|fxp_&X!&yknyolkXaDVewGO2q78~LPub+bo#Y3)``q4SRr!vd-x@muWrStE13+@o7pbv=iI|1|bvj&Op{cVl&Zq zrOR_#Ie%Et^<0~{dIiEvHPCS^ba!u2mhVN<<1Z)VS3<-3%Fm!;%jJxe3XnVwgth$Y`rW0ZyRYz4CHM=2hNBCqH=l}`bVk(VN+Dbm;(6bY=={%Lr z;M|U%7|-H5&;E=KE6ghACNKu9ZG+?PKDsX3Lcu9<^3_H4)4LPX`-)0gAH@ktsBdl(O?OWQn)>Qc+B`R0|T~1jEn+ zpHOnnR+2tqy(w`HY;g`!McLrtvs#Bd#<@-jFkGOkwMX$;BN@l8|F?%aOn-dDeHLZroHyhnv$v-R(@Ly86T)Jj#Y^?Y8CT3fmO>O+& zXH+ySTv_{bZwvFv6Q@Yfg}q*UyJDEZg7&ByEaC<3GpMjantQTaWC*j^2pzjy#kED! z=oE?Lk7#2Np@NJVU7Vxv?!oh?-wv;xB!XIw=F;l4NHS)(eq1a70Ux$PLH#;u>T z0Xs3_zrEj%hD?Acg2r+zuC$@2K*hk%MOb&}9oN3xr zkHtf;+cXRf+)c%h&9pd(xe$cY?TZH)hPhwp2Tz-0K0*tf4^{&U<^DphWV4$J6%q%T z-_zpjXVc{`8m;n~n{RBH?6&jhmiLl?%)u=Hai#Gb{d0ObX$QJ3d+9yVYAWE74g;nY zBftNbxR?J3AN}MBTaZy=-L|R=GP`9L6 zzTa`&oqIe(0yzqr^z;;~tV9qcF9)hu6@(;m65XApVk)p{ox6!7>Zqq@k>03>E%I;I8yI)6#PLDs>>Ua)?| z@ge|8ZW$;dN^_AkW&cR=27e_Ze0B{qfi4MnEmOeHSfEr(~7h7WUsIgWXS0JcJhL0^zo z;9mh`Rs+b7WH~9uV*2A$<)bq=6y>S(rP4q1T`0|et!7XZO#V)mm(BOVw#(C#CE)SY z_Ow|^gX9Vkr34=+2}G_Tz}J8KuIE^+JDQtpmTbu+O<{I&wIURso-_b+(uZrsCbe^uSL zq;(?c_1(nnZjcei`MS%r94~REkSwp?rdlTP+UEeb^q(JYTrH2eHG#^HxZj)m zr7R=*10MBXK4Zw|_pv$zj@MvsQw7%vOSx1|q z{1pu|5`wK~NMC%PW_Vxvb1}DCI?I6bMHZxDcBL&#rha>)@lE`u^7k8{gP!>k+?t4U zW|9MqZ?}!Mj``lbi$h6I3A*9A658XDOJj7Ud;BVZDZwh^vzgvzb*Em}R4L}$i`8~> z2RVAjNi$T;OwoAU71a-XQ8^Z>LNY#Zz4v7L8ZD2}+|%!plk5v)zmF^gP1WNJm67DU6E;igL%^To zUDDo;M@yv>SBFLbfTdO(!T5>1$(9O3(0UY-l~x|*9cf_M$HRL%Zd=Kntm|KNPO#E2LD}8T}-e<(Ky@ls>bcZBxY-}?WXs^k^NnnY)8;_oPx{}ML@Dxncb2>1=qxzVc(}d56}frdADpETEp{8640{cY z$epc#g#{&Gv)0CB77ux$RtB58q0J3A3Ri-vWDL#Et26mZDQVJXXFNfW1#Qh+;WytI zZD*e~nVbER*!|+&ehK`O!s*fCCg^K$hmCeOF)+TI9#!i{PmS~4n8R;AvnGk<--R8l z#!m0dH!*s(gp#y%D5@0M_-kOyYDscbj``n6`%y$pbLtqA4J{S1Y`#pT%76ww}3o3r3LJIvyR_;L#fMBYc4WRevTWkGpI`1pv4+{Qq(P z{zrEHcmG~<|0Dd_6Lz1e?50A+S&*#BW5|oCtg}}W)9tUN8BtBiS&ALY36K)nOT=mL zTbgNiZF_CH023Pia(Q|>m%_6`0!8157V23YpmI#EW)f-WsvJ%4XR;lA&Gy2Lo1!J= znlF1Udy4o-5Tzu~$(u4zi8~LEoo9&N$?;+)>}n{Gxr^=ug%-u)0=-jUd~xxw9kFQ4 z!;U{hERg~uNJyYbOh9x-u5}Peh!%J?%OYopL~W5RqvnbNK>`lpBMI}?QaqK+`NFzF zcjCuGJnpKfRF8s#x3;zt9=D%s%X;J~qPR)A2?D81bIeYD`Qdpj_QLD*{wM z!sc7@QxrJzGZnXz&_U>+!Q`lL5P4~ls3g#Y7EPfq)6ih>hbJ~rCb;WxY$=`r&bxr` zp-+q3c?itV0X>{9Mm|Oo_Pu%h9G}Yhehc>G`22W%+)uuTLO+}{aeO}Ahd$Ds=JF0~ z#~wW$;vdnS;`?GCfprC#uWfhme*unzypI|FL;Q{4fFiXClxsASs1qR0CQ?{*7#pT+ z)#Cv<{!T}zC>~g(`02vPFZ@kbZaScWEf~;ZRcmay{<@ixO@f^6!%wN3~f1NQDVhXT(q@UE%(zhEShJGRh!iCLHo)=UzSprW$tCe=(O!nwB zx1+{+#>VnIbqUj+Il)lplVti`Ny{M5kknp+ol^C`_7$BcIc6mn0X$G}B6jjJ1F2LM z`uqYg))VvjpMY)r62$*7s;_*8`nwg4NI=b>-9Q74j_ucl;K3g4lFcnvfoF_>Ts>yX z(Rff%SE`x)(bx@SZ2tS;uvbGnRi5Jmq6(%BPL>YN+hvm3a0`2EDS>Rq=~HSx-RAX# z&BMMS=r(fH%(86hl+uO|BpBGxp{a%~kuOg$#%K2iGJKTt>j{Cy1cL>VKmDibp}ag|x)$2y-N$Nf0~*f9Q1EQeX4&Hx#TU%OHD8CT!t2b{ zafXl+n5<7_2lV!!J0@Pk0q{p4^a~UnGoQy%fFlge|Ju(QGp{j;cN(f4geC7bVXMFh|M77% z)aG9ho=BJ26Wq`N-8!B@nI7@Ske=93z7KcNp3O(%AzXAZa}6@|S&efNnB=IAuZbl=6d3jylR8IYMm{Hh0cFIJh2|cOBA#LR&qjiUS&WU zjT5$mksQn%y8X@s`tL}=uvMA>G6ESO;R5!32qqYDQK@C2^x=WZA~Kr$DIPiu;WFVU zy%yu_x9pFw!fI;rb{{5y@AgWaUz^E1)e~^5$7Ip|c0Rwgq!`T|tn!%9Yz)QLb^~_G 
zY~}p1m_=6gU@Xc6g}jBpsNgEMKQ@QPq0F(wsaS|t-^L|Fs$raOACGZR&+)gNQQm&i)`loU%+%}QZILaZj zNd#!(>bc{vCd$-~(M9dh6TV=su^3VSoT`n7c#Li&2qSj)KkEC|YG~L`gLj<(6=L=p zegcmZB)mEmZyG^i1R< zqO@i~Ma{pw6K>gl4x~-~#5?R_734qC&Sug?lf}=)M5O`M@1i?JFBf>S@ciC}-ydC? zv0QnC2#>gQ-U3%~@DkeLSNBfTVQ3#Mk^X3$a(M#pN?qYzP-Yne9e#~2ymG;^CW;a} zpmxUKjquz^w01hZWq`_tD{zSH7ofF(8R*h(j^>#?EUSvrg;>AvrCDR1WU5XSeO?gW zELo!A0KVK}gMzwN5vrs^zEJ|0D^$dGR{@&7-XW{BC#qam(Zt}0S4PD9XRu;jASLz# z7EC8Bv_DRoPA0-<sHpN*M4yk$6+Y{U>xPQ84*A_Gl|5x!++y!S%MSDM zu_4ps8kHGrCT%4(-on4`m9#7>!q^_4VY?swJ`|YYZr3xka1fv$1kC;Y8gHyS~xyF`7U@*YoJWNep+ z?;+wVidob>TZKDFheSkUyd0|?9`~+EguVh^&dLg-So*3;c)L}Vd9t*XpVeBQ zDxDsIm6On+qQ=mPjOB&_`r0cXPL}kOl3%ciab_xQRa?4i)8VH5OD2 zu=_4LUo|QkA}(ayFBU2q#EtooW)PG%5!OSTf!PhQ0{*oF2eWGu+tY4Nmr~>;<5!4% z%1K?L>DM6_mdZHnrRDs3JBXO~*P+_8-2=5=ah7JxL@~h)xxgz2S!f?g7b02z@thCA zj;_V6L9hP;4v)r#r{MbAp(&fYrVxJga#Q`9D%38^X6m)O59E9$Hr$X`!%55q&Z{u0>P8H{w!N^#T&?05szg6bJPcJSw?60yB@|eXkurOb_8E zwHNh_wU7`Kxb?eqY^uSEMR5B5_@v!rUbtg+b6&*(J8$*cG9J+brLY{_3mgSU+ZdaA zKkotO*0xDtcIB{ce69YRd5E!59xT<5dp@;&yfI#oTFm|{TL%)A)t|Kmf?qlGyl-FY z<<_!l4=c7jIl!9WEy#t5jg@i@flV9l)-^lO=i{X_C6vH?-49ZGMTgO#MPhkyZ`&@U z_Epk~56g%2C_ie?+cejF0WQd;4-qZus5VXV^>FkQh`J|CJ8G?31!h4S{GW+JlS`(M zQ~euEp>q^i-53J+uGei*E|HZFw}R)|25CIaPspVi+5y?On;W~XDm$M3`TfVYnHWVh zPlz2UIUeWm)T?_*UTMV?92_UC%=_X-Fjdmp+p~uv+e+gF3ApM05Nu6S zjp?j#X^3Z7a)&o0!v^}u&u}MF&U7FcCosvtS*ku$mNsstM9-^7PX*fa)#Z*F6F0^r z@0Bqshmm73x3?}ZZtz_E?jfiwY*mw5DtnhO@$51@rmr+myYWecT0ytISSEAnLS>@g zJ{=b{)A%PH{gg4mUexZ9geOt*eq6_iXg>V6gVuv4Y`1`FFv{AWsLSOIBj^{b3ZmL| zqG?-G<7SEVAp9gkQ{_;5FHl}Jn1Vbn>6`Cyd>}fyzX4S^vm_1VJ2}smkTEs8nQ3J- zFZ~rnjzQy?y$M1LOM3Z8C*hQMetrk+`YKKUv2cDZ*eHfOsl=Rz<0FvnzFuD|7BuCY zT&3{^hq$7bM7^6J9z z(lz*cWomMuM-H0Y^EkMg>RQp1VHoS*Z^e2a(a_H&$h`nf3dAA>fdLN#D!Y3~Ig#a( zz*tplA!H0jQwkJgRK7@X6cDoMx&~**u2Bcan97kL)PDPC;Sk~75^xI?YtrhrIl*9A zBenk;OfR9iu=MbGFmGV~&(X)r!%pU-3Uq=e>b}W0J^)4nWeU z28zv=OFzPNJN&%Bl90K~tL{`M-&se3!AkbXEMYEf2kPY_bM5s(yT^cCqnyW5$r1l3 zCecMVVt0&~=i(I_{MM_BEsHG8iko*GN0BMFYdz4@*0k2D_lu_wTFA#M03WrHKqYsT zHHPU?G-i$iBc4W8JwRL$qCa3Ar7EkR-cj64l+Rf%HNhK5*zTwn$=U2*KBq}h!THgwc*TTSnf=YzwPM%?)kQ`VU62HM1;GDbn!yW zdx2B6PKHH%Gq$_7%>ni5Mjir^KS;~}kio!SV9a7zq(fJ;k5DFlo+NPRaSiMaG{<(D zWZoFOG1hWWPdr`*N%R^x)W+IfO*-Ozl8*)oKrjYb*Y;!Lbo=rEaN5&B3lJ9&)Be!} z+X?_ESgRT~u)^u#y0*+LMDVC51v7JwPf7gz+#|pk)GMA3um|u2PYQbmbH-7(0pV^p z7{t9XZ!{(Ny6pB42AA zeOL+k-an}V`LGojSo+3-R{Lk!K{Y?!6Dp#qG(vm@-1J)#%CMk>0<-+wJDWfYi1>(bJa{wk4Ce_0QOmfTd zDzsruXz!ZeD^Dd)xfA!CiFG2Z&DR~%dJ@p%a}_F93$CpD=m%2-ODCC+^J9N?>Zfn;Wg9O4vmWJr!fAPa#T_|x=;=W zNktG?RWg&=+Yf8?2Vy5|_1Dz17yfS=N8s2|UbB{bCvTaGH!JB(r4!3*+15}UtIHdi z#g-;;j^f@Jr27mn3e`V!JDkcI!;>b}p?UVSxKj zfxn#e;3n#!#yAwY5^bO>d%edJC&yjb9guu*5QxKl<28<(CLpbgLQ6a1*r~4WvHBwu z1gb61KiL0O`9;zG)i?gf7$X03U&;Rm=hf2L&{)sF&RoyT*vih>;Xgc4w6e@U^Yve> z^*@q05|+XtZl?Q3Gu+znW>yHN`g*gfGKa7~~prh9(H0dk8s#P7AIs>Q2{c$JC zM!%#Z@ML;s;CuU<=Lr8-Qa>2g%aFAr+-47&P62Dfu0asiB{a^ts%X_Np%2<(FgfV+ z2~L!k059g?9zV!V9!}E*pq!dL5TqY=NtuoasGb_9XK8D+MEzYgRFjA@4r) zFvEfdu8)&Qxgo#|>K`-e&w_P6MJeZ2+kV_2(O0%{lP)S)-d3N1KGUOcDIOcq14GcA z7R0V4)DyFB)H1UQY2Y=_yIQB+Z`JNL#rEzX@S8)Rg4SxoSFm|CTJ~j++;UOf0zgtIb z3PzJj@}354E(ydfM$bX2e$Cj*bH-{AQ7&9aY0y9Kf>fwhf%eUn*N32xfHX(8K+||6 zoG>SJyC*D;Jiu6Gu(p#TR}cn8zEm?o)NJBXNiQ3~YQgiG|iz zLNY}al76v8r-PF~a5e9`i-lwOEPSsq{RH52sC1ebSsM|DZ4)OD9_Rh7$$B`$J9 z#=AaKxx=OUC*0h({q@PsIarj#J4f|FuOWpII=YGzW`0mb*3sk4F38Bu4P#5(bl-Ww z+JjPyNp||re@Q{hP zNp{k*I0#ItF-EUEP>_L?1xDiyVW|b7Y{=H}CuY{y(PNZ~on)ufE2RfH>7=(V;+vqG z!&|Xq5A}JBOYnEp@AAE?^l83H4KVjCk`LZ`9ykZPBJ`>-&V*iCJCGG-OBR#sAyW71 zJov1!?qgd087TYo6NvnJbME}kT*^De{w@0F|K-tO_WE2%fB^u|{G$ZP{)e4vYiDfZ 
zXl8EWq-Xp8Ovag3m-^2}?fFzgxG{h*OCSxxeTb4uP@?S{OTrJ4Wc#&rrcW= z5kctc2qTyxQpc3FNkb6?ay2JBjG&xaz}@^bfZ4AWLg8yfUSyW^%bbJ)l@k!8;giNF za%k2RsUs!?FIGgH`iqx(9>h7jVab*W~;cg1M-541_u7O$Qc_4W5NBPY3gCu!zs9*OsvO%tNzDRZ4Nur5;$iUNGn@ ztZ=WxV28p&P({`$N+|3^jkQAqvpS(J^R8b>zMh6-eRhtEJ{`M9f$zAA!~$> zoa8~DGTB45Nk;d_54>d(sXkF#+$iB;GIF-K!FG_IY9YlLC6TMjh+2y0@+SCriM*9M zn!D1o*4Ya{f|#;gE%L1h_LuyI?P^(3(@iHE>{QdSwq79Auf&|JTsUmN^DOI(oz16+ zRCs^Ic-DCRkuj1(IEuI}fXQ{VEp7#^kv4@9ee-k{KPec$8t!xyEa18+K;Yr91yxh? zc`|A+6sMygrkNUfO@3oce}I7a6;K$u^RP&U1yp~vHBCpoo(w4-oO#JnU}U|Q^I>~)RdD!GJOZwrRz}HsTR`Z@-u;%M0R=eD zZEbzu2$uSjrVzSB?@~1AuDZM&$kD2j@gdgrQ;T)UIF{nO*eB7MSd3;h^FZ1sOR^#0 z59OjP6}|GnnlCkXt|pKyT-32OHviV+0~qP;4K!bfILf?(Jn{IS><;sQ`}O%A$( zOKy$MYI8W=d2oD(Pzm$!n!X@qPY6!!A>>X;I2ySTS=g&6e4QvEF^0&FxksU|@DUHw zy4_Az!xy662sPj4Ow?$KQ=?;q-#2iACsOC&IgvlUfZ?7#%_be9<;Zc)n!gBadNaac zK!$$aRBF)s5+9G;s~(9puT}tEaRd8mM$=2iDcPM_jUuqz^AYMiWV>o)vx{2Hysm4UdJ1W_qrIzv%q%Y0)&Ab$OIfj| zN5ZUN<nQDhONy!q9g?fZISEhX9G}(bQ?dYRxW6dVzyftZH1`SCCd9Sx8zc9lh`R2FFE4Z| z54hNpeTc+Qqn?+MUc`(U0yI_6h7vxSD0W{c(f|i+U!G0s5BxXY&Yy2{9{4G4JU->` zZ?R1O;8;G$?Sfb^o;qcvvcD7M2#o(gK&xofm-GEYa%K0zv$}~#BL8(tTN_>CZMS4K z-It}@`-duCCKmRew)S=l`!B&n!+>f{j^ulcHibNz6*V{849jLa^-j>JfS!umiSI?2 zCB}35CZ~2^G`eQlr?>WEF8y9EY=A`>4>a*tmpmH{4>f30?*<^!I9B#Fx44Xe8U#m|e95SMuM`e_eOX(jZ zip%4T*vGE_wQRKEo$wg_S2BWQ0RRyH50{ahgRzs7yPlncxy^t2^_n&Q_3N=B{Oi{f z7{fQok2q+%Mu#8SBRjXKa_nH&Pyq24m~5IBL1T%CAcont-NpT*VB50jl8v@ zb35cR-X6v@zKSgZ_TBF%z4Lt$Z`neEUv}ZdrSlLEPA#cV7@Z=G)NJq= z=I?-rFqlv7Q(XVJM9e2K!CxX$t&DhaiUcwk0yjZb?!Q4ipwy~CVk8Kg>_^N{80+@U z^P@)uy!>9{ghPS~dnJANgPRo-Vh;Y#jS7@_IXO2jT)7tJ-h`{A`TNqce6llirHzxw zaq742(Sj)?!VP>%v;wOlqGm8apiY2>Ax>gJBu+G#b4mREt_f6l)jSDd$r}pe0LJ6U zr0+y3ES9JT{tGH#b?DJyOgZu6Q-rr8jpI|mQL~Ae@M^7WM~`nG9^BpO+B0LzEF8TV zy3#geYD#XNawCg$N2%ja1H>>^prduA>IGi}Bw}0*q*&G?>N0-G^gyHx&~Fjod1_6v z=Na+MZhskpoW-Y(QpnGyL58$|B*$8{6^hj~CEv4S4%xG@pUe_yWm^o>EqVhAAj3&_ z6~_l*tXuTYzDU<(`!$$@RO%QFvqQ~+Ecdqf)blub>S$@f-yOm)El~MYiW`stV&{NK z@x{|E%EuD~(t^TkgDnVX?%-MV?k1*3~v9Iqt#qD2D)0&`IC7x>PX)p#W>3 zKE%FN2}hX?4FtY}=&b$G4#ue~s6~^ZsOt(eUca@a@TnCBbQY=Ea=;}peNzyc-dx*x z7e&vVIHg(`J!Xj+LiG{LVbmG*An(j)ab4)mp;W|zQ+QfCZZ5xhs)c6T1G(FCp$zN8 zMjpmOcp0-F8Ka?2-^{lhB}SIyVlG#T0K-qYh+P@kXs=XOLpGpXi(OvvJTwa~L-pAa z1KMuQ)9SP#dlvr1cgz#MJFWqxfEG|CBNFN)SoMtnrV31`oR;}Z14K(-JyD~yIfL;1 zxViAAPtn!b?0R=)`L^NcxuuBMqc+4lwj%65HB z(4GNCEZHnOl|E15Fe`$I4~!DM1EVZt5oKdE4rR?SnyA*IfQ>JRu}m7>s#Hgf-18w8 zBLKiC6nwHDUw9O*c1#objSn#&hwN$UX^BJ)QzJfz)9pXtEY_%YP`vVR>qxz|hreTw ziR3liR5u1eYwCb(f&;k0nE>(~R*^eoF5ra!B+7eSH6UepG;`)Xw=iiTE#b!A&TCgd zS_H8;fou&wQa0-H^LBT`hc~^>tFD{2H7q|ELoG z05V;o6#rrQ+mGHED)511iZuTfSBf(mmN=^9Z((FEkvJ_V-(H1qopEx8R3C}4P%n>S z)8?3mI8If>{(2G?0J}K2R`MD6V)+Yxr8C!RMDf_%q6|wJTg3PzeOWIU%V9WdAGDBE}c{)8O+fi6%clY0^bOA#?=teYg4zh|jGR++i}Asi~d; zFnG7<+w(%ej3KQ}NRX!rR66v~#kPwV8L~TJD(e8PKuespnJj-*9ez;NGMmbk3jad5HSBp6aPR{t-Yt+aewhf z7we$+QlGV6cW(GW+}x=F=^S*s3L@u_Ue>6|)82HmvGF3#_QLL8TBzVJT{c{<@Ah?! 
z#V@{V_xFuw$)k$lM-}QKS!kX~3BBm*EW72fl~7ot5LE6mZ*0d=;~&D$o;Y2gupm_y z3u~(jZuX+aYg*^3en$&*o~lK>a+OUqLDK8zv4p(q^lX=hwg%u;0NZAeA|+|(98iSUVlL(ZoWVMV=&W_~ z(lF}Qdh@v*W;3gWRd*iB$O_8f_*8MhB4VYS8LV8>4cXLt%%;_ z8jQu7$U7^OaZAfvZR*;%^6KH&n9i3v^r)`moRu}tsILyP=iaoa`*G8WjWV@cA00*w z0jY%WiUC3o2&0Nn-+llJ%n;&fy0WQhPD3Q{)AQt<(#Sva_t@ZPS~^|LqL&wM|9H}j zFhD@fzB^9zq@&9*Z3{KjDt8zY81a}2QwMNcGmx1@wf4{XsW;DxI~&ddLQPZfY3SMx zrf)u|XnVZiO1D9eE9=jQ8n>Rq&qE^Dif@2&{|?U>-eT!Ai23&9P6z>yJXJh0KN9ZO9TsN$Ut&!Rp3n2_n9qc~ZOmi~hQP zsLGVENL;Nqo<3yXW(6bUzH2=-Z2h^{O|5Xe7-o|D5z;7?A@9lQr5)P+%O>fGpx5j3 zJ)IcoGC0@k{%p~^J4?B94;o7)BUx^l#fkA6=XcsOc=#nbyL56TH1%rzl|AH>0MnX~^Xc}P7vmp%k9_rG> z{fhov|G#LRg^Ig;!eVG{SRXDIRB9@j&5RkT2l$9?JQwOvwya3kkp#SNGf!Z|#40RY zJTK-)MH43!a@r?SIu=i3XuDicx_qRLLtk`5`huiAC$AZ){oFAi)ux6-t>BqK?``#) zF2mrpTNGUUF1b(1glsL7T80e%iC{39F&>*z&m`qbtg3Y?t%9>iTfN_(gcc@!JXS9R zW;h#w%0l=)8P#?IL}43ca8U}DHU}b}MW*&IQ>K|A(5hkI^>yPL=QnukhrCKeV~zOH zQ1m=J0j+C!#JSGp8HGNjEPJFlEm=Wp*v{m*fH>W2`=V#n)+Qs+VvqS_E=U?zT{#9j5p zdj>N9W(xW>2UDXoJrmLH0bD4w@La(_18xvbxfAlXpLv3E@d+}rnE0kw*%ep$08sH# z_=DLu9zyi-%xw1M?noJ8gAB7R7+q_X4AT2V)!+Jl>x@3 ztMZsRkcu*-jhu$d>l+vik|XSIXPG9Vhp5PMgrPL_1Wii|NdQhhsk+upme0!Fv+pVM zNc;-oWX@{+o8$KxGxS+uphD$5CATak6(8V|Z`3qVaRT5z=S?(dO`u6H%eh`Lx#tAS z|JPDOF`l9U3p3iI07X+QozwlBMzFp1-jv5D3z)&U@w))fdmva7>zUsGIaeM=EhijA zBE(v#a__Y?iLRjt1K}CgY8zzDm{7e%Ltqx@9KpOpxQFCVpkQu!vQ8jV9i zK)K|aVTTPWN+a-34jG-E>h&;nj`@kPeRpgfOr$a}q#3NAy}$0-Sjd5yt-X%ekQ5rEZl^hoap6|-c>o)CJoG!3K{~*DCzznf)HoY)G=;=aYF2*^j*5J`soNh z0Nygqt79yji`^u%T-W+egY-wcj( z)xvY%e0MXT)JorekYF^U2oy*ZXKjQXC_K3=k9c#=2rY^55$d|~VIO8nt7DIO5M^L{ zslVt5!)yMy$o756#WvnHXyR0(lg z9f)jPW4fnB%SqYIpihvv(F^%6$Kk&FwE3{C*$=3Qd+-<%f1jJK+yvMWT-$m!0zRZ4 zN+#ZEUd|%qFHa0L3V)60jsVK(OA3|1x{XnfR1aoB9D3|~#X+eYEG@GRD?I6jp`HU7RPIe8c+GbZzH>vq(0i zTP^X=3A^;{t6|HZ(H)cGJ%@;I5%E^wxnb+8#)=8cOIF%l!#&s?7a&~@J(NB0q%1U; zm1!Y|w?RrC;f4Bi#k)|4AvD8x3bh~DE}U!;3yUO-W~_JKO<+KoOux}K;nHH^4?y*2 zGo|5n2G5(n-*1RM4EaL~ z0ft$mZ5%cynA~qvz`=2N6$gc$d(@;X7pt3-eN!1w?igDuOH%`^CRL{0%hqm0@>!t-!QT%qE*lic`WXq{J*?;4ju#x32??4s zAF_Mb9zEpE+2UksR~WA?IYx;0j8O400>-!^W|l^B{$xlM|reI<5Ksq26AytfEg9+WjX*3Y!9Pa9)fR-wlU7Q`QK{f6wi?U()pcqDq^(6ZPfIS zC{_{4%#4iTLF8(2-t5S7;Rm}*tBh0>_V(%w<^8b^`xI$qtV1)#l<@2c1#)`(vDT%l zw@iQ=6=Y~9$NT(1%8N1c3q)2*mX=-;hE9U;$LK+Z$f*#Gb&SjL`G3WH+u=PyX!#CV zF7Gc}b$Y2rkqH_-bwIf#stvyT_XJEE#mVk0zOTmY((R8w$H4AAc$QxGTPbJ+yU0i;`@K-Gri*uD%3$s^C5wjjmiFtG@9bE-B7jClMxc3 zQ{f6EowemX;v4zeF+w6iMF)o+uv9i%i#jCOSmaVy(I54AoD;`gQWZLRH5@FQ*0T3S z_MR;ESEN%-fcM5>P3l#q`$5BQZSX@M?VVq@$9-Kd?X7JZ)z`_(=C=09+GE?D@Zk&7 z``6^-rpm<}f-nKngZqlF{^X7XgfQn@_W3rZFnqD;8ZWyf#kb8Ad~C@xAle|8Tg< z_o2Omb*}q7RJ=|GoH{L|if)^S^?h=RiIQ7VS;%OtnLkw zVS3=U?z|V)o+KIvkIQ~C!hCOQMoXfX3R*{~0Ec3FU-Z&foZR!r+F%aMv|4LS*3ucW z=GU(h9pu@kfvXrb`6_;l07pprmP~8lx+~Vk;I6P;_7sjT+jAsNZVLhj5|iQY%!K=K zs%Qco?q^VxKHsyYa~4Y6$K0gP5(S+^XJjOOqXZu{ese#PlpQy!pow~OZh=R>^rH|@ znjkE?uEk-9O-A(SF>_Z_L%(0K>Cls}j~Je=2~1{1l;y`@lot4 z^W%qHOSxy`&N?~uxExj{@(yszU<-&gIH{HpB+3EA&a9}O-4e4NGs6*or0*a{f5+n) z3)`dfr4!Q{%@RgqaJUOA}OKSZxK?p;Yjx4vrQU(CK;^l7S>Pu&of72Z!iNv-4vzhLp&!`q-; zOd~#Wzv~^vR~7PBP$UpS6oByA#$vT^o~5aCNj%FAz~zdRLo_$n6a~Pjr~h@&CnW9U z&5+m95o#h#nKUf5Jd?8(+EZ}#-GtBfQ zM5>@&EgetrebHw=!VN3k;C`g=@e~^|baF#+i4a=FaU1T(Y%wuW+Gs|6ezw;$EZMu> z)rDi@93sXV2lQB%%sY$od}5rF(%VPaj5Uuts9Zw4ufazGxHeqV8o9w3G#$ZX!aWUx zRoiF?sIh=9QRfSjw6r+7x*hrcCvW%R63hW@*zQ{B000RI0Kfvjc7OXjyPSlQh`5N7 zNSPW;^H+|4*{AExpVD>`6uVt22eqhnq-1nFJeaWFMV)_v9Q7e6kxmT|+w<+He!c{d z6S$HzZNv~0#^zle>bo*of?1CjXF7B6-8ns6={Sgz?tD;FCm2swnc*PX0xxNcTZ43q z{)mI1q;?&e|Dd26uX^1w-Q4N;f~fnQ4(M}fY)1JY2Nd6~IXg&0Z90C?miJw`8RK&( 
zR6lc+>VDaNlT~Hiu|-12RKqD`5lFM-Ys3Mfulh2Spzh6Q_D2YU)*a9JM!)G&()if- z_xXYQO5h2K1Y)h&t0{<>bm+Yl{gk_^OuxzO*1HJ| z^^3CUU>6cRQ#xOsWOTdO>%4dFmbUY`{>8={;MRlbydj`ndpS7bBzjkH^u(FF$8lV3 zDw~;~>oQK-wMd^~M|?^{6fx-%xo7g>7b`~5@g|JsaxfUDPBxX!&9~V%n<4<(NWo%+ zlfmzF7U|Y`Rf0X7)$L+s+*N*;hZ8tAyO3AIw(S;T7x~l!5O+roec(#uV0OXw8a+YF zPxNg2;5bM-M3OAn$kDf-8M@X4(cmV2LYW4gS`yxO9RJXjS6L!9a%`2g>PRSPkc}xt z=t4N_?nt0RZdRnR}=UAT#Y6Xf0| zo9Se?;+dDuo!xia7T2AHa?)92F}u{D2^|d> zqI$HO@+gOk=YN`6)LM!+6mX*5A-6=1;fF&qV1T}R6}5L zR)4bT(jASLBxuk7qBFS9vgVE{)?zJhyDUeeYScqe=GgGSim!a0vHuU}u*1PN=Z2ZPKCI2%Z!XmH+$#UtT8;x9*D`N*a3GWG|MG`MVg| z7l&t0zeO4{)gp0cS6$dS9Ze7KuDGUN+Nz{%Ba4zQPEt;!S|}ImPLq#%7CG-B9}nE$ zOpuv2Zc@}KLFcTK;NG=!p+$M#0aiMVMsRdJ3Q8GoTQXv|B~1CzF~s2$`>1oWGOE{Q z+4K12m{eJD=+Vq3awB9HJP?P19x2|B%MfxU$M0p-K;0Zl&yB3yj=RhWJs^O`Udf33nj`Umb|*0A1#_39by=5yRt2}b@A<-;qdG%5KBQ)m29gaI z{_Do=T5F~2chHT2qX?iHe7UNNha;Fc`sp99+^=Y?DUM@7`RVd=eBJxd!|1>eISq9a zOySUIo$0&`^j0YabqV~%{ zh+|!Z)q{=@4>qeD{TY3|3?)LA+%)xN9M9>6*e8eiv2`POv%rIVC#{d_H?m6Z9&rMz zIKO$$7L&8JtFKs5w(&h{N!0Okjr%gya{f`fjeRUWE9!-rdG>BWzvPyLZJ-(-5=a@A zsxpYS8vCKePpbVGFQ9Z0Kb(Y|JaoVLZcuEwEPqmHMP%&?DWQXv_$=Rx1c#kUMDI&p zCnJY0QSy6zjb#EvEioXD(`0Q)f&!OSvDJX*&s>9rYEOHY9*hJFv|_$ITG>2pPNtQw zbK$nl$WE+;^a&#$6f@EX+Wp~{O-H@LCTYl4Qyt@kA>}flZd$j|)`)JCvpn1o>5myc zoFJ~7T_D=$J9>!&{J8p#cZcQPCx@oA;LB4ebJ$uqPowO2ex{0weYV3Puyld%`WQh^ zpirYa0DT05TkV;%BUm6lN(o0#>)E|yJDn_KhyW0hj8Gz+a4bN;c6>{{hW>A;(9YmNMOAKg@X zClXI>-_w5*qKHzC8Qj%6ndL06WGzW<6&EQlOTw*T>zGfG2Kuq8C4Xj%A`TJs?1=zR z>b|upzP_;NDPOr`2il&Q{g}D46X3cff^cT`EF0l$qyinZ{3FQbSr63z-g!OwtEpx* zlAx&)gYzOPU1qb-T!W#YN;I!ry>&q^cAUb|mP-BrcH~lbnSRhx+vf<^8u2}*kK3I- zr`+uV$eU{tvpRXBR-bv@bEe|;=Nf9ge`Kb+#=J*MSTyjGTtG}-*f1Q;r(V=}QQ$%N z_*_e#1}8i$MM0FRbFKNfTGiAB=o?CWYW~`tQ0}W$TLB`|Z1<|nbQU#Cw*tbip2Mzx zTwO0+r;6CrpKbZaDz&>%m5?i+PPg=wv*Q@5W*VZwslX~o=555~-udLp=vj4zRA4#= zSVZf@v|FIc+frxXhrq=8W_{85yiSkqr(A-oIh!x0-%|aI)AA#sZVNeF*JtGxQDIa( z7ZYEH4%IoYgFnt=^e(_h`m%4FB_2pXG)Xa zSvbESUS}ZuR)iW>+{i$R#jAyzj>eFPRqr{wvHh++QKwg5g&v9=xORD@tRO3K*IJxt<#Aj#($il)eIy4_j`jFKg`;cYXDZx?;npX&~bSHQ8Jc&Q#W z)}6iG5zfMzz0O&2BLAsHyB$-9^cX}vRn7r|b_H_%XkUva3+?(r#bhHqeB62E^Zp;m zYX}dd&6SLH8YCbr0;)}{VDj3y%PV=g4IKq}^sHzHdRjUW`z>dCWl7)#aFD z2I?5ciqS0!#x9lDvi#Cr1hK+H@jhi-U?QJ4GtNXL;KDNkmP)&p3}gS%)C4!iN5?!! 
From 1f1886ec136c6e4097e33d31c15dad394e5d038d Mon Sep 17 00:00:00 2001
From: ChristianZaccaria
Date: Wed, 3 Jan 2024 11:05:23 +0000
Subject: [PATCH 098/496] Use cp package directly

---
 go.mod                                 | 30 +++++++++-
 go.sum                                 | 67 +++++++++++++++++++++++
 tests/e2e/mnist_raycluster_sdk_test.go | 64 ++++++++++++----------
 tests/e2e/sdk-to-pod.sh                | 18 ------
 tests/e2e/support.go                   | 76 ++++++++++++++++++++++++++
 5 files changed, 207 insertions(+), 48 deletions(-)
 delete mode 100755 tests/e2e/sdk-to-pod.sh

diff --git a/go.mod b/go.mod
index 76ca0575..6bb665dc 100644
--- a/go.mod
+++ b/go.mod
@@ -9,15 +9,26 @@ require (
 	github.com/onsi/gomega v1.27.10
 	github.com/project-codeflare/codeflare-common v0.0.0-20231110155354-042fb171fcdb
 	github.com/ray-project/kuberay/ray-operator v1.0.0
 	k8s.io/api v0.26.3
 	k8s.io/apimachinery v0.26.3
+	k8s.io/cli-runtime v0.26.3
+	k8s.io/client-go v0.26.3
+	k8s.io/kubectl v0.26.3
 )

 require (
+	github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect
+	github.com/MakeNowJust/heredoc v1.0.0 // indirect
 	github.com/aymerick/douceur v0.2.0 // indirect
 	github.com/beorn7/perks v1.0.1 // indirect
 	github.com/cenkalti/backoff/v4 v4.1.3 // indirect
 	github.com/cespare/xxhash/v2 v2.1.2 // indirect
+	github.com/chai2010/gettext-go v1.0.2 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/emicklei/go-restful/v3 v3.10.1 // indirect
+	github.com/evanphx/json-patch v4.12.0+incompatible // indirect
+	github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d // indirect
+	github.com/fatih/camelcase v1.0.0 // indirect
+	github.com/fvbommel/sortorder v1.0.1 // indirect
+	github.com/go-errors/errors v1.0.1 // indirect
 	github.com/go-logr/logr v1.2.4 // indirect
 	github.com/go-openapi/jsonpointer v0.19.6 // indirect
 	github.com/go-openapi/jsonreference v0.20.1 // indirect
 	github.com/go-openapi/swag v0.22.3 // indirect
 	github.com/golang-jwt/jwt/v4 v4.4.1 // indirect
 	github.com/golang/glog v1.0.0 // indirect
 	github.com/golang/protobuf v1.5.3 // indirect
+	github.com/google/btree v1.0.1 // indirect
 	github.com/google/gnostic v0.6.9 // indirect
 	github.com/google/go-cmp v0.5.9 // indirect
 	github.com/google/gofuzz v1.2.0 // indirect
+	github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
 	github.com/google/uuid v1.3.0 // indirect
 	github.com/gorilla/css v1.0.0 // indirect
+	github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 // indirect
github.com/imdario/mergo v0.3.12 // indirect + github.com/inconshreveable/mousetrap v1.0.1 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect + github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.2 // indirect github.com/microcosm-cc/bluemonday v1.0.18 // indirect + github.com/mitchellh/go-wordwrap v1.0.0 // indirect + github.com/moby/spdystream v0.2.0 // indirect + github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect + github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect github.com/openshift-online/ocm-sdk-go v0.1.368 // indirect github.com/openshift/api v0.0.0-20230213134911-7ba313770556 // indirect github.com/openshift/client-go v0.0.0-20221019143426-16aed247da5c // indirect + github.com/peterbourgon/diskv v2.0.1+incompatible // indirect + github.com/pkg/errors v0.9.1 // indirect github.com/prometheus/client_golang v1.14.0 // indirect github.com/prometheus/client_model v0.3.0 // indirect github.com/prometheus/common v0.37.0 // indirect github.com/prometheus/procfs v0.8.0 // indirect + github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/spf13/cobra v1.6.0 // indirect github.com/spf13/pflag v1.0.5 // indirect + github.com/xlab/treeprint v1.1.0 // indirect + go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 // indirect golang.org/x/net v0.17.0 // indirect golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect golang.org/x/sys v0.13.0 // indirect @@ -59,12 +85,14 @@ require ( gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect - k8s.io/client-go v0.26.3 // indirect + k8s.io/component-base v0.26.3 // indirect k8s.io/klog/v2 v2.100.1 // indirect k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f // indirect k8s.io/utils v0.0.0-20230505201702-9f6742963106 // indirect sigs.k8s.io/controller-runtime v0.14.6 // indirect sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect + sigs.k8s.io/kustomize/api v0.12.1 // indirect + sigs.k8s.io/kustomize/kyaml v0.13.9 // indirect sigs.k8s.io/structured-merge-diff/v4 v4.2.3 // indirect sigs.k8s.io/yaml v1.3.0 // indirect ) diff --git a/go.sum b/go.sum index c9b425cb..d85b8eb7 100644 --- a/go.sum +++ b/go.sum @@ -31,8 +31,12 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= 
+github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ= +github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE= github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -41,6 +45,7 @@ github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRF github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -55,6 +60,8 @@ github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghf github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chai2010/gettext-go v1.0.2 h1:1Lwwip6Q2QGsAdl/ZKPCwTe9fe0CjlUbqj5bFNSjIRk= +github.com/chai2010/gettext-go v1.0.2/go.mod h1:y+wnP2cHYaVj19NZhYKAwEMH2CI1gNHeQQ+5AjwawxA= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -65,12 +72,16 @@ github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWH github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= 
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= +github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= +github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153 h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc= github.com/emicklei/go-restful/v3 v3.10.1 h1:rc42Y5YTp7Am7CS630D7JmhRjq4UlEUuEKfrDac4bSQ= github.com/emicklei/go-restful/v3 v3.10.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -80,12 +91,21 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.m github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/evanphx/json-patch v4.12.0+incompatible h1:4onqiflcdA9EOZ4RxV643DvftH5pOlLGNtQ5lPWQu84= +github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww= github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= +github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d h1:105gxyaGwCFad8crR9dcMQWvV9Hvulu6hwUh4tWPJnM= +github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZMPRZwes7CROmyNKgQzC3XPs6L/G2EJLHddWejkmf4= +github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8= +github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= github.com/flowstack/go-jsonschema v0.1.1/go.mod h1:yL7fNggx1o8rm9RlgXv7hTBWxdBM0rVwpMwimd3F3N0= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fvbommel/sortorder v1.0.1 h1:dSnXLt4mJYH25uDDGa3biZNQsozaUWDSWeKJ0qqFfzE= +github.com/fvbommel/sortorder v1.0.1/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w= +github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod 
h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -148,6 +168,8 @@ github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4= +github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= github.com/google/gnostic v0.6.9 h1:ZK/5VhkoX835RikCHpSUJV9a+S3e1zLh59YnyWeBW+0= github.com/google/gnostic v0.6.9/go.mod h1:Nm8234We1lq6iB9OmlgNv3nH91XLLVZHCDayfA3xq+E= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -176,6 +198,8 @@ github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 h1:K6RDEckDVWvDI9JAJYCmNdQXq6neHJOYx3V6jnqNEec= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -183,6 +207,9 @@ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 h1:pdN6V1QBWetyv/0+wjACpqVH+eVULgEjkurDLq3goeM= +github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= @@ -191,6 +218,8 @@ github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1: github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.12 
h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= +github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/itchyny/gojq v0.12.7 h1:hYPTpeWfrJ1OT+2j6cvBScbhl0TkdwGM4bc66onUSOQ= github.com/itchyny/gojq v0.12.7/go.mod h1:ZdvNHVlzPgUf8pgjnuDTmGfHA/21KoutQUJ3An/xNuw= github.com/itchyny/timefmt-go v0.1.3 h1:7M3LGVDsqcd0VZH2U+x393obrzZisp7C0uEe921iRkU= @@ -276,6 +305,8 @@ github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.5/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0= +github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= @@ -290,6 +321,12 @@ github.com/matttproud/golang_protobuf_extensions v1.0.2 h1:hAHbPm5IJGijwng3PWk09 github.com/matttproud/golang_protobuf_extensions v1.0.2/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= github.com/microcosm-cc/bluemonday v1.0.18 h1:6HcxvXDAi3ARt3slx6nTesbvorIc3QeTzBNRvWktHBo= github.com/microcosm-cc/bluemonday v1.0.18/go.mod h1:Z0r70sCuXHig8YpBzCc5eGHAap2K7e/u082ZUpDRRqM= +github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4= +github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= +github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= +github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae h1:O4SWKdcHVCvYqyDV+9CJA1fcDN2L11Bule0iFy3YlAI= +github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -297,6 +334,8 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod 
h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 h1:n6/2gBQ3RWajuToeY6ZtZTIKv2v7ThUy5KKusIT0yc0= +github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00/go.mod h1:Pm3mSP3c5uWn86xMLZ5Sa7JB9GsEZySvHYXCTK4E9q4= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= @@ -322,6 +361,8 @@ github.com/openshift/api v0.0.0-20230213134911-7ba313770556 h1:7W2fOhJicyEff24Va github.com/openshift/api v0.0.0-20230213134911-7ba313770556/go.mod h1:aQ6LDasvHMvHZXqLHnX2GRmnfTWCF/iIwz8EMTTIE9A= github.com/openshift/client-go v0.0.0-20221019143426-16aed247da5c h1:CV76yFOTXmq9VciBR3Bve5ZWzSxdft7gaMVB3kS0rwg= github.com/openshift/client-go v0.0.0-20221019143426-16aed247da5c/go.mod h1:lFMO8mLHXWFzSdYvGNo8ivF9SfF6zInA8ZGw4phRnUE= +github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= +github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -365,7 +406,10 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= @@ -373,6 +417,9 @@ github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMB github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/cobra v1.6.0 h1:42a0n6jwCot1pUmomAp4T7DeMD+20LFv4Q54pxLf2LI= +github.com/spf13/cobra v1.6.0/go.mod 
h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= @@ -380,6 +427,7 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= @@ -393,6 +441,8 @@ github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= +github.com/xlab/treeprint v1.1.0 h1:G/1DjNkPpfZCFt9CSh6b5/nY4VimlbHF3Rh4obvtzDk= +github.com/xlab/treeprint v1.1.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -405,6 +455,8 @@ go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 h1:+FNtrFTmVw0YZGpBGX56XDee331t6JAXeK2bcyhLOOc= +go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5/go.mod h1:nmDLcffg48OtT/PSW0Hg7FvpRQsQh5OSqIylirxKC7o= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= @@ -544,6 +596,7 @@ golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20191002063906-3421d5a6bb1c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -575,6 +628,7 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -616,6 +670,7 @@ golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -771,6 +826,8 @@ gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= +gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -782,12 +839,18 @@ k8s.io/api v0.26.3 h1:emf74GIQMTik01Aum9dPP0gAypL8JTLl/lHa4V9RFSU= k8s.io/api v0.26.3/go.mod h1:PXsqwPMXBSBcL1lJ9CYDKy7kIReUydukS5JiRlxC3qE= k8s.io/apimachinery v0.26.3 h1:dQx6PNETJ7nODU3XPtrwkfuubs6w7sX0M8n61zHIV/k= k8s.io/apimachinery v0.26.3/go.mod h1:ats7nN1LExKHvJ9TmwootT00Yz05MuYqPXEXaVeOy5I= +k8s.io/cli-runtime v0.26.3 h1:3ULe0oI28xmgeLMVXIstB+ZL5CTGvWSMVMLeHxitIuc= +k8s.io/cli-runtime v0.26.3/go.mod h1:5YEhXLV4kLt/OSy9yQwtSSNZU2Z7aTEYta1A+Jg4VC4= k8s.io/client-go v0.26.3 h1:k1UY+KXfkxV2ScEL3gilKcF7761xkYsSD6BC9szIu8s= k8s.io/client-go v0.26.3/go.mod 
h1:ZPNu9lm8/dbRIPAgteN30RSXea6vrCpFvq+MateTUuQ= +k8s.io/component-base v0.26.3 h1:oC0WMK/ggcbGDTkdcqefI4wIZRYdK3JySx9/HADpV0g= +k8s.io/component-base v0.26.3/go.mod h1:5kj1kZYwSC6ZstHJN7oHBqcJC6yyn41eR+Sqa/mQc8E= k8s.io/klog/v2 v2.100.1 h1:7WCHKK6K8fNhTqfBhISHQ97KrnJNFZMcQvKp7gP/tmg= k8s.io/klog/v2 v2.100.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f h1:2kWPakN3i/k81b0gvD5C5FJ2kxm1WrQFanWchyKuqGg= k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f/go.mod h1:byini6yhqGC14c3ebc/QwanvYwhuMWF6yz2F8uwW8eg= +k8s.io/kubectl v0.26.3 h1:bZ5SgFyeEXw6XTc1Qji0iNdtqAC76lmeIIQULg2wNXM= +k8s.io/kubectl v0.26.3/go.mod h1:02+gv7Qn4dupzN3fi/9OvqqdW+uG/4Zi56vc4Zmsp1g= k8s.io/utils v0.0.0-20230505201702-9f6742963106 h1:EObNQ3TW2D+WptiYXlApGNLVy0zm/JIBVY9i+M4wpAU= k8s.io/utils v0.0.0-20230505201702-9f6742963106/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= @@ -797,6 +860,10 @@ sigs.k8s.io/controller-runtime v0.14.6 h1:oxstGVvXGNnMvY7TAESYk+lzr6S3V5VFxQ6d92 sigs.k8s.io/controller-runtime v0.14.6/go.mod h1:WqIdsAY6JBsjfc/CqO0CORmNtoCtE4S6qbPc9s68h+0= sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +sigs.k8s.io/kustomize/api v0.12.1 h1:7YM7gW3kYBwtKvoY216ZzY+8hM+lV53LUayghNRJ0vM= +sigs.k8s.io/kustomize/api v0.12.1/go.mod h1:y3JUhimkZkR6sbLNwfJHxvo1TCLwuwm14sCYnkH6S1s= +sigs.k8s.io/kustomize/kyaml v0.13.9 h1:Qz53EAaFFANyNgyOEJbT/yoIHygK40/ZcvU3rgry2Tk= +sigs.k8s.io/kustomize/kyaml v0.13.9/go.mod h1:QsRbD0/KcU+wdk0/L0fIp2KLnohkVzs6fQ85/nOXac4= sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE= sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E= sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= diff --git a/tests/e2e/mnist_raycluster_sdk_test.go b/tests/e2e/mnist_raycluster_sdk_test.go index baaeb893..9d4dc862 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.go +++ b/tests/e2e/mnist_raycluster_sdk_test.go @@ -17,9 +17,9 @@ limitations under the License. package e2e import ( - "bytes" - "os/exec" + "strings" "testing" + "time" . "github.com/onsi/gomega" . 
"github.com/project-codeflare/codeflare-common/support" @@ -81,22 +81,9 @@ func TestMNISTRayClusterSDK(t *testing.T) { }, } - // Create cluster wide RBAC, required for SDK OpenShift check - // TODO reevaluate once SDK change OpenShift detection logic - clusterPolicyRules := []rbacv1.PolicyRule{ - { - Verbs: []string{"get", "list"}, - APIGroups: []string{"config.openshift.io"}, - Resources: []string{"ingresses"}, - ResourceNames: []string{"cluster"}, - }, - } - sa := CreateServiceAccount(test, namespace.Name) role := CreateRole(test, namespace.Name, policyRules) CreateRoleBinding(test, namespace.Name, sa, role) - clusterRole := CreateClusterRole(test, clusterPolicyRules) - CreateClusterRoleBinding(test, sa, clusterRole) job := &batchv1.Job{ TypeMeta: metav1.TypeMeta{ @@ -208,20 +195,39 @@ func TestMNISTRayClusterSDK(t *testing.T) { test.T().Logf("Created Job %s/%s successfully", job.Namespace, job.Name) go func() { - scriptName := "./sdk-to-pod.sh" - cmd := exec.Command(scriptName, namespace.Name) - - var stdoutBuf, stderrBuf bytes.Buffer - cmd.Stdout = &stdoutBuf - cmd.Stderr = &stderrBuf - - // Run the script to copy the SDK to the pod - if err := cmd.Run(); err != nil { - t.Logf("STDOUT: %s", stdoutBuf.String()) - t.Logf("STDERR: %s", stderrBuf.String()) - t.Logf("Failed to run the script: %v", err) - } - }() + // Checking if pod is found and running + podName := "" + foundPod := false + for !foundPod { + pods, _ := test.Client().Core().CoreV1().Pods(namespace.Name).List(test.Ctx(), metav1.ListOptions{ + LabelSelector: "job-name=sdk", + }) + for _, pod := range pods.Items { + if strings.HasPrefix(pod.Name, "sdk-") && pod.Status.Phase == corev1.PodRunning { + podName = pod.Name + foundPod = true + test.T().Logf("Pod is running!") + break + } + } + if !foundPod { + test.T().Logf("Waiting for pod to start...") + time.Sleep(5 * time.Second) + } + } + + // Get rest config + restConfig, err := GetRestConfig(test); if err != nil { + test.T().Errorf("Error getting rest config: %v", err) + } + + // Copy codeflare-sdk to the pod + srcDir := "../.././" + dstDir := "/codeflare-sdk" + if err := CopyToPod(test, namespace.Name, podName, restConfig, srcDir, dstDir); err != nil { + test.T().Errorf("Error copying codeflare-sdk to pod: %v", err) + } + }() test.T().Logf("Waiting for Job %s/%s to complete", job.Namespace, job.Name) test.Eventually(Job(test, job.Namespace, job.Name), TestTimeoutLong).Should( diff --git a/tests/e2e/sdk-to-pod.sh b/tests/e2e/sdk-to-pod.sh deleted file mode 100755 index 7001b88f..00000000 --- a/tests/e2e/sdk-to-pod.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -echo "Starting the script to copy the SDK to the pod" -namespace=$1 - -# Get the name of the pod starting with 'sdk' and its status -while : ; do - read podname podstatus <<< $(kubectl get pods -n "${namespace}" -o custom-columns=:metadata.name,:status.phase | grep "^sdk" | awk '{print $1, $2}') - echo "$podname, $podstatus, $namespace" - # Check if the pod is found and is in 'Running' status - if [[ -n "$podname" && "$podstatus" == "Running" ]]; then - echo "Pod ${podname} is running. Proceeding to copy files." - kubectl cp ../.././ "${namespace}/${podname}:/codeflare-sdk" - break - else - echo "Waiting for pod to be in Running state in namespace ${namespace}..." 
- sleep 5 - fi -done diff --git a/tests/e2e/support.go b/tests/e2e/support.go index cc55beeb..9ef8e176 100644 --- a/tests/e2e/support.go +++ b/tests/e2e/support.go @@ -18,8 +18,21 @@ package e2e import ( "embed" + "os" + "path/filepath" "github.com/onsi/gomega" + "k8s.io/apimachinery/pkg/api/meta" + "k8s.io/apimachinery/pkg/runtime/schema" + "k8s.io/apimachinery/pkg/runtime/serializer" + "k8s.io/cli-runtime/pkg/genericclioptions" + "k8s.io/client-go/discovery" + "k8s.io/client-go/rest" + "k8s.io/client-go/tools/clientcmd" + clientcmdapi "k8s.io/client-go/tools/clientcmd/api" + "k8s.io/kubectl/pkg/cmd/cp" + "k8s.io/kubectl/pkg/cmd/util" + "k8s.io/kubectl/pkg/scheme" "github.com/project-codeflare/codeflare-common/support" ) @@ -33,3 +46,66 @@ func ReadFile(t support.Test, fileName string) []byte { t.Expect(err).NotTo(gomega.HaveOccurred()) return file } + +func GetRestConfig(t support.Test) (*rest.Config, error) { + const GroupName = "" + var SchemeGroupVersion = schema.GroupVersion{Group: GroupName, Version: "v1"} + kubeconfig := filepath.Join(os.Getenv("HOME"), ".kube", "config") + restConfig, err := clientcmd.BuildConfigFromFlags("", kubeconfig) + if err != nil { + t.T().Errorf("Error building kubeconfig: %v", err) + return restConfig, err + } + restConfig.APIPath = "/api" + restConfig.GroupVersion = &SchemeGroupVersion + restConfig.NegotiatedSerializer = serializer.WithoutConversionCodecFactory{CodecFactory: scheme.Codecs} + return restConfig, nil +} + +func CopyToPod(t support.Test, namespace string, podName string, restConfig *rest.Config, srcDir string, dstDir string) error { + ioStreams, _, _, _ := genericclioptions.NewTestIOStreams() + copyOptions := cp.NewCopyOptions(ioStreams) + factory := util.NewFactory(newRestClientGetter(namespace, restConfig)) + if err := copyOptions.Complete(factory, cp.NewCmdCp(factory, ioStreams), []string{srcDir, podName + ":" + dstDir}); err != nil { + t.T().Errorf("error when completing all the required options: %v", err) + return err + } + if err := copyOptions.Validate(); err != nil { + t.T().Errorf("error when validating the provided values for CopyOptions: %v", err) + return err + } + if err := copyOptions.Run(); err != nil { + t.T().Errorf("could not run copy operation: %v", err) + return err + } + return nil +} + +// restClientGetter interface is used to get a rest client from a kubeconfig +type restClientGetter struct { + ClientConfig *rest.Config + ConfigLoader clientcmd.ClientConfig +} + +func newRestClientGetter(namespace string, clientConfig *rest.Config) restClientGetter { + return restClientGetter{ + ClientConfig: clientConfig, + ConfigLoader: clientcmd.NewDefaultClientConfig(clientcmdapi.Config{}, &clientcmd.ConfigOverrides{Context: clientcmdapi.Context{Namespace: namespace}}), + } +} + +func (r restClientGetter) ToRESTConfig() (*rest.Config, error) { + return r.ClientConfig, nil +} + +func (r restClientGetter) ToRawKubeConfigLoader() clientcmd.ClientConfig { + return r.ConfigLoader +} + +func (r restClientGetter) ToDiscoveryClient() (discovery.CachedDiscoveryInterface, error) { + return nil, nil +} + +func (r restClientGetter) ToRESTMapper() (meta.RESTMapper, error) { + return nil, nil +} From 0ff7dbc57394f2981c41ee341dbf9871427c933d Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 3 Jan 2024 11:16:45 +0000 Subject: [PATCH 099/496] Upgrade to go1.20 --- .github/workflows/e2e_tests.yaml | 2 +- go.mod | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml 
b/.github/workflows/e2e_tests.yaml index c2da71c8..51b01ba1 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -68,7 +68,7 @@ jobs: - name: Set Go uses: actions/setup-go@v3 with: - go-version: v1.19 + go-version: v1.20 - name: Set up gotestfmt uses: gotesttools/gotestfmt-action@v2 diff --git a/go.mod b/go.mod index 6bb665dc..08f9e651 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/project-codeflare/codeflare-sdk -go 1.19 +go 1.20 require ( github.com/onsi/gomega v1.27.10 From 3ed1b42418fe82b13cd9e8fadf3882f4827838ef Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Thu, 4 Jan 2024 09:42:24 +0000 Subject: [PATCH 100/496] Address requested changes --- .github/workflows/e2e_tests.yaml | 6 ++++++ src/codeflare_sdk/templates/base-template.yaml | 16 ++++++++-------- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 51b01ba1..a89f5c21 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -117,6 +117,12 @@ jobs: echo "Printing KubeRay operator logs" kubectl logs -n ray-system --tail -1 -l app.kubernetes.io/name=kuberay | tee ${CODEFLARE_TEST_OUTPUT_DIR}/kuberay.log + - name: Export all KinD pod logs + uses: ./common/github-actions/kind-export-logs + if: always() && steps.deploy.outcome == 'success' + with: + output-directory: ${CODEFLARE_TEST_OUTPUT_DIR} + - name: Upload logs uses: actions/upload-artifact@v3 if: always() && steps.deploy.outcome == 'success' diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 8f4575a0..8e6fd0e9 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -16,21 +16,21 @@ spec: custompodresources: - replicas: 1 requests: - cpu: 1 - memory: 4G + cpu: 2 + memory: 8G nvidia.com/gpu: 0 limits: - cpu: 1 - memory: 4G + cpu: 2 + memory: 8G nvidia.com/gpu: 0 - replicas: 3 requests: - cpu: 1 - memory: 4G + cpu: 2 + memory: 12G nvidia.com/gpu: 1 limits: - cpu: 1 - memory: 4G + cpu: 2 + memory: 12G nvidia.com/gpu: 1 generictemplate: # This config demonstrates KubeRay's Ray autoscaler integration. 
From 56c1d121d35c32de6ac9d64574b5ec48794b40c6 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 13 Dec 2023 10:18:07 +0000 Subject: [PATCH 101/496] Fixed get_cluster() method --- src/codeflare_sdk/cluster/cluster.py | 27 +++++++- .../templates/base-template.yaml | 2 + src/codeflare_sdk/utils/generate_yaml.py | 4 +- tests/test-case-no-mcad.yamls | 2 + tests/test-case-prio.yaml | 2 + tests/test-case.yaml | 2 + tests/unit_test.py | 62 ++++++++++++++----- 7 files changed, 81 insertions(+), 20 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 11cf5fdb..4f24663d 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -492,7 +492,7 @@ def torchx_config( to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc, mcad=True): + def from_k8_cluster_object(rc, mcad=True, ingress_domain=None): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -532,6 +532,7 @@ def from_k8_cluster_object(rc, mcad=True): ]["image"], local_interactive=local_interactive, mcad=mcad, + ingress_domain=ingress_domain, ) return Cluster(cluster_config) @@ -685,7 +686,29 @@ def get_cluster(cluster_name: str, namespace: str = "default"): for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: mcad = _check_aw_exists(cluster_name, namespace) - return Cluster.from_k8_cluster_object(rc, mcad=mcad) + + try: + config_check() + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(namespace) + if mcad == True: + for ingress in ingresses.items: + # Search for ingress with AppWrapper name as the owner + if cluster_name == ingress.metadata.owner_references[0].name: + ingress_host = ingress.spec.rules[0].host + else: + for ingress in ingresses.items: + # Search for the ingress with the ingress-owner label + if ingress.metadata.labels["ingress-owner"] == cluster_name: + ingress_host = ingress.spec.rules[0].host + except Exception as e: + return _kube_api_error_handling(e) + + # We gather the ingress domain from the host + ingress_domain = ingress_host.split(".", 1)[1] + return Cluster.from_k8_cluster_object( + rc, mcad=mcad, ingress_domain=ingress_domain + ) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" ) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 8e6fd0e9..0e6ef09c 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -293,6 +293,8 @@ spec: namespace: default annotations: annotations-example:annotations-example + labels: + ingress-owner: appwrapper-name spec: ingressClassName: nginx rules: diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 95c17cc2..af1b9ece 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -129,9 +129,10 @@ def update_dashboard_ingress( raise ValueError( f"Error: 'port' is not of type int for ingress item at index {index}" ) - if ingress_option["port"] == 8265: + if ingress_option is not None: metadata["name"] = ingress_option["ingressName"] metadata["namespace"] = namespace + metadata["labels"]["ingress-owner"] = cluster_name if "annotations" not in ingress_option.keys(): del metadata["annotations"] else: @@ -161,6 +162,7 @@ def update_dashboard_ingress( else: 
spec["ingressClassName"] = "nginx" metadata["name"] = gen_dashboard_ingress_name(cluster_name) + metadata["labels"]["ingress-owner"] = cluster_name metadata["namespace"] = namespace spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ "name" diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 484636bc..77f90f89 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -142,6 +142,8 @@ spec: apiVersion: networking.k8s.io/v1 kind: Ingress metadata: + labels: + ingress-owner: unit-test-cluster-ray name: ray-dashboard-unit-test-cluster-ray namespace: ns spec: diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 70b68e97..b6d820ae 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -175,6 +175,8 @@ spec: apiVersion: networking.k8s.io/v1 kind: Ingress metadata: + labels: + ingress-owner: prio-test-cluster name: ray-dashboard-prio-test-cluster namespace: ns spec: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 920459c4..e96fa89e 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -172,6 +172,8 @@ spec: apiVersion: networking.k8s.io/v1 kind: Ingress metadata: + labels: + ingress-owner: unit-test-cluster name: ray-dashboard-unit-test-cluster namespace: ns spec: diff --git a/tests/unit_test.py b/tests/unit_test.py index 7ad0d08d..b217b281 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -397,7 +397,7 @@ def arg_check_apply_effect(group, version, namespace, plural, body, *args): with open(f"{aw_dir}unit-test-cluster-ray.yaml") as f: yamls = yaml.load_all(f, Loader=yaml.FullLoader) for resource in yamls: - if resource["kind"] == "Route": + if resource["kind"] == "Ingress": assert body == resource else: assert 1 == 0 @@ -414,8 +414,8 @@ def arg_check_del_effect(group, version, namespace, plural, name, *args): assert group == "ray.io" assert version == "v1alpha1" assert name == "unit-test-cluster-ray" - elif plural == "routes": - assert group == "route.openshift.io" + elif plural == "ingresses": + assert group == "networking.k8s.io" assert version == "v1" assert name == "ray-dashboard-unit-test-cluster-ray" @@ -623,7 +623,13 @@ def ingress_retrieval(port, annotations=None): serviceName = "dashboard" mock_ingress = client.V1Ingress( metadata=client.V1ObjectMeta( - name=f"ray-{serviceName}-unit-test-cluster", annotations=annotations + name=f"ray-{serviceName}-unit-test-cluster", + annotations=annotations, + owner_references=[ + client.V1OwnerReference( + api_version="v1", kind="Ingress", name="quicktest", uid="unique-id" + ) + ], ), spec=client.V1IngressSpec( rules=[ @@ -1148,6 +1154,11 @@ def get_ray_obj(group, version, namespace, plural, cls=None): return api_obj +def get_named_aw(group, version, namespace, plural, name): + aws = get_aw_obj("workload.codeflare.dev", "v1beta1", "ns", "appwrappers") + return aws["items"][0] + + def get_aw_obj(group, version, namespace, plural): api_obj1 = { "items": [ @@ -1403,21 +1414,34 @@ def get_aw_obj(group, version, namespace, plural): { "allocated": 0, "generictemplate": { - "apiVersion": "route.openshift.io/v1", - "kind": "Route", + "apiVersion": "networking.k8s.io/v1", + "kind": "Ingress", "metadata": { - "labels": { - "odh-ray-cluster-service": "quicktest-head-svc" - }, + "labels": {"ingress-owner": "appwrapper-name"}, "name": "ray-dashboard-quicktest", "namespace": "default", }, "spec": { - "port": {"targetPort": "dashboard"}, - "to": { - "kind": "Service", - "name": "quicktest-head-svc", - }, + "ingressClassName": "nginx", 
+ "rules": [ + { + "http": { + "paths": { + "backend": { + "service": { + "name": "quicktest-head-svc", + "port": { + "number": 8265 + }, + }, + }, + "pathType": "Prefix", + "path": "/", + }, + }, + "host": "quicktest.awsroute.com", + } + ], }, }, "metadata": {}, @@ -1788,10 +1812,14 @@ def test_get_cluster(mocker): side_effect=get_ray_obj, ) mocker.patch( - "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", - return_value=True, + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + side_effect=get_named_aw, + ) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval(port=8265), ) - cluster = get_cluster(cluster_name="quicktest") + cluster = get_cluster("quicktest") cluster_config = cluster.config assert cluster_config.name == "quicktest" and cluster_config.namespace == "ns" assert ( From bece39cfe60b0587fe89646a982144d41771aff3 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 21 Dec 2023 11:04:06 +0000 Subject: [PATCH 102/496] Added logic for getting clusters with routes --- src/codeflare_sdk/cluster/cluster.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 4f24663d..22c3b6a4 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -691,6 +691,7 @@ def get_cluster(cluster_name: str, namespace: str = "default"): config_check() api_instance = client.NetworkingV1Api(api_config_handler()) ingresses = api_instance.list_namespaced_ingress(namespace) + ingress_host = None if mcad == True: for ingress in ingresses.items: # Search for ingress with AppWrapper name as the owner @@ -705,7 +706,11 @@ def get_cluster(cluster_name: str, namespace: str = "default"): return _kube_api_error_handling(e) # We gather the ingress domain from the host - ingress_domain = ingress_host.split(".", 1)[1] + if ingress_host is not None: + ingress_domain = ingress_host.split(".", 1)[1] + else: + ingress_domain = None + return Cluster.from_k8_cluster_object( rc, mcad=mcad, ingress_domain=ingress_domain ) From 0feab0f459175c03d0df632d447b7ba7a0363eae Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 8 Jan 2024 14:48:17 +0000 Subject: [PATCH 103/496] Fixed get_cluster for ingress_options --- src/codeflare_sdk/cluster/cluster.py | 93 ++++++++++++++++--- .../templates/base-template.yaml | 1 + src/codeflare_sdk/utils/generate_yaml.py | 26 +++++- tests/test-case-no-mcad.yamls | 1 + tests/test-case-prio.yaml | 1 + tests/test-case.yaml | 1 + tests/unit_test.py | 60 ++++++++++-- 7 files changed, 157 insertions(+), 26 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 22c3b6a4..4a5f69fb 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -492,7 +492,7 @@ def torchx_config( to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc, mcad=True, ingress_domain=None): + def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -502,6 +502,10 @@ def from_k8_cluster_object(rc, mcad=True, ingress_domain=None): "volumeMounts" in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0] ) + if local_interactive: + ingress_domain = get_ingress_domain_from_client( + rc["metadata"]["name"], rc["metadata"]["namespace"] + ) 
cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], @@ -533,6 +537,7 @@ def from_k8_cluster_object(rc, mcad=True, ingress_domain=None): local_interactive=local_interactive, mcad=mcad, ingress_domain=ingress_domain, + ingress_options=ingress_options, ) return Cluster(cluster_config) @@ -692,27 +697,55 @@ def get_cluster(cluster_name: str, namespace: str = "default"): api_instance = client.NetworkingV1Api(api_config_handler()) ingresses = api_instance.list_namespaced_ingress(namespace) ingress_host = None - if mcad == True: - for ingress in ingresses.items: - # Search for ingress with AppWrapper name as the owner - if cluster_name == ingress.metadata.owner_references[0].name: - ingress_host = ingress.spec.rules[0].host - else: - for ingress in ingresses.items: - # Search for the ingress with the ingress-owner label - if ingress.metadata.labels["ingress-owner"] == cluster_name: - ingress_host = ingress.spec.rules[0].host + ingress_options = {} + for ingress in ingresses.items: + # Search for ingress with AppWrapper name as the owner + if ( + "ingress-owner" in ingress.metadata.labels + and ingress.metadata.labels["ingress-owner"] == cluster_name + ): + ingress_host = ingress.spec.rules[0].host + if ( + "ingress-options" in ingress.metadata.labels + and ingress.metadata.labels["ingress-options"] == "true" + ): + ingress_name = ingress.metadata.name + port = ( + ingress.spec.rules[0] + .http.paths[0] + .backend.service.port.number + ) + annotations = ingress.metadata.annotations + path = ingress.spec.rules[0].http.paths[0].path + ingress_class_name = ingress.spec.ingress_class_name + path_type = ingress.spec.rules[0].http.paths[0].path_type + + ingress_options = { + "ingresses": [ + { + "ingressName": ingress_name, + "port": port, + "annotations": annotations, + "ingressClassName": ingress_class_name, + "pathType": path_type, + "path": path, + "host": ingress_host, + } + ] + } except Exception as e: return _kube_api_error_handling(e) - # We gather the ingress domain from the host - if ingress_host is not None: + if ingress_host is not None and ingress_options == {}: ingress_domain = ingress_host.split(".", 1)[1] else: ingress_domain = None return Cluster.from_k8_cluster_object( - rc, mcad=mcad, ingress_domain=ingress_domain + rc, + mcad=mcad, + ingress_domain=ingress_domain, + ingress_options=ingress_options, ) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" @@ -762,7 +795,10 @@ def _get_ingress_domain(self): # pragma: no cover return _kube_api_error_handling(e) for route in routes["items"]: - if route["spec"]["port"]["targetPort"] == "client": + if ( + route["spec"]["port"]["targetPort"] == "client" + or route["spec"]["port"]["targetPort"] == 10001 + ): domain = route["spec"]["host"] else: try: @@ -949,3 +985,30 @@ def _copy_to_ray(cluster: Cluster) -> RayCluster: if ray.status == CodeFlareClusterStatus.READY: ray.status = RayClusterStatus.READY return ray + + +def get_ingress_domain_from_client(cluster_name: str, namespace: str = "default"): + if is_openshift_cluster(): + try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + route = api_instance.get_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + name=f"rayclient-{cluster_name}", + ) + return route["spec"]["host"].split(".", 1)[1] + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) + else: + try: + config_check() + 
api_instance = client.NetworkingV1Api(api_config_handler()) + ingress = api_instance.read_namespaced_ingress( + f"rayclient-{cluster_name}", namespace + ) + return ingress.spec.rules[0].host.split(".", 1)[1] + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 0e6ef09c..1e99040c 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -294,6 +294,7 @@ spec: annotations: annotations-example:annotations-example labels: + ingress-options: "false" ingress-owner: appwrapper-name spec: ingressClassName: nginx diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index af1b9ece..95c962d1 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -133,25 +133,41 @@ def update_dashboard_ingress( metadata["name"] = ingress_option["ingressName"] metadata["namespace"] = namespace metadata["labels"]["ingress-owner"] = cluster_name - if "annotations" not in ingress_option.keys(): + metadata["labels"]["ingress-options"] = "true" + if ( + "annotations" not in ingress_option.keys() + or ingress_option["annotations"] is None + ): del metadata["annotations"] else: metadata["annotations"] = ingress_option["annotations"] - if "path" not in ingress_option.keys(): + if ( + "path" not in ingress_option.keys() + or ingress_option["path"] is None + ): del spec["rules"][0]["http"]["paths"][0]["path"] else: spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[ "path" ] - if "pathType" not in ingress_option.keys(): + if ( + "pathType" not in ingress_option.keys() + or ingress_option["pathType"] is None + ): spec["rules"][0]["http"]["paths"][0][ "pathType" ] = "ImplementationSpecific" - if "host" not in ingress_option.keys(): + if ( + "host" not in ingress_option.keys() + or ingress_option["host"] is None + ): del spec["rules"][0]["host"] else: spec["rules"][0]["host"] = ingress_option["host"] - if "ingressClassName" not in ingress_option.keys(): + if ( + "ingressClassName" not in ingress_option.keys() + or ingress_option["ingressClassName"] is None + ): del spec["ingressClassName"] else: spec["ingressClassName"] = ingress_option["ingressClassName"] diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 77f90f89..4be18dc6 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -143,6 +143,7 @@ apiVersion: networking.k8s.io/v1 kind: Ingress metadata: labels: + ingress-options: 'false' ingress-owner: unit-test-cluster-ray name: ray-dashboard-unit-test-cluster-ray namespace: ns diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index b6d820ae..72c73083 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -176,6 +176,7 @@ spec: kind: Ingress metadata: labels: + ingress-options: 'false' ingress-owner: prio-test-cluster name: ray-dashboard-prio-test-cluster namespace: ns diff --git a/tests/test-case.yaml b/tests/test-case.yaml index e96fa89e..8b0677cf 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -173,6 +173,7 @@ spec: kind: Ingress metadata: labels: + ingress-options: 'false' ingress-owner: unit-test-cluster name: ray-dashboard-unit-test-cluster namespace: ns diff --git a/tests/unit_test.py b/tests/unit_test.py index b217b281..ab9e3dcd 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -39,6 +39,7 @@ 
_app_wrapper_status, _ray_cluster_status, _get_ingress_domain, + get_ingress_domain_from_client, ) from codeflare_sdk.cluster.auth import ( TokenAuthentication, @@ -616,25 +617,27 @@ def ray_addr(self, *args): return self._address -def ingress_retrieval(port, annotations=None): +def ingress_retrieval(port, annotations=None, cluster_name="unit-test-cluster"): + labels = {"ingress-owner": cluster_name, "ingress-options": "false"} if port == 10001: serviceName = "client" else: serviceName = "dashboard" mock_ingress = client.V1Ingress( metadata=client.V1ObjectMeta( - name=f"ray-{serviceName}-unit-test-cluster", + name=f"ray-{serviceName}-{cluster_name}", annotations=annotations, + labels=labels, owner_references=[ client.V1OwnerReference( - api_version="v1", kind="Ingress", name="quicktest", uid="unique-id" + api_version="v1", kind="Ingress", name=cluster_name, uid="unique-id" ) ], ), spec=client.V1IngressSpec( rules=[ client.V1IngressRule( - host=f"ray-{serviceName}-unit-test-cluster-ns.apps.cluster.awsroute.org", + host=f"ray-{serviceName}-{cluster_name}-ns.apps.cluster.awsroute.org", http=client.V1HTTPIngressRuleValue( paths=[ client.V1HTTPIngressPath( @@ -1417,7 +1420,10 @@ def get_aw_obj(group, version, namespace, plural): "apiVersion": "networking.k8s.io/v1", "kind": "Ingress", "metadata": { - "labels": {"ingress-owner": "appwrapper-name"}, + "labels": { + "ingress-owner": "appwrapper-name", + "ingress-options": "false", + }, "name": "ray-dashboard-quicktest", "namespace": "default", }, @@ -1817,7 +1823,7 @@ def test_get_cluster(mocker): ) mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval(port=8265), + return_value=ingress_retrieval(port=8265, cluster_name="quicktest"), ) cluster = get_cluster("quicktest") cluster_config = cluster.config @@ -1837,6 +1843,48 @@ def test_get_cluster(mocker): assert cluster_config.num_workers == 1 +def test_get_ingress_domain_from_client(mocker): + mocker.patch("kubernetes.config.load_kube_config") + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.NetworkingV1Api.read_namespaced_ingress", + return_value=ingress_retrieval( + port=8265, cluster_name="unit-test-cluster" + ).items[0], + ) + + ingress_domain = get_ingress_domain_from_client("unit-test-cluster", "ns") + assert ingress_domain == "apps.cluster.awsroute.org" + + mocker.patch( + "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=True + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + side_effect=route_retrieval, + ) + ingress_domain = get_ingress_domain_from_client("unit-test-cluster", "ns") + assert ingress_domain == "apps.cluster.awsroute.org" + + +def route_retrieval(group, version, namespace, plural, name): + assert group == "route.openshift.io" + assert version == "v1" + assert namespace == "ns" + assert plural == "routes" + assert name == "ray-dashboard-unit-test-cluster" + return { + "items": [ + { + "metadata": {"name": "ray-dashboard-unit-test-cluster"}, + "spec": { + "host": "ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" + }, + } + ] + } + + def test_list_clusters(mocker, capsys): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( From 8a5ea1946c364d22f243277fe43c009715455035 Mon Sep 17 00:00:00 2001 From: Kevin Date: Fri, 5 Jan 2024 17:46:30 -0500 Subject: [PATCH 104/496] use route instead of ingress for oauth endpoint Signed-off-by: Kevin --- poetry.lock | 10 -- 
src/codeflare_sdk/utils/generate_yaml.py | 6 -- src/codeflare_sdk/utils/kube_api_helpers.py | 4 - src/codeflare_sdk/utils/openshift_oauth.py | 109 +++++++++----------- tests/unit_test.py | 66 +++++------- 5 files changed, 72 insertions(+), 123 deletions(-) diff --git a/poetry.lock b/poetry.lock index a0ed4770..f52cd9c9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1175,16 +1175,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 95c962d1..2d507c8e 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -29,8 +29,6 @@ from base64 import b64encode from urllib3.util import parse_url -from .kube_api_helpers import _get_api_host - def read_template(template): with open(template, "r") as stream: @@ -557,10 +555,6 @@ def enable_openshift_oauth(user_yaml, cluster_name, namespace): tls_secret_name = f"{cluster_name}-proxy-tls-secret" tls_volume_name = "proxy-tls-secret" port_name = "oauth-proxy" - host = _get_api_host(k8_client) - host = host.replace( - 
"api.", f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps." - ) oauth_sidecar = _create_oauth_sidecar_object( namespace, tls_mount_location, diff --git a/src/codeflare_sdk/utils/kube_api_helpers.py b/src/codeflare_sdk/utils/kube_api_helpers.py index 01a93ef5..17cf6dbe 100644 --- a/src/codeflare_sdk/utils/kube_api_helpers.py +++ b/src/codeflare_sdk/utils/kube_api_helpers.py @@ -47,7 +47,3 @@ def _kube_api_error_handling( elif e.reason == "Conflict": raise FileExistsError(exists_msg) raise e - - -def _get_api_host(api_client: client.ApiClient): # pragma: no cover - return parse_url(api_client.configuration.host).host diff --git a/src/codeflare_sdk/utils/openshift_oauth.py b/src/codeflare_sdk/utils/openshift_oauth.py index 14e55b96..37038c56 100644 --- a/src/codeflare_sdk/utils/openshift_oauth.py +++ b/src/codeflare_sdk/utils/openshift_oauth.py @@ -1,39 +1,40 @@ from urllib3.util import parse_url -from .generate_yaml import gen_dashboard_ingress_name -from .kube_api_helpers import _get_api_host -from base64 import b64decode +import yaml from ..cluster.auth import config_check, api_config_handler from kubernetes import client +from kubernetes import dynamic + + +def _route_api_getter(): + return dynamic.DynamicClient( + api_config_handler() or client.ApiClient() + ).resources.get(api_version="route.openshift.io/v1", kind="Route") def create_openshift_oauth_objects(cluster_name, namespace): config_check() - api_client = api_config_handler() or client.ApiClient() oauth_port = 8443 oauth_sa_name = f"{cluster_name}-oauth-proxy" tls_secret_name = _gen_tls_secret_name(cluster_name) service_name = f"{cluster_name}-oauth" port_name = "oauth-proxy" - host = _get_api_host(api_client) - - # replace "^api" with the expected host - host = f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps" + host.lstrip( - "api" - ) - _create_or_replace_oauth_sa(namespace, oauth_sa_name, host) + _create_or_replace_oauth_sa(namespace, oauth_sa_name, cluster_name) _create_or_replace_oauth_service_obj( cluster_name, namespace, oauth_port, tls_secret_name, service_name, port_name ) - _create_or_replace_oauth_ingress_object( - cluster_name, namespace, service_name, port_name, host + _create_or_replace_oauth_route_object( + cluster_name, + namespace, + service_name, + port_name, ) _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name) -def _create_or_replace_oauth_sa(namespace, oauth_sa_name, host): +def _create_or_replace_oauth_sa(namespace, oauth_sa_name, cluster_name): oauth_sa = client.V1ServiceAccount( api_version="v1", kind="ServiceAccount", @@ -41,7 +42,10 @@ def _create_or_replace_oauth_sa(namespace, oauth_sa_name, host): name=oauth_sa_name, namespace=namespace, annotations={ - "serviceaccounts.openshift.io/oauth-redirecturi.first": f"https://{host}" + "serviceaccounts.openshift.io/oauth-redirectreference.first": '{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"' + + "ray-dashboard-" + + cluster_name + + '"}}' }, ), ) @@ -98,15 +102,14 @@ def delete_openshift_oauth_objects(cluster_name, namespace): # for an existing cluster before calling this => the objects should never be deleted twice oauth_sa_name = f"{cluster_name}-oauth-proxy" service_name = f"{cluster_name}-oauth" + v1_routes = _route_api_getter() client.CoreV1Api(api_config_handler()).delete_namespaced_service_account( name=oauth_sa_name, namespace=namespace ) client.CoreV1Api(api_config_handler()).delete_namespaced_service( name=service_name, namespace=namespace ) - 
client.NetworkingV1Api(api_config_handler()).delete_namespaced_ingress( - name=f"{cluster_name}-ingress", namespace=namespace - ) + v1_routes.delete(name=f"ray-dashboard-{cluster_name}", namespace=namespace) client.RbacAuthorizationV1Api(api_config_handler()).delete_cluster_role_binding( name=f"{cluster_name}-rb" ) @@ -161,52 +164,36 @@ def _create_or_replace_oauth_service_obj( raise e -def _create_or_replace_oauth_ingress_object( +def _create_or_replace_oauth_route_object( cluster_name: str, namespace: str, service_name: str, port_name: str, - host: str, -) -> client.V1Ingress: - ingress = client.V1Ingress( - api_version="networking.k8s.io/v1", - kind="Ingress", - metadata=client.V1ObjectMeta( - annotations={"route.openshift.io/termination": "passthrough"}, - name=f"{cluster_name}-ingress", - namespace=namespace, - ), - spec=client.V1IngressSpec( - rules=[ - client.V1IngressRule( - host=host, - http=client.V1HTTPIngressRuleValue( - paths=[ - client.V1HTTPIngressPath( - backend=client.V1IngressBackend( - service=client.V1IngressServiceBackend( - name=service_name, - port=client.V1ServiceBackendPort( - name=port_name - ), - ) - ), - path_type="ImplementationSpecific", - ) - ] - ), - ) - ] - ), - ) +): + route = f""" + apiVersion: route.openshift.io/v1 + kind: Route + metadata: + name: ray-dashboard-{cluster_name} + namespace: {namespace} + spec: + port: + targetPort: {port_name} + tls: + termination: passthrough + to: + kind: Service + name: {service_name} + """ + route_data = yaml.safe_load(route) + v1_routes = _route_api_getter() try: - client.NetworkingV1Api(api_config_handler()).create_namespaced_ingress( - namespace=namespace, body=ingress + existing_route = v1_routes.get( + name=f"ray-dashboard-{cluster_name}", namespace=namespace ) - except client.ApiException as e: - if e.reason == "Conflict": - client.NetworkingV1Api(api_config_handler()).replace_namespaced_ingress( - namespace=namespace, body=ingress, name=f"{cluster_name}-ingress" - ) - else: - raise e + route_data["metadata"]["resourceVersion"] = existing_route["metadata"][ + "resourceVersion" + ] + v1_routes.replace(body=route_data) + except dynamic.client.ApiException: + v1_routes.create(body=route_data) diff --git a/tests/unit_test.py b/tests/unit_test.py index ab9e3dcd..a7b5d9a6 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -27,7 +27,7 @@ aw_dir = os.path.expanduser("~/.codeflare/appwrapper/") sys.path.append(str(parent) + "/src") -from kubernetes import client, config +from kubernetes import client, config, dynamic from codeflare_sdk.cluster.awload import AWManager from codeflare_sdk.cluster.cluster import ( Cluster, @@ -91,6 +91,7 @@ read_template, enable_local_interactive, ) +import codeflare_sdk.utils.openshift_oauth as sdk_oauth import openshift from openshift.selector import Selector @@ -110,6 +111,24 @@ fake_res = openshift.Result("fake") +def mock_routes_api(mocker): + mocker.patch.object( + sdk_oauth, + "_route_api_getter", + return_value=MagicMock( + resources=MagicMock( + get=MagicMock( + return_value=MagicMock( + create=MagicMock(), + replace=MagicMock(), + delete=MagicMock(), + ) + ) + ) + ), + ) + + def arg_side_effect(*args): fake_res.high_level_operation = args return fake_res @@ -536,17 +555,14 @@ def test_delete_openshift_oauth_objects(mocker): mocker.patch.object(client.CoreV1Api, "delete_namespaced_service") mocker.patch.object(client.NetworkingV1Api, "delete_namespaced_ingress") mocker.patch.object(client.RbacAuthorizationV1Api, "delete_cluster_role_binding") + mock_routes_api(mocker) 
delete_openshift_oauth_objects("test-cluster", "test-namespace") - client.CoreV1Api.delete_namespaced_service_account.assert_called_with( name="test-cluster-oauth-proxy", namespace="test-namespace" ) client.CoreV1Api.delete_namespaced_service.assert_called_with( name="test-cluster-oauth", namespace="test-namespace" ) - client.NetworkingV1Api.delete_namespaced_ingress.assert_called_with( - name="test-cluster-ingress", namespace="test-namespace" - ) client.RbacAuthorizationV1Api.delete_cluster_role_binding.assert_called_with( name="test-cluster-rb" ) @@ -2751,7 +2767,6 @@ def test_create_openshift_oauth(mocker: MockerFixture): create_namespaced_service_account = MagicMock() create_cluster_role_binding = MagicMock() create_namespaced_service = MagicMock() - create_namespaced_ingress = MagicMock() mocker.patch.object( client.CoreV1Api, "create_namespaced_service_account", @@ -2765,35 +2780,17 @@ def test_create_openshift_oauth(mocker: MockerFixture): mocker.patch.object( client.CoreV1Api, "create_namespaced_service", create_namespaced_service ) - mocker.patch.object( - client.NetworkingV1Api, "create_namespaced_ingress", create_namespaced_ingress - ) - mocker.patch( - "codeflare_sdk.utils.openshift_oauth._get_api_host", return_value="foo.com" - ) + mock_routes_api(mocker) create_openshift_oauth_objects("foo", "bar") create_ns_sa_args = create_namespaced_service_account.call_args create_crb_args = create_cluster_role_binding.call_args create_ns_serv_args = create_namespaced_service.call_args - create_ns_ingress_args = create_namespaced_ingress.call_args assert ( create_ns_sa_args.kwargs["namespace"] == create_ns_serv_args.kwargs["namespace"] ) - assert ( - create_ns_serv_args.kwargs["namespace"] - == create_ns_ingress_args.kwargs["namespace"] - ) assert isinstance(create_ns_sa_args.kwargs["body"], client.V1ServiceAccount) assert isinstance(create_crb_args.kwargs["body"], client.V1ClusterRoleBinding) assert isinstance(create_ns_serv_args.kwargs["body"], client.V1Service) - assert isinstance(create_ns_ingress_args.kwargs["body"], client.V1Ingress) - assert ( - create_ns_serv_args.kwargs["body"].spec.ports[0].name - == create_ns_ingress_args.kwargs["body"] - .spec.rules[0] - .http.paths[0] - .backend.service.port.name - ) def test_replace_openshift_oauth(mocker: MockerFixture): @@ -2807,9 +2804,6 @@ def test_replace_openshift_oauth(mocker: MockerFixture): create_namespaced_service = MagicMock( side_effect=client.ApiException(reason="Conflict") ) - create_namespaced_ingress = MagicMock( - side_effect=client.ApiException(reason="Conflict") - ) mocker.patch.object( client.CoreV1Api, "create_namespaced_service_account", @@ -2823,16 +2817,10 @@ def test_replace_openshift_oauth(mocker: MockerFixture): mocker.patch.object( client.CoreV1Api, "create_namespaced_service", create_namespaced_service ) - mocker.patch.object( - client.NetworkingV1Api, "create_namespaced_ingress", create_namespaced_ingress - ) - mocker.patch( - "codeflare_sdk.utils.openshift_oauth._get_api_host", return_value="foo.com" - ) + mocker.patch.object(dynamic.ResourceList, "get", return_value=True) replace_namespaced_service_account = MagicMock() replace_cluster_role_binding = MagicMock() replace_namespaced_service = MagicMock() - replace_namespaced_ingress = MagicMock() mocker.patch.object( client.CoreV1Api, "replace_namespaced_service_account", @@ -2846,21 +2834,15 @@ def test_replace_openshift_oauth(mocker: MockerFixture): mocker.patch.object( client.CoreV1Api, "replace_namespaced_service", replace_namespaced_service ) - 
mocker.patch.object( - client.NetworkingV1Api, "replace_namespaced_ingress", replace_namespaced_ingress - ) + mock_routes_api(mocker) create_openshift_oauth_objects("foo", "bar") replace_namespaced_service_account.assert_called_once() replace_cluster_role_binding.assert_called_once() replace_namespaced_service.assert_called_once() - replace_namespaced_ingress.assert_called_once() def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "codeflare_sdk.utils.generate_yaml._get_api_host", return_value="foo.com" - ) mocker.patch( "codeflare_sdk.cluster.cluster.get_current_namespace", return_value="opendatahub", From 1677fae74f63fdbc2c3ec490dac573e45a1736c4 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 18 Jan 2024 09:41:21 +0000 Subject: [PATCH 105/496] Added kube exception to is_openshift_cluster --- src/codeflare_sdk/utils/generate_yaml.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 2d507c8e..494234fa 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -62,9 +62,8 @@ def is_openshift_cluster(): return True else: return False - except client.ApiException as e: # pragma: no cover - print(f"Error detecting cluster type defaulting to Kubernetes: {e}") - return False + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) def update_dashboard_route(route_item, cluster_name, namespace): From b40640e62133c0f11ef520bc15909f6e5857ca93 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 21 Dec 2023 16:06:27 +0000 Subject: [PATCH 106/496] Added docs for Config and Auth & update doc workflow --- .github/workflows/release.yaml | 2 +- README.md | 25 - docs/authentication.md | 40 ++ docs/cluster-configuration.md | 58 ++ .../cluster/auth.html | 0 .../cluster/awload.html | 0 .../cluster/cluster.html | 284 ++++++--- .../cluster/config.html | 5 +- .../cluster/index.html | 0 .../cluster/model.html | 6 +- docs/{ => detailed-documentation}/index.html | 0 .../job/index.html | 6 + .../job/jobs.html | 2 +- docs/detailed-documentation/job/ray_jobs.html | 574 ++++++++++++++++++ .../utils/generate_cert.html | 0 .../utils/generate_yaml.html | 427 ++++++++----- .../utils/index.html | 0 .../utils/kube_api_helpers.html | 0 .../utils/openshift_oauth.html | 42 +- .../utils/pretty_print.html | 0 20 files changed, 1163 insertions(+), 308 deletions(-) create mode 100644 docs/authentication.md create mode 100644 docs/cluster-configuration.md rename docs/{ => detailed-documentation}/cluster/auth.html (100%) rename docs/{ => detailed-documentation}/cluster/awload.html (100%) rename docs/{ => detailed-documentation}/cluster/cluster.html (91%) rename docs/{ => detailed-documentation}/cluster/config.html (97%) rename docs/{ => detailed-documentation}/cluster/index.html (100%) rename docs/{ => detailed-documentation}/cluster/model.html (99%) rename docs/{ => detailed-documentation}/index.html (100%) rename docs/{ => detailed-documentation}/job/index.html (93%) rename docs/{ => detailed-documentation}/job/jobs.html (99%) create mode 100644 docs/detailed-documentation/job/ray_jobs.html rename docs/{ => detailed-documentation}/utils/generate_cert.html (100%) rename docs/{ => detailed-documentation}/utils/generate_yaml.html (85%) rename docs/{ => detailed-documentation}/utils/index.html (100%) rename docs/{ => detailed-documentation}/utils/kube_api_helpers.html 
(100%) rename docs/{ => detailed-documentation}/utils/openshift_oauth.html (91%) rename docs/{ => detailed-documentation}/utils/pretty_print.html (100%) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 20dce4e7..a191baa9 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -53,7 +53,7 @@ jobs: - name: Run poetry build run: poetry build - name: Create new documentation - run: poetry run pdoc --html -o docs src/codeflare_sdk && pushd docs && rm -rf cluster job utils && mv codeflare_sdk/* . && rm -rf codeflare_sdk && popd && find docs -type f -name "*.html" -exec bash -c "echo '' >> {}" \; + run: poetry run pdoc --html -o docs/detailed-documentation src/codeflare_sdk && pushd docs/detailed-documentation && rm -rf cluster job utils && mv codeflare_sdk/* . && rm -rf codeflare_sdk && popd && find docs/detailed-documentation -type f -name "*.html" -exec bash -c "echo '' >> {}" \; - name: Commit changes in docs uses: stefanzweifel/git-auto-commit-action@v4 diff --git a/README.md b/README.md index 014b14d3..e864189b 100644 --- a/README.md +++ b/README.md @@ -17,31 +17,6 @@ Full documentation can be found [here](https://project-codeflare.github.io/codef Can be installed via `pip`: `pip install codeflare-sdk` -## CodeFlare SDK in a Kubernetes environment -To create a Ray Cluster using the CodeFlare SDK in a Kubernetes environment an `ingress_domain` must be passed in the Cluster Configuration. -This is used for the creation of the Ray Dashboard and Client ingresses. - -`ingress_options` can be passed to create a custom Ray Dashboard ingress, `ingress_domain` is still a required variable for the Client ingress. -An example of `ingress_options` would look like this. - -``` -ingress_options = { - "ingresses": [ - { - "ingressName": "", - "port": , - "pathType": "", - "path": "", - "host":"", - "annotations": { - "foo": "bar", - "foo": "bar", - } - } - ] -} -``` - ## Development ### Prerequisites diff --git a/docs/authentication.md b/docs/authentication.md new file mode 100644 index 00000000..56567cc3 --- /dev/null +++ b/docs/authentication.md @@ -0,0 +1,40 @@ +# Authentication via the CodeFlare SDK +Currently there are four ways of authenticating to your cluster via the SDK.
+Authenticating with your cluster allows you to perform actions such as creating Ray Clusters and submitting jobs.
+
+## Method 1 Token Authentication
+This is how a typical user would authenticate to their cluster using `TokenAuthentication`.
+```
+from codeflare_sdk.cluster.auth import TokenAuthentication
+
+auth = TokenAuthentication(
+    token = "XXXXX",
+    server = "XXXXX",
+    skip_tls=False,
+    # ca_cert_path="/path/to/cert"
+)
+auth.login()
+# log out with auth.logout()
+```
+Setting `skip_tls=True` allows interaction with an HTTPS server while bypassing the server certificate checks, although this is not secure.  
+You can pass a custom certificate to `TokenAuthentication` by using `ca_cert_path="/path/to/cert"` when authenticating, provided `skip_tls=False`.
+
+## Method 2 Kubernetes Config File Authentication (Default location)
+If a user has already authenticated to their cluster by alternate means, e.g. by running a login command like `oc login --token=<token> --server=<server>`, their Kubernetes config file should have been updated.  
      +If the user has not specifically authenticated through the SDK by other means such as `TokenAuthentication` then the SDK will try to use their default Kubernetes config file located at `"/HOME/.kube/config"`. + +## Method 3 Specifying a Kubernetes Config File +A user can specify a config file via a different authentication class `KubeConfigFileAuthentication` for authenticating with the SDK.
      +This is what loading a custom config file would typically look like. +``` +from codeflare_sdk.cluster.auth import KubeConfigFileAuthentication + +auth = KubeConfigFileAuthentication( + kube_config_path="/path/to/config", +) +auth.load_kube_config() +# log out with auth.logout() +``` + +## Method 4 In-Cluster Authentication +If a user does not authenticate by any of the means detailed above and does not have a config file at `"/HOME/.kube/config"` the SDK will try to authenticate with the in-cluster configuration file. diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md new file mode 100644 index 00000000..35e8c149 --- /dev/null +++ b/docs/cluster-configuration.md @@ -0,0 +1,58 @@ +# Ray Cluster Configuration + +To create Ray Clusters using the CodeFlare SDK a cluster configuration needs to be created first.
      +This is what a typical cluster configuration would look like; Note: The values for CPU and Memory are at the minimum requirements for creating the Ray Cluster. + +``` +from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration + +cluster = Cluster(ClusterConfiguration( + name='ray-example', # Mandatory Field + namespace='default', # Default None + head_cpus=1, # Default 2 + head_memory=1, # Default 8 + head_gpus=0, # Default 0 + num_workers=1, # Default 1 + min_cpus=1, # Default 1 + max_cpus=1, # Default 1 + min_memory=2, # Default 2 + max_memory=2, # Default 2 + num_gpus=0, # Default 0 + mcad=True, # Default True + image="quay.io/project-codeflare/ray:latest-py39-cu118", # Mandatory Field + instascale=False, # Default False + machine_types=["m5.xlarge", "g4dn.xlarge"], + ingress_domain="example.com" # Default None, Mandatory for Kubernetes Clusters +)) +``` + +Upon creating a cluster configuration with `mcad=True` an appwrapper will be created featuring the Ray Cluster and any Routes, Ingresses or Secrets that are needed to be created along side it.
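+The resources that make up the appwrapper can be inspected before submission. As a rough sketch, assuming the SDK's default output directory `~/.codeflare/appwrapper/` and a file named after the cluster (both are assumptions worth verifying for your SDK version):
+
+```
+import os
+import yaml
+
+# Assumed default location where the SDK writes the generated YAML
+aw_path = os.path.expanduser("~/.codeflare/appwrapper/ray-example.yaml")
+with open(aw_path) as f:
+    for resource in yaml.load_all(f, Loader=yaml.FullLoader):
+        print(resource["kind"], resource["metadata"]["name"])
+```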
+From there a user can call `cluster.up()` and `cluster.down()` to create and remove the appwrapper, thus creating and removing the Ray Cluster; a minimal sketch of this flow is shown below.
+
+In cases where `mcad=False` a yaml file will be created with the individual Ray Cluster, Route/Ingress and Secret included.
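+A minimal sketch of the `cluster.up()` / `cluster.down()` flow for the configuration above (`wait_ready()` is assumed from the SDK's cluster API and may differ between versions):
+
+```
+cluster.up()                            # create the appwrapper / Ray Cluster
+cluster.wait_ready()                    # block until the Ray Cluster reports ready
+print(cluster.cluster_dashboard_uri())  # dashboard route or ingress URL
+cluster.down()                          # remove the appwrapper and its resources
+```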
      +The Ray Cluster and service will be created by KubeRay directly and the other components will be individually created. + +## Ray Cluster Configuration in a Kubernetes environment +To create a Ray Cluster using the CodeFlare SDK in a Kubernetes environment an `ingress_domain` must be passed in the Cluster Configuration. +This is used for the creation of the Ray Dashboard and Client ingresses. + +`ingress_options` can be passed to create a custom Ray Dashboard ingress, `ingress_domain` is still a required variable for the Client ingress. +An example of `ingress_options` would look like this. + +``` +ingress_options = { + "ingresses": [ + { + "ingressName": "", + "port": , + "pathType": "", + "path": "", + "host":"", + "annotations": { + "foo": "bar", + "foo": "bar", + } + } + ] +} +``` diff --git a/docs/cluster/auth.html b/docs/detailed-documentation/cluster/auth.html similarity index 100% rename from docs/cluster/auth.html rename to docs/detailed-documentation/cluster/auth.html diff --git a/docs/cluster/awload.html b/docs/detailed-documentation/cluster/awload.html similarity index 100% rename from docs/cluster/awload.html rename to docs/detailed-documentation/cluster/awload.html diff --git a/docs/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html similarity index 91% rename from docs/cluster/cluster.html rename to docs/detailed-documentation/cluster/cluster.html index bf84856b..bb0ab95f 100644 --- a/docs/cluster/cluster.html +++ b/docs/detailed-documentation/cluster/cluster.html @@ -64,6 +64,7 @@

      generate_appwrapper, ) from ..utils.kube_api_helpers import _kube_api_error_handling +from ..utils.generate_yaml import is_openshift_cluster from ..utils.openshift_oauth import ( create_openshift_oauth_objects, delete_openshift_oauth_objects, @@ -123,14 +124,11 @@

      @property def job_client(self): + k8client = api_config_handler() or client.ApiClient() if self._job_submission_client: return self._job_submission_client if self.config.openshift_oauth: - print( - api_config_handler().configuration.get_api_key_with_prefix( - "authorization" - ) - ) + print(k8client.configuration.get_api_key_with_prefix("authorization")) self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), headers=self._client_headers, @@ -450,25 +448,48 @@

      """ Returns a string containing the cluster's dashboard URI. """ - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress(self.config.namespace) - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) + config_check() + if is_openshift_cluster(): + try: + api_instance = client.CustomObjectsApi(api_config_handler()) + routes = api_instance.list_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=self.config.namespace, + plural="routes", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + for route in routes["items"]: + if route["metadata"][ + "name" + ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ + "name" + ].startswith( + f"{self.config.name}-ingress" + ): + protocol = "https" if route["spec"].get("tls") else "http" + return f"{protocol}://{route['spec']['host']}" + else: + try: + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(self.config.namespace) + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) - for ingress in ingresses.items: - annotations = ingress.metadata.annotations - protocol = "http" - if ( - ingress.metadata.name == f"ray-dashboard-{self.config.name}" - or ingress.metadata.name.startswith(f"{self.config.name}-ingress") - ): - if annotations == None: - protocol = "http" - elif "route.openshift.io/termination" in annotations: - protocol = "https" - return f"{protocol}://{ingress.spec.rules[0].host}" + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{self.config.name}" + or ingress.metadata.name.startswith(f"{self.config.name}-ingress") + ): + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + return f"{protocol}://{ingress.spec.rules[0].host}" return "Dashboard ingress not available yet, have you run cluster.up()?" def list_jobs(self) -> List: @@ -567,6 +588,14 @@

      plural="rayclusters", body=resource, ) + elif resource["kind"] == "Ingress": + api_instance.create_namespaced_custom_object( + group="networking.k8s.io", + version="v1", + namespace=namespace, + plural="ingresses", + body=resource, + ) elif resource["kind"] == "Route": api_instance.create_namespaced_custom_object( group="route.openshift.io", @@ -596,6 +625,15 @@

      plural="rayclusters", name=self.app_wrapper_name, ) + elif resource["kind"] == "Ingress": + name = resource["metadata"]["name"] + api_instance.delete_namespaced_custom_object( + group="networking.k8s.io", + version="v1", + namespace=namespace, + plural="ingresses", + name=name, + ) elif resource["kind"] == "Route": name = resource["metadata"]["name"] api_instance.delete_namespaced_custom_object( @@ -698,29 +736,48 @@

      ) except Exception as e: # pragma: no cover return _kube_api_error_handling(e, print_error=False) - for aw in aws["items"]: if aw["metadata"]["name"] == name: return True return False -# Cant test this until get_current_namespace is fixed +# Cant test this until get_current_namespace is fixed and placed in this function over using `self` def _get_ingress_domain(self): # pragma: no cover - try: - config_check() - api_client = client.NetworkingV1Api(api_config_handler()) - if self.config.namespace != None: - namespace = self.config.namespace - else: - namespace = get_current_namespace() - ingresses = api_client.list_namespaced_ingress(namespace) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) + config_check() + + if self.config.namespace != None: + namespace = self.config.namespace + else: + namespace = get_current_namespace() domain = None - for ingress in ingresses.items: - if ingress.spec.rules[0].http.paths[0].backend.service.port.number == 10001: - domain = ingress.spec.rules[0].host + + if is_openshift_cluster(): + try: + api_instance = client.CustomObjectsApi(api_config_handler()) + + routes = api_instance.list_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + for route in routes["items"]: + if route["spec"]["port"]["targetPort"] == "client": + domain = route["spec"]["host"] + else: + try: + api_client = client.NetworkingV1Api(api_config_handler()) + ingresses = api_client.list_namespaced_ingress(namespace) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + for ingress in ingresses.items: + if ingress.spec.rules[0].http.paths[0].backend.service.port.number == 10001: + domain = ingress.spec.rules[0].host return domain @@ -1073,14 +1130,11 @@

      Classes

    @property
    def job_client(self):
+        k8client = api_config_handler() or client.ApiClient()
        if self._job_submission_client:
            return self._job_submission_client
        if self.config.openshift_oauth:
-            print(
-                api_config_handler().configuration.get_api_key_with_prefix(
-                    "authorization"
-                )
-            )
+            print(k8client.configuration.get_api_key_with_prefix("authorization"))
            self._job_submission_client = JobSubmissionClient(
                self.cluster_dashboard_uri(),
                headers=self._client_headers,
@@ -1400,25 +1454,48 @@

      Classes

      """ Returns a string containing the cluster's dashboard URI. """ - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress(self.config.namespace) - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) + config_check() + if is_openshift_cluster(): + try: + api_instance = client.CustomObjectsApi(api_config_handler()) + routes = api_instance.list_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=self.config.namespace, + plural="routes", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + for route in routes["items"]: + if route["metadata"][ + "name" + ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ + "name" + ].startswith( + f"{self.config.name}-ingress" + ): + protocol = "https" if route["spec"].get("tls") else "http" + return f"{protocol}://{route['spec']['host']}" + else: + try: + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(self.config.namespace) + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) - for ingress in ingresses.items: - annotations = ingress.metadata.annotations - protocol = "http" - if ( - ingress.metadata.name == f"ray-dashboard-{self.config.name}" - or ingress.metadata.name.startswith(f"{self.config.name}-ingress") - ): - if annotations == None: - protocol = "http" - elif "route.openshift.io/termination" in annotations: - protocol = "https" - return f"{protocol}://{ingress.spec.rules[0].host}" + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{self.config.name}" + or ingress.metadata.name.startswith(f"{self.config.name}-ingress") + ): + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + return f"{protocol}://{ingress.spec.rules[0].host}" return "Dashboard ingress not available yet, have you run cluster.up()?" def list_jobs(self) -> List: @@ -1517,6 +1594,14 @@

      Classes

      plural="rayclusters", body=resource, ) + elif resource["kind"] == "Ingress": + api_instance.create_namespaced_custom_object( + group="networking.k8s.io", + version="v1", + namespace=namespace, + plural="ingresses", + body=resource, + ) elif resource["kind"] == "Route": api_instance.create_namespaced_custom_object( group="route.openshift.io", @@ -1546,6 +1631,15 @@

      Classes

      plural="rayclusters", name=self.app_wrapper_name, ) + elif resource["kind"] == "Ingress": + name = resource["metadata"]["name"] + api_instance.delete_namespaced_custom_object( + group="networking.k8s.io", + version="v1", + namespace=namespace, + plural="ingresses", + name=name, + ) elif resource["kind"] == "Route": name = resource["metadata"]["name"] api_instance.delete_namespaced_custom_object( @@ -1581,14 +1675,11 @@

      Instance variables

      @property
       def job_client(self):
      +    k8client = api_config_handler() or client.ApiClient()
           if self._job_submission_client:
               return self._job_submission_client
           if self.config.openshift_oauth:
      -        print(
      -            api_config_handler().configuration.get_api_key_with_prefix(
      -                "authorization"
      -            )
      -        )
      +        print(k8client.configuration.get_api_key_with_prefix("authorization"))
               self._job_submission_client = JobSubmissionClient(
                   self.cluster_dashboard_uri(),
                   headers=self._client_headers,
      @@ -1617,25 +1708,48 @@ 

      Methods

      """ Returns a string containing the cluster's dashboard URI. """ - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress(self.config.namespace) - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) + config_check() + if is_openshift_cluster(): + try: + api_instance = client.CustomObjectsApi(api_config_handler()) + routes = api_instance.list_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=self.config.namespace, + plural="routes", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) - for ingress in ingresses.items: - annotations = ingress.metadata.annotations - protocol = "http" - if ( - ingress.metadata.name == f"ray-dashboard-{self.config.name}" - or ingress.metadata.name.startswith(f"{self.config.name}-ingress") - ): - if annotations == None: - protocol = "http" - elif "route.openshift.io/termination" in annotations: - protocol = "https" - return f"{protocol}://{ingress.spec.rules[0].host}" + for route in routes["items"]: + if route["metadata"][ + "name" + ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ + "name" + ].startswith( + f"{self.config.name}-ingress" + ): + protocol = "https" if route["spec"].get("tls") else "http" + return f"{protocol}://{route['spec']['host']}" + else: + try: + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(self.config.namespace) + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) + + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{self.config.name}" + or ingress.metadata.name.startswith(f"{self.config.name}-ingress") + ): + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + return f"{protocol}://{ingress.spec.rules[0].host}" return "Dashboard ingress not available yet, have you run cluster.up()?"
      @@ -1943,7 +2057,7 @@

      Methods

-def list_jobs(self) ‑> List[~T]
+def list_jobs(self) ‑> List

      This method accesses the head ray node in your cluster and lists the running jobs.
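A minimal sketch of calling it through the SDK, assuming a cluster object has already been created; the cluster name and namespace below are placeholders, not values from this repository:

# Illustrative only; "test-cluster" and "default" are placeholder values.
from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration

cluster = Cluster(ClusterConfiguration(name="test-cluster", namespace="default"))

# Requires a reachable dashboard route/ingress, i.e. cluster.up() has completed.
for job in cluster.list_jobs():
    print(job)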

diff --git a/docs/cluster/config.html b/docs/detailed-documentation/cluster/config.html
similarity index 97%
rename from docs/cluster/config.html
rename to docs/detailed-documentation/cluster/config.html
index a4f2d662..62a17a64 100644
--- a/docs/cluster/config.html
+++ b/docs/detailed-documentation/cluster/config.html
@@ -100,7 +100,7 @@

      Classes

class ClusterConfiguration
-(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = '', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False, ingress_options: dict = <factory>, ingress_domain: str = None)
+(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/mark/distributed-workloads-repos/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = '', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False, ingress_options: dict = <factory>, ingress_domain: str = None)

This dataclass is used to specify resource requirements and other details, and
@@ -109,7 +109,8 @@

      Classes

      Expand source code -
      class ClusterConfiguration:
      +
      @dataclass
      +class ClusterConfiguration:
           """
           This dataclass is used to specify resource requirements and other details, and
           is passed in as an argument when creating a Cluster object.
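A brief, hypothetical sketch of filling in this dataclass with fields from the signature above; every value is illustrative, and ingress_domain only matters where an ingress (rather than an OpenShift route) will be created:

from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration

config = ClusterConfiguration(
    name="test-cluster",   # placeholder cluster name
    namespace="default",   # placeholder namespace
    num_workers=2,
    min_cpus=1,
    max_cpus=1,
    min_memory=2,
    max_memory=2,
    num_gpus=0,
    instascale=False,
    ingress_domain="apps.example.com",  # placeholder; used for ingress hostnames on plain Kubernetes
)
cluster = Cluster(config)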
      diff --git a/docs/cluster/index.html b/docs/detailed-documentation/cluster/index.html
      similarity index 100%
      rename from docs/cluster/index.html
      rename to docs/detailed-documentation/cluster/index.html
      diff --git a/docs/cluster/model.html b/docs/detailed-documentation/cluster/model.html
      similarity index 99%
      rename from docs/cluster/model.html
      rename to docs/detailed-documentation/cluster/model.html
      index 3832494e..8ac5b7c0 100644
      --- a/docs/cluster/model.html
      +++ b/docs/detailed-documentation/cluster/model.html
      @@ -145,7 +145,8 @@ 

      Classes

      Expand source code -
      class AppWrapper:
      +
      @dataclass
      +class AppWrapper:
           """
           For storing information about an AppWrapper.
           """
      @@ -298,7 +299,8 @@ 

      Class variables

      Expand source code -
      class RayCluster:
      +
      @dataclass
      +class RayCluster:
           """
           For storing information about a Ray cluster.
           """
      diff --git a/docs/index.html b/docs/detailed-documentation/index.html
      similarity index 100%
      rename from docs/index.html
      rename to docs/detailed-documentation/index.html
      diff --git a/docs/job/index.html b/docs/detailed-documentation/job/index.html
      similarity index 93%
      rename from docs/job/index.html
      rename to docs/detailed-documentation/job/index.html
      index 2360deec..bd8991c3 100644
      --- a/docs/job/index.html
      +++ b/docs/detailed-documentation/job/index.html
      @@ -30,6 +30,11 @@ 

      Sub-modules

      +
      codeflare_sdk.job.ray_jobs
      +
      +

The ray_jobs sub-module contains methods needed to submit jobs and connect to Ray Clusters that were not created by CodeFlare.
+The SDK acts as a …

      +
    @@ -53,6 +58,7 @@

    Index

  • Sub-modules

  • diff --git a/docs/job/jobs.html b/docs/detailed-documentation/job/jobs.html similarity index 99% rename from docs/job/jobs.html rename to docs/detailed-documentation/job/jobs.html index ea587df8..ce024011 100644 --- a/docs/job/jobs.html +++ b/docs/detailed-documentation/job/jobs.html @@ -323,7 +323,7 @@

    Methods

class DDPJobDefinition
-(script: Optional[str] = None, m: Optional[str] = None, script_args: Optional[List[str]] = None, name: Optional[str] = None, cpu: Optional[int] = None, gpu: Optional[int] = None, memMB: Optional[int] = None, h: Optional[str] = None, j: Optional[str] = None, env: Optional[Dict[str, str]] = None, max_retries: int = 0, mounts: Optional[List[str]] = None, rdzv_port: int = 29500, rdzv_backend: str = None, scheduler_args: Optional[Dict[str, str]] = None, image: Optional[str] = None, workspace: Optional[str] = 'file:///home/runner/work/codeflare-sdk/codeflare-sdk')
+(script: Optional[str] = None, m: Optional[str] = None, script_args: Optional[List[str]] = None, name: Optional[str] = None, cpu: Optional[int] = None, gpu: Optional[int] = None, memMB: Optional[int] = None, h: Optional[str] = None, j: Optional[str] = None, env: Optional[Dict[str, str]] = None, max_retries: int = 0, mounts: Optional[List[str]] = None, rdzv_port: int = 29500, rdzv_backend: str = None, scheduler_args: Optional[Dict[str, str]] = None, image: Optional[str] = None, workspace: Optional[str] = 'file:///home/mark/distributed-workloads-repos/codeflare-sdk')
diff --git a/docs/detailed-documentation/job/ray_jobs.html b/docs/detailed-documentation/job/ray_jobs.html
new file mode 100644
index 00000000..01fa5fb5
--- /dev/null
+++ b/docs/detailed-documentation/job/ray_jobs.html
@@ -0,0 +1,574 @@
+codeflare_sdk.job.ray_jobs API documentation
    +
    +
    +

    Module codeflare_sdk.job.ray_jobs

    +
    +
    +

The ray_jobs sub-module contains methods needed to submit jobs and connect to Ray Clusters that were not created by CodeFlare.
+The SDK acts as a wrapper for the Ray Job Submission Client.
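A short usage sketch, under the assumption that a Ray dashboard is already reachable; the address and entrypoint below are placeholders, not values from this repository:

# Illustrative only.
from codeflare_sdk.job.ray_jobs import RayJobClient

client = RayJobClient(address="http://ray-dashboard-example.apps.example.com")

# Submit a shell entrypoint and check its status through the wrapped Ray client.
submission_id = client.submit_job(entrypoint="python train.py")
print(client.get_job_status(submission_id))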

    +
    + +Expand source code + +
    # Copyright 2022 IBM, Red Hat
    +#
    +# Licensed under the Apache License, Version 2.0 (the "License");
    +# you may not use this file except in compliance with the License.
    +# You may obtain a copy of the License at
    +#
    +#      http://www.apache.org/licenses/LICENSE-2.0
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS,
    +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    +# See the License for the specific language governing permissions and
    +# limitations under the License.
    +
    +"""
    +The ray_jobs sub-module contains methods needed to submit jobs and connect to Ray Clusters that were not created by CodeFlare.
    +The SDK acts as a wrapper for the Ray Job Submission Client.
    +"""
    +from ray.job_submission import JobSubmissionClient
    +from ray.dashboard.modules.job.pydantic_models import JobDetails
    +from typing import Iterator, Optional, Dict, Any, Union, List
    +
    +
    +class RayJobClient:
    +    """
    +    A class that functions as a wrapper for the Ray Job Submission Client.
    +
    +    parameters:
    +    address -- Either (1) the address of the Ray cluster, or (2) the HTTP address of the dashboard server on the head node, e.g. “http://<head-node-ip>:8265”. In case (1) it must be specified as an address that can be passed to ray.init(),
    +    e.g. a Ray Client address (ray://<head_node_host>:10001), or “auto”, or “localhost:<port>”. If unspecified, will try to connect to a running local Ray cluster. This argument is always overridden by the RAY_ADDRESS environment variable.
    +    create_cluster_if_needed -- Indicates whether the cluster at the specified address needs to already be running. Ray doesn't start a cluster before interacting with jobs, but third-party job managers may do so.
    +    cookies -- Cookies to use when sending requests to the HTTP job server.
    +    metadata -- Arbitrary metadata to store along with all jobs. New metadata specified per job will be merged with the global metadata provided here via a simple dict update.
    +    headers -- Headers to use when sending requests to the HTTP job server, used for cases like authentication to a remote cluster.
    +    verify -- Boolean indication to verify the server's TLS certificate or a path to a file or directory of trusted certificates. Default: True.
    +    """
    +
    +    def __init__(
    +        self,
    +        address: Optional[str] = None,
    +        create_cluster_if_needed: bool = False,
    +        cookies: Optional[Dict[str, Any]] = None,
    +        metadata: Optional[Dict[str, Any]] = None,
    +        headers: Optional[Dict[str, Any]] = None,
    +        verify: Optional[Union[str, bool]] = True,
    +    ):
    +        self.rayJobClient = JobSubmissionClient(
    +            address=address,
    +            create_cluster_if_needed=create_cluster_if_needed,
    +            cookies=cookies,
    +            metadata=metadata,
    +            headers=headers,
    +            verify=verify,
    +        )
    +
    +    def submit_job(
    +        self,
    +        entrypoint: str,
    +        job_id: Optional[str] = None,
    +        runtime_env: Optional[Dict[str, Any]] = None,
    +        metadata: Optional[Dict[str, str]] = None,
    +        submission_id: Optional[str] = None,
    +        entrypoint_num_cpus: Optional[Union[int, float]] = None,
    +        entrypoint_num_gpus: Optional[Union[int, float]] = None,
    +        entrypoint_resources: Optional[Dict[str, float]] = None,
    +    ) -> str:
    +        """
    +        Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.
    +
    +        Parameters:
    +        entrypoint -- The shell command to run for this job.
    +        submission_id -- A unique ID for this job.
    +        runtime_env -- The runtime environment to install and run this job in.
    +        metadata -- Arbitrary data to store along with this job.
    +        job_id -- DEPRECATED. This has been renamed to submission_id
    +        entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    +        entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    +        entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.
    +        """
    +        return self.rayJobClient.submit_job(
    +            entrypoint=entrypoint,
    +            job_id=job_id,
    +            runtime_env=runtime_env,
    +            metadata=metadata,
    +            submission_id=submission_id,
    +            entrypoint_num_cpus=entrypoint_num_cpus,
    +            entrypoint_num_gpus=entrypoint_num_gpus,
    +            entrypoint_resources=entrypoint_resources,
    +        )
    +
    +    def delete_job(self, job_id: str) -> (bool, str):
    +        """
    +        Method for deleting jobs with the job id being a mandatory field.
    +        """
    +        deletion_status = self.rayJobClient.delete_job(job_id=job_id)
    +
    +        if deletion_status:
    +            message = f"Successfully deleted Job {job_id}"
    +        else:
    +            message = f"Failed to delete Job {job_id}"
    +
    +        return deletion_status, message
    +
    +    def get_address(self) -> str:
    +        """
    +        Method for getting the address from the RayJobClient
    +        """
    +        return self.rayJobClient.get_address()
    +
    +    def get_job_info(self, job_id: str):
    +        """
    +        Method for getting the job info with the job id being a mandatory field.
    +        """
    +        return self.rayJobClient.get_job_info(job_id=job_id)
    +
    +    def get_job_logs(self, job_id: str) -> str:
    +        """
    +        Method for getting the job logs with the job id being a mandatory field.
    +        """
    +        return self.rayJobClient.get_job_logs(job_id=job_id)
    +
    +    def get_job_status(self, job_id: str) -> str:
    +        """
    +        Method for getting the job's status with the job id being a mandatory field.
    +        """
    +        return self.rayJobClient.get_job_status(job_id=job_id)
    +
    +    def list_jobs(self) -> List[JobDetails]:
    +        """
    +        Method for getting a list of current jobs in the Ray Cluster.
    +        """
    +        return self.rayJobClient.list_jobs()
    +
    +    def stop_job(self, job_id: str) -> (bool, str):
    +        """
    +        Method for stopping a job with the job id being a mandatory field.
    +        """
    +        stop_job_status = self.rayJobClient.stop_job(job_id=job_id)
    +        if stop_job_status:
    +            message = f"Successfully stopped Job {job_id}"
    +        else:
    +            message = f"Failed to stop Job, {job_id} could have already completed."
    +        return stop_job_status, message
    +
    +    def tail_job_logs(self, job_id: str) -> Iterator[str]:
    +        """
    +        Method for getting an iterator that follows the logs of a job with the job id being a mandatory field.
    +        """
    +        return self.rayJobClient.tail_job_logs(job_id=job_id)
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +

    Classes

    +
    +
+class RayJobClient
+(address: Optional[str] = None, create_cluster_if_needed: bool = False, cookies: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, Any]] = None, verify: Union[str, bool, ForwardRef(None)] = True)
+
    +
    +

    A class that functions as a wrapper for the Ray Job Submission Client.

    +

parameters:
+address – Either (1) the address of the Ray cluster, or (2) the HTTP address of the dashboard server on the head node, e.g. “http://<head-node-ip>:8265”. In case (1) it must be specified as an address that can be passed to ray.init(), e.g. a Ray Client address (ray://<head_node_host>:10001), or “auto”, or “localhost:<port>”. If unspecified, will try to connect to a running local Ray cluster. This argument is always overridden by the RAY_ADDRESS environment variable.
+create_cluster_if_needed – Indicates whether the cluster at the specified address needs to already be running. Ray doesn't start a cluster before interacting with jobs, but third-party job managers may do so.
+cookies – Cookies to use when sending requests to the HTTP job server.
+metadata – Arbitrary metadata to store along with all jobs. New metadata specified per job will be merged with the global metadata provided here via a simple dict update.
+headers – Headers to use when sending requests to the HTTP job server, used for cases like authentication to a remote cluster.
+verify – Boolean indication to verify the server's TLS certificate or a path to a file or directory of trusted certificates. Default: True.
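A sketch of constructing the client against a protected dashboard; the address, token, and certificate path are assumptions for illustration only:

from codeflare_sdk.job.ray_jobs import RayJobClient

auth_token = "XXXX"  # placeholder bearer token for the remote cluster
client = RayJobClient(
    address="https://ray-dashboard-example.apps.example.com",  # placeholder address
    headers={"Authorization": f"Bearer {auth_token}"},
    verify="/path/to/ca.crt",  # or True/False, per the verify parameter above
)
print(client.get_address())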

    +
    + +Expand source code + +
    class RayJobClient:
    +    """
    +    A class that functions as a wrapper for the Ray Job Submission Client.
    +
    +    parameters:
    +    address -- Either (1) the address of the Ray cluster, or (2) the HTTP address of the dashboard server on the head node, e.g. “http://<head-node-ip>:8265”. In case (1) it must be specified as an address that can be passed to ray.init(),
    +    e.g. a Ray Client address (ray://<head_node_host>:10001), or “auto”, or “localhost:<port>”. If unspecified, will try to connect to a running local Ray cluster. This argument is always overridden by the RAY_ADDRESS environment variable.
    +    create_cluster_if_needed -- Indicates whether the cluster at the specified address needs to already be running. Ray doesn't start a cluster before interacting with jobs, but third-party job managers may do so.
    +    cookies -- Cookies to use when sending requests to the HTTP job server.
    +    metadata -- Arbitrary metadata to store along with all jobs. New metadata specified per job will be merged with the global metadata provided here via a simple dict update.
    +    headers -- Headers to use when sending requests to the HTTP job server, used for cases like authentication to a remote cluster.
    +    verify -- Boolean indication to verify the server's TLS certificate or a path to a file or directory of trusted certificates. Default: True.
    +    """
    +
    +    def __init__(
    +        self,
    +        address: Optional[str] = None,
    +        create_cluster_if_needed: bool = False,
    +        cookies: Optional[Dict[str, Any]] = None,
    +        metadata: Optional[Dict[str, Any]] = None,
    +        headers: Optional[Dict[str, Any]] = None,
    +        verify: Optional[Union[str, bool]] = True,
    +    ):
    +        self.rayJobClient = JobSubmissionClient(
    +            address=address,
    +            create_cluster_if_needed=create_cluster_if_needed,
    +            cookies=cookies,
    +            metadata=metadata,
    +            headers=headers,
    +            verify=verify,
    +        )
    +
    +    def submit_job(
    +        self,
    +        entrypoint: str,
    +        job_id: Optional[str] = None,
    +        runtime_env: Optional[Dict[str, Any]] = None,
    +        metadata: Optional[Dict[str, str]] = None,
    +        submission_id: Optional[str] = None,
    +        entrypoint_num_cpus: Optional[Union[int, float]] = None,
    +        entrypoint_num_gpus: Optional[Union[int, float]] = None,
    +        entrypoint_resources: Optional[Dict[str, float]] = None,
    +    ) -> str:
    +        """
    +        Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.
    +
    +        Parameters:
    +        entrypoint -- The shell command to run for this job.
    +        submission_id -- A unique ID for this job.
    +        runtime_env -- The runtime environment to install and run this job in.
    +        metadata -- Arbitrary data to store along with this job.
    +        job_id -- DEPRECATED. This has been renamed to submission_id
    +        entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    +        entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    +        entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.
    +        """
    +        return self.rayJobClient.submit_job(
    +            entrypoint=entrypoint,
    +            job_id=job_id,
    +            runtime_env=runtime_env,
    +            metadata=metadata,
    +            submission_id=submission_id,
    +            entrypoint_num_cpus=entrypoint_num_cpus,
    +            entrypoint_num_gpus=entrypoint_num_gpus,
    +            entrypoint_resources=entrypoint_resources,
    +        )
    +
    +    def delete_job(self, job_id: str) -> (bool, str):
    +        """
    +        Method for deleting jobs with the job id being a mandatory field.
    +        """
    +        deletion_status = self.rayJobClient.delete_job(job_id=job_id)
    +
    +        if deletion_status:
    +            message = f"Successfully deleted Job {job_id}"
    +        else:
    +            message = f"Failed to delete Job {job_id}"
    +
    +        return deletion_status, message
    +
    +    def get_address(self) -> str:
    +        """
    +        Method for getting the address from the RayJobClient
    +        """
    +        return self.rayJobClient.get_address()
    +
    +    def get_job_info(self, job_id: str):
    +        """
    +        Method for getting the job info with the job id being a mandatory field.
    +        """
    +        return self.rayJobClient.get_job_info(job_id=job_id)
    +
    +    def get_job_logs(self, job_id: str) -> str:
    +        """
    +        Method for getting the job logs with the job id being a mandatory field.
    +        """
    +        return self.rayJobClient.get_job_logs(job_id=job_id)
    +
    +    def get_job_status(self, job_id: str) -> str:
    +        """
    +        Method for getting the job's status with the job id being a mandatory field.
    +        """
    +        return self.rayJobClient.get_job_status(job_id=job_id)
    +
    +    def list_jobs(self) -> List[JobDetails]:
    +        """
    +        Method for getting a list of current jobs in the Ray Cluster.
    +        """
    +        return self.rayJobClient.list_jobs()
    +
    +    def stop_job(self, job_id: str) -> (bool, str):
    +        """
    +        Method for stopping a job with the job id being a mandatory field.
    +        """
    +        stop_job_status = self.rayJobClient.stop_job(job_id=job_id)
    +        if stop_job_status:
    +            message = f"Successfully stopped Job {job_id}"
    +        else:
    +            message = f"Failed to stop Job, {job_id} could have already completed."
    +        return stop_job_status, message
    +
    +    def tail_job_logs(self, job_id: str) -> Iterator[str]:
    +        """
    +        Method for getting an iterator that follows the logs of a job with the job id being a mandatory field.
    +        """
    +        return self.rayJobClient.tail_job_logs(job_id=job_id)
    +
    +

    Methods

    +
    +
    +def delete_job(self, job_id: str) ‑> () +
    +
    +

    Method for deleting jobs with the job id being a mandatory field.
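A small sketch of consuming the (status, message) pair it returns; the dashboard address and job id are placeholders:

from codeflare_sdk.job.ray_jobs import RayJobClient

client = RayJobClient(address="http://ray-dashboard-example.apps.example.com")
deleted, message = client.delete_job(job_id="raysubmit_example123")
print(message)  # "Successfully deleted Job ..." or "Failed to delete Job ..."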

    +
    + +Expand source code + +
    def delete_job(self, job_id: str) -> (bool, str):
    +    """
    +    Method for deleting jobs with the job id being a mandatory field.
    +    """
    +    deletion_status = self.rayJobClient.delete_job(job_id=job_id)
    +
    +    if deletion_status:
    +        message = f"Successfully deleted Job {job_id}"
    +    else:
    +        message = f"Failed to delete Job {job_id}"
    +
    +    return deletion_status, message
    +
    +
    +
    +def get_address(self) ‑> str +
    +
    +

    Method for getting the address from the RayJobClient

    +
    + +Expand source code + +
    def get_address(self) -> str:
    +    """
    +    Method for getting the address from the RayJobClient
    +    """
    +    return self.rayJobClient.get_address()
    +
    +
    +
    +def get_job_info(self, job_id: str) +
    +
    +

    Method for getting the job info with the job id being a mandatory field.

    +
    + +Expand source code + +
    def get_job_info(self, job_id: str):
    +    """
    +    Method for getting the job info with the job id being a mandatory field.
    +    """
    +    return self.rayJobClient.get_job_info(job_id=job_id)
    +
    +
    +
    +def get_job_logs(self, job_id: str) ‑> str +
    +
    +

    Method for getting the job logs with the job id being a mandatory field.

    +
    + +Expand source code + +
    def get_job_logs(self, job_id: str) -> str:
    +    """
    +    Method for getting the job logs with the job id being a mandatory field.
    +    """
    +    return self.rayJobClient.get_job_logs(job_id=job_id)
    +
    +
    +
    +def get_job_status(self, job_id: str) ‑> str +
    +
    +

    Method for getting the job's status with the job id being a mandatory field.

    +
    + +Expand source code + +
    def get_job_status(self, job_id: str) -> str:
    +    """
    +    Method for getting the job's status with the job id being a mandatory field.
    +    """
    +    return self.rayJobClient.get_job_status(job_id=job_id)
    +
    +
    +
    +def list_jobs(self) ‑> List[ray.dashboard.modules.job.pydantic_models.JobDetails] +
    +
    +

    Method for getting a list of current jobs in the Ray Cluster.
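For example, a sketch that prints the id and status of each returned JobDetails entry (the address is a placeholder):

from codeflare_sdk.job.ray_jobs import RayJobClient

client = RayJobClient(address="http://ray-dashboard-example.apps.example.com")
for job in client.list_jobs():
    # JobDetails exposes fields such as submission_id and status.
    print(job.submission_id, job.status)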

    +
    + +Expand source code + +
    def list_jobs(self) -> List[JobDetails]:
    +    """
    +    Method for getting a list of current jobs in the Ray Cluster.
    +    """
    +    return self.rayJobClient.list_jobs()
    +
    +
    +
    +def stop_job(self, job_id: str) ‑> () +
    +
    +

    Method for stopping a job with the job id being a mandatory field.

    +
    + +Expand source code + +
    def stop_job(self, job_id: str) -> (bool, str):
    +    """
    +    Method for stopping a job with the job id being a mandatory field.
    +    """
    +    stop_job_status = self.rayJobClient.stop_job(job_id=job_id)
    +    if stop_job_status:
    +        message = f"Successfully stopped Job {job_id}"
    +    else:
    +        message = f"Failed to stop Job, {job_id} could have already completed."
    +    return stop_job_status, message
    +
    +
    +
    +def submit_job(self, entrypoint: str, job_id: Optional[str] = None, runtime_env: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, str]] = None, submission_id: Optional[str] = None, entrypoint_num_cpus: Union[int, float, ForwardRef(None)] = None, entrypoint_num_gpus: Union[int, float, ForwardRef(None)] = None, entrypoint_resources: Optional[Dict[str, float]] = None) ‑> str +
    +
    +

    Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.

    +

Parameters:
+entrypoint – The shell command to run for this job.
+submission_id – A unique ID for this job.
+runtime_env – The runtime environment to install and run this job in.
+metadata – Arbitrary data to store along with this job.
+job_id – DEPRECATED. This has been renamed to submission_id
+entrypoint_num_cpus – The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
+entrypoint_num_gpus – The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
+entrypoint_resources – The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.
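A hypothetical submission using a runtime environment; the address, script, and dependency list are placeholders:

from codeflare_sdk.job.ray_jobs import RayJobClient

client = RayJobClient(address="http://ray-dashboard-example.apps.example.com")
submission_id = client.submit_job(
    entrypoint="python train.py --epochs 1",
    runtime_env={"working_dir": "./", "pip": ["torch"]},
    entrypoint_num_cpus=1,
)
print(submission_id)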

    +
    + +Expand source code + +
    def submit_job(
    +    self,
    +    entrypoint: str,
    +    job_id: Optional[str] = None,
    +    runtime_env: Optional[Dict[str, Any]] = None,
    +    metadata: Optional[Dict[str, str]] = None,
    +    submission_id: Optional[str] = None,
    +    entrypoint_num_cpus: Optional[Union[int, float]] = None,
    +    entrypoint_num_gpus: Optional[Union[int, float]] = None,
    +    entrypoint_resources: Optional[Dict[str, float]] = None,
    +) -> str:
    +    """
    +    Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.
    +
    +    Parameters:
    +    entrypoint -- The shell command to run for this job.
    +    submission_id -- A unique ID for this job.
    +    runtime_env -- The runtime environment to install and run this job in.
    +    metadata -- Arbitrary data to store along with this job.
    +    job_id -- DEPRECATED. This has been renamed to submission_id
    +    entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    +    entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    +    entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.
    +    """
    +    return self.rayJobClient.submit_job(
    +        entrypoint=entrypoint,
    +        job_id=job_id,
    +        runtime_env=runtime_env,
    +        metadata=metadata,
    +        submission_id=submission_id,
    +        entrypoint_num_cpus=entrypoint_num_cpus,
    +        entrypoint_num_gpus=entrypoint_num_gpus,
    +        entrypoint_resources=entrypoint_resources,
    +    )
    +
    +
    +
    +def tail_job_logs(self, job_id: str) ‑> Iterator[str] +
    +
    +

    Method for getting an iterator that follows the logs of a job with the job id being a mandatory field.
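A sketch of following a job's logs; note that the underlying Ray JobSubmissionClient streams them asynchronously, so the iterator is typically consumed with async for. The address and job id are placeholders:

import asyncio

from codeflare_sdk.job.ray_jobs import RayJobClient

client = RayJobClient(address="http://ray-dashboard-example.apps.example.com")

async def follow_logs(job_id: str):
    # Each yielded chunk may contain one or more log lines.
    async for lines in client.tail_job_logs(job_id):
        print(lines, end="")

asyncio.run(follow_logs("raysubmit_example123"))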

    +
    + +Expand source code + +
    def tail_job_logs(self, job_id: str) -> Iterator[str]:
    +    """
    +    Method for getting an iterator that follows the logs of a job with the job id being a mandatory field.
    +    """
    +    return self.rayJobClient.tail_job_logs(job_id=job_id)
    +
    +
    +
    +
    +
    +
    +
    + +
+
+
+
diff --git a/docs/utils/generate_cert.html b/docs/detailed-documentation/utils/generate_cert.html
similarity index 100%
rename from docs/utils/generate_cert.html
rename to docs/detailed-documentation/utils/generate_cert.html
diff --git a/docs/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html
similarity index 85%
rename from docs/utils/generate_yaml.html
rename to docs/detailed-documentation/utils/generate_yaml.html
index c5a6affa..04f95bc9 100644
--- a/docs/utils/generate_yaml.html
+++ b/docs/detailed-documentation/utils/generate_yaml.html
@@ -60,8 +60,6 @@

    Module codeflare_sdk.utils.generate_yaml

from base64 import b64encode
from urllib3.util import parse_url
-from kubernetes import client, config
-
 from .kube_api_helpers import _get_api_host
@@ -87,22 +85,60 @@

    Module codeflare_sdk.utils.generate_yaml

    return f"ray-dashboard-{cluster_name}" -# Check if the ingress api cluster resource exists +# Check if the routes api exists def is_openshift_cluster(): try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - api_instance.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - - return True - except client.ApiException as e: # pragma: no cover - if e.status == 404 or e.status == 403: - return False + for api in client.ApisApi(api_config_handler()).get_api_versions().groups: + for v in api.versions: + if "route.openshift.io/v1" in v.group_version: + return True else: - print(f"Error detecting cluster type defaulting to Kubernetes: {e}") return False + except client.ApiException as e: # pragma: no cover + print(f"Error detecting cluster type defaulting to Kubernetes: {e}") + return False + + +def update_dashboard_route(route_item, cluster_name, namespace): + metadata = route_item.get("generictemplate", {}).get("metadata") + metadata["name"] = gen_dashboard_ingress_name(cluster_name) + metadata["namespace"] = namespace + metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" + spec = route_item.get("generictemplate", {}).get("spec") + spec["to"]["name"] = f"{cluster_name}-head-svc" + + +# ToDo: refactor the update_x_route() functions +def update_rayclient_route(route_item, cluster_name, namespace): + metadata = route_item.get("generictemplate", {}).get("metadata") + metadata["name"] = f"rayclient-{cluster_name}" + metadata["namespace"] = namespace + metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" + spec = route_item.get("generictemplate", {}).get("spec") + spec["to"]["name"] = f"{cluster_name}-head-svc" + + +def update_dashboard_exposure( + ingress_item, route_item, cluster_name, namespace, ingress_options, ingress_domain +): + if is_openshift_cluster(): + update_dashboard_route(route_item, cluster_name, namespace) + else: + update_dashboard_ingress( + ingress_item, cluster_name, namespace, ingress_options, ingress_domain + ) + + +def update_rayclient_exposure( + client_route_item, client_ingress_item, cluster_name, namespace, ingress_domain +): + if is_openshift_cluster(): + update_rayclient_route(client_route_item, cluster_name, namespace) + else: + update_rayclient_ingress( + client_ingress_item, cluster_name, namespace, ingress_domain + ) def update_dashboard_ingress( @@ -154,25 +190,15 @@

    Module codeflare_sdk.utils.generate_yaml

    "name" ] = f"{cluster_name}-head-svc" else: - metadata["name"] = f"ray-dashboard-{cluster_name}" + spec["ingressClassName"] = "nginx" + metadata["name"] = gen_dashboard_ingress_name(cluster_name) metadata["namespace"] = namespace spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ "name" ] = f"{cluster_name}-head-svc" - if is_openshift_cluster(): - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - del spec["ingressClassName"] - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: + if ingress_domain is None: raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + "ingress_domain is invalid. Please specify an ingress domain" ) else: domain = ingress_domain @@ -193,38 +219,19 @@

    Module codeflare_sdk.utils.generate_yaml

    "name" ] = f"{cluster_name}-head-svc" - if is_openshift_cluster(): - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - ingressClassName = "openshift-default" - annotations = { - "nginx.ingress.kubernetes.io/rewrite-target": "/", - "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "route.openshift.io/termination": "passthrough", - } - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" - ) - else: - domain = ingress_domain + if ingress_domain is not None: ingressClassName = "nginx" annotations = { "nginx.ingress.kubernetes.io/rewrite-target": "/", "nginx.ingress.kubernetes.io/ssl-redirect": "true", "nginx.ingress.kubernetes.io/ssl-passthrough": "true", } + else: + raise ValueError("ingress_domain is invalid. Please specify a domain") metadata["annotations"] = annotations spec["ingressClassName"] = ingressClassName - spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{domain}" + spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{ingress_domain}" def update_names(yaml, item, appwrapper_name, cluster_name, namespace): @@ -428,8 +435,9 @@

    Module codeflare_sdk.utils.generate_yaml

    def enable_local_interactive(resources, cluster_name, namespace, ingress_domain): - rayclient_ingress_item = resources["resources"].get("GenericItems")[2] - ca_secret_item = resources["resources"].get("GenericItems")[3] + rayclient_ingress_item = resources["resources"].get("GenericItems")[3] + rayclient_route_item = resources["resources"].get("GenericItems")[4] + ca_secret_item = resources["resources"].get("GenericItems")[5] item = resources["resources"].get("GenericItems")[0] update_ca_secret(ca_secret_item, cluster_name, namespace) # update_ca_secret_volumes @@ -453,26 +461,21 @@

    Module codeflare_sdk.utils.generate_yaml

    command = command.replace("deployment-name", cluster_name) - if is_openshift_cluster(): - # We can try get the domain through checking ingresses.config.openshift.io - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: + if ingress_domain is None: raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + "ingress_domain is invalid. For creating the client route/ingress please specify an ingress domain" ) else: domain = ingress_domain command = command.replace("server-name", domain) - update_rayclient_ingress(rayclient_ingress_item, cluster_name, namespace, domain) + update_rayclient_exposure( + rayclient_route_item, + rayclient_ingress_item, + cluster_name, + namespace, + ingress_domain, + ) item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" @@ -509,26 +512,15 @@

    Module codeflare_sdk.utils.generate_yaml

    "containers" ][0]["volumeMounts"] - for i in range( - len( - generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ] - ) - ): - if ( - generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ][i]["name"] - == "create-cert" - ): - del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ][i] + del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ + "initContainers" + ] updated_items = [] for i in resources["GenericItems"][:]: - if "rayclient-deployment-name" in i["generictemplate"]["metadata"]["name"]: + if "rayclient-deployment-ingress" in i["generictemplate"]["metadata"]["name"]: + continue + if "rayclient-deployment-route" in i["generictemplate"]["metadata"]["name"]: continue if "ca-secret-deployment-name" in i["generictemplate"]["metadata"]["name"]: continue @@ -537,6 +529,26 @@

    Module codeflare_sdk.utils.generate_yaml

    resources["GenericItems"] = updated_items +def delete_route_or_ingress(resources): + if is_openshift_cluster(): + client_to_remove_name = "rayclient-deployment-ingress" + dashboard_to_remove_name = "ray-dashboard-deployment-ingress" + else: + client_to_remove_name = "rayclient-deployment-route" + dashboard_to_remove_name = "ray-dashboard-deployment-route" + + updated_items = [] + for i in resources["GenericItems"][:]: + if dashboard_to_remove_name in i["generictemplate"]["metadata"]["name"]: + continue + elif client_to_remove_name in i["generictemplate"]["metadata"]["name"]: + continue + + updated_items.append(i) + + resources["GenericItems"] = updated_items + + def write_user_appwrapper(user_yaml, output_file_name): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -670,6 +682,7 @@

    Module codeflare_sdk.utils.generate_yaml

    resources = user_yaml.get("spec", "resources") item = resources["resources"].get("GenericItems")[0] ingress_item = resources["resources"].get("GenericItems")[1] + route_item = resources["resources"].get("GenericItems")[2] update_names(user_yaml, item, appwrapper_name, cluster_name, namespace) update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) @@ -702,14 +715,21 @@

    Module codeflare_sdk.utils.generate_yaml

    head_memory, head_gpus, ) - update_dashboard_ingress( - ingress_item, cluster_name, namespace, ingress_options, ingress_domain + update_dashboard_exposure( + ingress_item, + route_item, + cluster_name, + namespace, + ingress_options, + ingress_domain, ) if local_interactive: enable_local_interactive(resources, cluster_name, namespace, ingress_domain) else: disable_raycluster_tls(resources["resources"]) + delete_route_or_ingress(resources["resources"]) + if openshift_oauth: enable_openshift_oauth(user_yaml, cluster_name, namespace) @@ -729,6 +749,35 @@

    Module codeflare_sdk.utils.generate_yaml

    Functions

    +
    +def delete_route_or_ingress(resources) +
    +
    +
    +
    + +Expand source code + +
    def delete_route_or_ingress(resources):
    +    if is_openshift_cluster():
    +        client_to_remove_name = "rayclient-deployment-ingress"
    +        dashboard_to_remove_name = "ray-dashboard-deployment-ingress"
    +    else:
    +        client_to_remove_name = "rayclient-deployment-route"
    +        dashboard_to_remove_name = "ray-dashboard-deployment-route"
    +
    +    updated_items = []
    +    for i in resources["GenericItems"][:]:
    +        if dashboard_to_remove_name in i["generictemplate"]["metadata"]["name"]:
    +            continue
    +        elif client_to_remove_name in i["generictemplate"]["metadata"]["name"]:
    +            continue
    +
    +        updated_items.append(i)
    +
    +    resources["GenericItems"] = updated_items
    +
    +
    def disable_raycluster_tls(resources)
    @@ -768,26 +817,15 @@

    Functions

    "containers" ][0]["volumeMounts"] - for i in range( - len( - generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ] - ) - ): - if ( - generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ][i]["name"] - == "create-cert" - ): - del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ][i] + del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ + "initContainers" + ] updated_items = [] for i in resources["GenericItems"][:]: - if "rayclient-deployment-name" in i["generictemplate"]["metadata"]["name"]: + if "rayclient-deployment-ingress" in i["generictemplate"]["metadata"]["name"]: + continue + if "rayclient-deployment-route" in i["generictemplate"]["metadata"]["name"]: continue if "ca-secret-deployment-name" in i["generictemplate"]["metadata"]["name"]: continue @@ -806,8 +844,9 @@

    Functions

    Expand source code
    def enable_local_interactive(resources, cluster_name, namespace, ingress_domain):
    -    rayclient_ingress_item = resources["resources"].get("GenericItems")[2]
    -    ca_secret_item = resources["resources"].get("GenericItems")[3]
    +    rayclient_ingress_item = resources["resources"].get("GenericItems")[3]
    +    rayclient_route_item = resources["resources"].get("GenericItems")[4]
    +    ca_secret_item = resources["resources"].get("GenericItems")[5]
         item = resources["resources"].get("GenericItems")[0]
         update_ca_secret(ca_secret_item, cluster_name, namespace)
         # update_ca_secret_volumes
    @@ -831,26 +870,21 @@ 

    Functions

    command = command.replace("deployment-name", cluster_name) - if is_openshift_cluster(): - # We can try get the domain through checking ingresses.config.openshift.io - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: + if ingress_domain is None: raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + "ingress_domain is invalid. For creating the client route/ingress please specify an ingress domain" ) else: domain = ingress_domain command = command.replace("server-name", domain) - update_rayclient_ingress(rayclient_ingress_item, cluster_name, namespace, domain) + update_rayclient_exposure( + rayclient_route_item, + rayclient_ingress_item, + cluster_name, + namespace, + ingress_domain, + ) item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" @@ -984,6 +1018,7 @@

    Functions

    resources = user_yaml.get("spec", "resources") item = resources["resources"].get("GenericItems")[0] ingress_item = resources["resources"].get("GenericItems")[1] + route_item = resources["resources"].get("GenericItems")[2] update_names(user_yaml, item, appwrapper_name, cluster_name, namespace) update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) @@ -1016,14 +1051,21 @@

    Functions

    head_memory, head_gpus, ) - update_dashboard_ingress( - ingress_item, cluster_name, namespace, ingress_options, ingress_domain + update_dashboard_exposure( + ingress_item, + route_item, + cluster_name, + namespace, + ingress_options, + ingress_domain, ) if local_interactive: enable_local_interactive(resources, cluster_name, namespace, ingress_domain) else: disable_raycluster_tls(resources["resources"]) + delete_route_or_ingress(resources["resources"]) + if openshift_oauth: enable_openshift_oauth(user_yaml, cluster_name, namespace) @@ -1048,18 +1090,15 @@

    Functions

    def is_openshift_cluster():
         try:
             config_check()
    -        api_instance = client.CustomObjectsApi(api_config_handler())
    -        api_instance.get_cluster_custom_object(
    -            "config.openshift.io", "v1", "ingresses", "cluster"
    -        )
    -
    -        return True
    -    except client.ApiException as e:  # pragma: no cover
    -        if e.status == 404 or e.status == 403:
    -            return False
    +        for api in client.ApisApi(api_config_handler()).get_api_versions().groups:
    +            for v in api.versions:
    +                if "route.openshift.io/v1" in v.group_version:
    +                    return True
             else:
    -            print(f"Error detecting cluster type defaulting to Kubernetes: {e}")
    -            return False
+            return False
+    except client.ApiException as e:  # pragma: no cover
+        print(f"Error detecting cluster type defaulting to Kubernetes: {e}")
+        return False
    @@ -1181,6 +1220,26 @@

    Functions

    sys.exit("Error: malformed template")
    +
    +def update_dashboard_exposure(ingress_item, route_item, cluster_name, namespace, ingress_options, ingress_domain) +
    +
    +
    +
    + +Expand source code + +
    def update_dashboard_exposure(
    +    ingress_item, route_item, cluster_name, namespace, ingress_options, ingress_domain
    +):
    +    if is_openshift_cluster():
    +        update_dashboard_route(route_item, cluster_name, namespace)
    +    else:
    +        update_dashboard_ingress(
    +            ingress_item, cluster_name, namespace, ingress_options, ingress_domain
    +        )
    +
    +
    def update_dashboard_ingress(ingress_item, cluster_name, namespace, ingress_options, ingress_domain)
    @@ -1239,25 +1298,15 @@

    Functions

    "name" ] = f"{cluster_name}-head-svc" else: - metadata["name"] = f"ray-dashboard-{cluster_name}" + spec["ingressClassName"] = "nginx" + metadata["name"] = gen_dashboard_ingress_name(cluster_name) metadata["namespace"] = namespace spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ "name" ] = f"{cluster_name}-head-svc" - if is_openshift_cluster(): - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - del spec["ingressClassName"] - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: + if ingress_domain is None: raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" + "ingress_domain is invalid. Please specify an ingress domain" ) else: domain = ingress_domain @@ -1265,6 +1314,24 @@

    Functions

    spec["rules"][0]["host"] = f"ray-dashboard-{cluster_name}-{namespace}.{domain}"
    +
    +def update_dashboard_route(route_item, cluster_name, namespace) +
    +
    +
    +
    + +Expand source code + +
    def update_dashboard_route(route_item, cluster_name, namespace):
    +    metadata = route_item.get("generictemplate", {}).get("metadata")
    +    metadata["name"] = gen_dashboard_ingress_name(cluster_name)
    +    metadata["namespace"] = namespace
    +    metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc"
    +    spec = route_item.get("generictemplate", {}).get("spec")
    +    spec["to"]["name"] = f"{cluster_name}-head-svc"
    +
    +
    def update_env(spec, env)
    @@ -1438,6 +1505,26 @@

    Functions

    spec.pop("priority")
    +
    +def update_rayclient_exposure(client_route_item, client_ingress_item, cluster_name, namespace, ingress_domain) +
    +
    +
    +
    + +Expand source code + +
    def update_rayclient_exposure(
    +    client_route_item, client_ingress_item, cluster_name, namespace, ingress_domain
    +):
    +    if is_openshift_cluster():
    +        update_rayclient_route(client_route_item, cluster_name, namespace)
    +    else:
    +        update_rayclient_ingress(
    +            client_ingress_item, cluster_name, namespace, ingress_domain
    +        )
    +
    +
    def update_rayclient_ingress(ingress_item, cluster_name, namespace, ingress_domain)
    @@ -1460,38 +1547,37 @@

    Functions

    "name" ] = f"{cluster_name}-head-svc" - if is_openshift_cluster(): - try: - config_check() - api_client = client.CustomObjectsApi(api_config_handler()) - ingress = api_client.get_cluster_custom_object( - "config.openshift.io", "v1", "ingresses", "cluster" - ) - ingressClassName = "openshift-default" - annotations = { - "nginx.ingress.kubernetes.io/rewrite-target": "/", - "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "route.openshift.io/termination": "passthrough", - } - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - domain = ingress["spec"]["domain"] - elif ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. For Kubernetes Clusters please specify an ingress domain" - ) - else: - domain = ingress_domain + if ingress_domain is not None: ingressClassName = "nginx" annotations = { "nginx.ingress.kubernetes.io/rewrite-target": "/", "nginx.ingress.kubernetes.io/ssl-redirect": "true", "nginx.ingress.kubernetes.io/ssl-passthrough": "true", } + else: + raise ValueError("ingress_domain is invalid. Please specify a domain") metadata["annotations"] = annotations spec["ingressClassName"] = ingressClassName - spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{domain}"
    + spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{ingress_domain}" + + +
    +def update_rayclient_route(route_item, cluster_name, namespace) +
    +
    +
    +
+
+Expand source code
+
+
    def update_rayclient_route(route_item, cluster_name, namespace):
    +    metadata = route_item.get("generictemplate", {}).get("metadata")
    +    metadata["name"] = f"rayclient-{cluster_name}"
    +    metadata["namespace"] = namespace
    +    metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc"
    +    spec = route_item.get("generictemplate", {}).get("spec")
    +    spec["to"]["name"] = f"{cluster_name}-head-svc"
    @@ -1579,6 +1665,7 @@

    Index

  • Functions

      +
    • delete_route_or_ingress
    • disable_raycluster_tls
    • enable_local_interactive
    • enable_openshift_oauth
    • @@ -1590,7 +1677,9 @@

      Index

    • update_affinity
    • update_ca_secret
    • update_custompodresources
    • +
    • update_dashboard_exposure
    • update_dashboard_ingress
    • +
    • update_dashboard_route
    • update_env
    • update_image
    • update_image_pull_secrets
    • @@ -1598,7 +1687,9 @@

      Index

    • update_names
    • update_nodes
    • update_priority
    • +
    • update_rayclient_exposure
    • update_rayclient_ingress
    • +
    • update_rayclient_route
    • update_resources
    • write_components
    • write_user_appwrapper
    • diff --git a/docs/utils/index.html b/docs/detailed-documentation/utils/index.html similarity index 100% rename from docs/utils/index.html rename to docs/detailed-documentation/utils/index.html diff --git a/docs/utils/kube_api_helpers.html b/docs/detailed-documentation/utils/kube_api_helpers.html similarity index 100% rename from docs/utils/kube_api_helpers.html rename to docs/detailed-documentation/utils/kube_api_helpers.html diff --git a/docs/utils/openshift_oauth.html b/docs/detailed-documentation/utils/openshift_oauth.html similarity index 91% rename from docs/utils/openshift_oauth.html rename to docs/detailed-documentation/utils/openshift_oauth.html index e8df7482..db346904 100644 --- a/docs/utils/openshift_oauth.html +++ b/docs/detailed-documentation/utils/openshift_oauth.html @@ -62,7 +62,6 @@

      Module codeflare_sdk.utils.openshift_oauth

      def _create_or_replace_oauth_sa(namespace, oauth_sa_name, host): - api_client = api_config_handler() oauth_sa = client.V1ServiceAccount( api_version="v1", kind="ServiceAccount", @@ -75,12 +74,12 @@

      Module codeflare_sdk.utils.openshift_oauth

      ), ) try: - client.CoreV1Api(api_client).create_namespaced_service_account( + client.CoreV1Api(api_config_handler()).create_namespaced_service_account( namespace=namespace, body=oauth_sa ) except client.ApiException as e: if e.reason == "Conflict": - client.CoreV1Api(api_client).replace_namespaced_service_account( + client.CoreV1Api(api_config_handler()).replace_namespaced_service_account( namespace=namespace, body=oauth_sa, name=oauth_sa_name, @@ -90,7 +89,6 @@

      Module codeflare_sdk.utils.openshift_oauth

      def _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name): - api_client = api_config_handler() oauth_crb = client.V1ClusterRoleBinding( api_version="rbac.authorization.k8s.io/v1", kind="ClusterRoleBinding", @@ -107,14 +105,14 @@

      Module codeflare_sdk.utils.openshift_oauth

      ], ) try: - client.RbacAuthorizationV1Api(api_client).create_cluster_role_binding( + client.RbacAuthorizationV1Api(api_config_handler()).create_cluster_role_binding( body=oauth_crb ) except client.ApiException as e: if e.reason == "Conflict": - client.RbacAuthorizationV1Api(api_client).replace_cluster_role_binding( - body=oauth_crb, name=f"{cluster_name}-rb" - ) + client.RbacAuthorizationV1Api( + api_config_handler() + ).replace_cluster_role_binding(body=oauth_crb, name=f"{cluster_name}-rb") else: raise e @@ -126,19 +124,18 @@

      Module codeflare_sdk.utils.openshift_oauth

      def delete_openshift_oauth_objects(cluster_name, namespace): # NOTE: it might be worth adding error handling here, but shouldn't be necessary because cluster.down(...) checks # for an existing cluster before calling this => the objects should never be deleted twice - api_client = api_config_handler() oauth_sa_name = f"{cluster_name}-oauth-proxy" service_name = f"{cluster_name}-oauth" - client.CoreV1Api(api_client).delete_namespaced_service_account( + client.CoreV1Api(api_config_handler()).delete_namespaced_service_account( name=oauth_sa_name, namespace=namespace ) - client.CoreV1Api(api_client).delete_namespaced_service( + client.CoreV1Api(api_config_handler()).delete_namespaced_service( name=service_name, namespace=namespace ) - client.NetworkingV1Api(api_client).delete_namespaced_ingress( + client.NetworkingV1Api(api_config_handler()).delete_namespaced_ingress( name=f"{cluster_name}-ingress", namespace=namespace ) - client.RbacAuthorizationV1Api(api_client).delete_cluster_role_binding( + client.RbacAuthorizationV1Api(api_config_handler()).delete_cluster_role_binding( name=f"{cluster_name}-rb" ) @@ -151,7 +148,6 @@

      Module codeflare_sdk.utils.openshift_oauth

      service_name: str, port_name: str, ) -> client.V1Service: - api_client = api_config_handler() oauth_service = client.V1Service( api_version="v1", kind="Service", @@ -181,12 +177,12 @@

      Module codeflare_sdk.utils.openshift_oauth

      ), ) try: - client.CoreV1Api(api_client).create_namespaced_service( + client.CoreV1Api(api_config_handler()).create_namespaced_service( namespace=namespace, body=oauth_service ) except client.ApiException as e: if e.reason == "Conflict": - client.CoreV1Api(api_client).replace_namespaced_service( + client.CoreV1Api(api_config_handler()).replace_namespaced_service( namespace=namespace, body=oauth_service, name=service_name ) else: @@ -200,7 +196,6 @@

      Module codeflare_sdk.utils.openshift_oauth

      port_name: str, host: str, ) -> client.V1Ingress: - api_client = api_config_handler() ingress = client.V1Ingress( api_version="networking.k8s.io/v1", kind="Ingress", @@ -233,12 +228,12 @@

      Module codeflare_sdk.utils.openshift_oauth

      ), ) try: - client.NetworkingV1Api(api_client).create_namespaced_ingress( + client.NetworkingV1Api(api_config_handler()).create_namespaced_ingress( namespace=namespace, body=ingress ) except client.ApiException as e: if e.reason == "Conflict": - client.NetworkingV1Api(api_client).replace_namespaced_ingress( + client.NetworkingV1Api(api_config_handler()).replace_namespaced_ingress( namespace=namespace, body=ingress, name=f"{cluster_name}-ingress" ) else: @@ -298,19 +293,18 @@

      Functions

      def delete_openshift_oauth_objects(cluster_name, namespace):
           # NOTE: it might be worth adding error handling here, but shouldn't be necessary because cluster.down(...) checks
           # for an existing cluster before calling this => the objects should never be deleted twice
      -    api_client = api_config_handler()
           oauth_sa_name = f"{cluster_name}-oauth-proxy"
           service_name = f"{cluster_name}-oauth"
      -    client.CoreV1Api(api_client).delete_namespaced_service_account(
      +    client.CoreV1Api(api_config_handler()).delete_namespaced_service_account(
               name=oauth_sa_name, namespace=namespace
           )
      -    client.CoreV1Api(api_client).delete_namespaced_service(
      +    client.CoreV1Api(api_config_handler()).delete_namespaced_service(
               name=service_name, namespace=namespace
           )
      -    client.NetworkingV1Api(api_client).delete_namespaced_ingress(
      +    client.NetworkingV1Api(api_config_handler()).delete_namespaced_ingress(
               name=f"{cluster_name}-ingress", namespace=namespace
           )
      -    client.RbacAuthorizationV1Api(api_client).delete_cluster_role_binding(
      +    client.RbacAuthorizationV1Api(api_config_handler()).delete_cluster_role_binding(
               name=f"{cluster_name}-rb"
           )
      diff --git a/docs/utils/pretty_print.html b/docs/detailed-documentation/utils/pretty_print.html similarity index 100% rename from docs/utils/pretty_print.html rename to docs/detailed-documentation/utils/pretty_print.html From 29567c641f8e24c5b1f6bfd9b45b2cb09953e0ba Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 2 Jan 2024 13:49:59 +0000 Subject: [PATCH 107/496] Added missing quote --- docs/authentication.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/authentication.md b/docs/authentication.md index 56567cc3..20dd4452 100644 --- a/docs/authentication.md +++ b/docs/authentication.md @@ -11,7 +11,7 @@ auth = TokenAuthentication( token = "XXXXX", server = "XXXXX", skip_tls=False, - # ca_cert_path="/path/to/cert + # ca_cert_path="/path/to/cert" ) auth.login() # log out with auth.logout() From 2926dbc32ff1648a55549ee0ca3d301a7e9faecc Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 8 Jan 2024 14:58:17 +0000 Subject: [PATCH 108/496] Review changes: file paths --- docs/detailed-documentation/cluster/config.html | 2 +- docs/detailed-documentation/job/jobs.html | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/detailed-documentation/cluster/config.html b/docs/detailed-documentation/cluster/config.html index 62a17a64..a99747fe 100644 --- a/docs/detailed-documentation/cluster/config.html +++ b/docs/detailed-documentation/cluster/config.html @@ -100,7 +100,7 @@

      Classes

      class ClusterConfiguration -(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/mark/distributed-workloads-repos/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = '', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False, ingress_options: dict = <factory>, ingress_domain: str = None) +(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = '', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False, ingress_options: dict = <factory>, ingress_domain: str = None)

      This dataclass is used to specify resource requirements and other details, and diff --git a/docs/detailed-documentation/job/jobs.html b/docs/detailed-documentation/job/jobs.html index ce024011..08ce9833 100644 --- a/docs/detailed-documentation/job/jobs.html +++ b/docs/detailed-documentation/job/jobs.html @@ -323,7 +323,7 @@

      Methods

      class DDPJobDefinition -(script: Optional[str] = None, m: Optional[str] = None, script_args: Optional[List[str]] = None, name: Optional[str] = None, cpu: Optional[int] = None, gpu: Optional[int] = None, memMB: Optional[int] = None, h: Optional[str] = None, j: Optional[str] = None, env: Optional[Dict[str, str]] = None, max_retries: int = 0, mounts: Optional[List[str]] = None, rdzv_port: int = 29500, rdzv_backend: str = None, scheduler_args: Optional[Dict[str, str]] = None, image: Optional[str] = None, workspace: Optional[str] = 'file:///home/mark/distributed-workloads-repos/codeflare-sdk') +(script: Optional[str] = None, m: Optional[str] = None, script_args: Optional[List[str]] = None, name: Optional[str] = None, cpu: Optional[int] = None, gpu: Optional[int] = None, memMB: Optional[int] = None, h: Optional[str] = None, j: Optional[str] = None, env: Optional[Dict[str, str]] = None, max_retries: int = 0, mounts: Optional[List[str]] = None, rdzv_port: int = 29500, rdzv_backend: str = None, scheduler_args: Optional[Dict[str, str]] = None, image: Optional[str] = None, workspace: Optional[str] = 'file:///home/runner/work/codeflare-sdk')
      From ed9840b8e13f8f842e233e36c27684fc171b2f75 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 18 Jan 2024 14:59:43 +0000 Subject: [PATCH 109/496] Added description to job sub-module --- docs/detailed-documentation/job/index.html | 2 +- docs/detailed-documentation/job/jobs.html | 6 +++++- src/codeflare_sdk/job/jobs.py | 3 +++ 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/detailed-documentation/job/index.html b/docs/detailed-documentation/job/index.html index bd8991c3..f4bfe93f 100644 --- a/docs/detailed-documentation/job/index.html +++ b/docs/detailed-documentation/job/index.html @@ -28,7 +28,7 @@

      Sub-modules

      codeflare_sdk.job.jobs
      -
      +

      The jobs sub-module contains methods needed to submit Distributed Data Parallel(DDP) jobs to Ray Clusters created by the CodeFlare SDK.

      codeflare_sdk.job.ray_jobs
      diff --git a/docs/detailed-documentation/job/jobs.html b/docs/detailed-documentation/job/jobs.html index 08ce9833..4e06013b 100644 --- a/docs/detailed-documentation/job/jobs.html +++ b/docs/detailed-documentation/job/jobs.html @@ -5,7 +5,7 @@ codeflare_sdk.job.jobs API documentation - + @@ -22,6 +22,7 @@

      Module codeflare_sdk.job.jobs

      +

      The jobs sub-module contains methods needed to submit Distributed Data Parallel(DDP) jobs to Ray Clusters created by the CodeFlare SDK.

Expand source code
@@ -40,6 +41,9 @@

      Module codeflare_sdk.job.jobs

      # See the License for the specific language governing permissions and # limitations under the License. +""" +The jobs sub-module contains methods needed to submit Distributed Data Parallel(DDP) jobs to Ray Clusters created by the CodeFlare SDK. +""" import abc from typing import TYPE_CHECKING, Optional, Dict, List diff --git a/src/codeflare_sdk/job/jobs.py b/src/codeflare_sdk/job/jobs.py index c3814971..655107df 100644 --- a/src/codeflare_sdk/job/jobs.py +++ b/src/codeflare_sdk/job/jobs.py @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +""" +The jobs sub-module contains methods needed to submit Distributed Data Parallel(DDP) jobs to Ray Clusters created by the CodeFlare SDK. +""" import abc from typing import TYPE_CHECKING, Optional, Dict, List From db25cd339e6d5fe90e6986bd1a5617489947d451 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 18 Jan 2024 15:38:05 +0000 Subject: [PATCH 110/496] Updated documentation link in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e864189b..8228b685 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ For guided demos and basics walkthroughs, check out the following links: - Note that these notebooks will work with the latest `codeflare-sdk` PyPI release. For testing and experimentation with `main` branch, please use the [preview notebooks](https://github.com/project-codeflare/codeflare-sdk/tree/main/demo-notebooks/guided-demos/preview_nbs) - Additionally, we have a [video walkthrough](https://www.youtube.com/watch?v=U76iIfd9EmE) of these basic demos from June, 2023 -Full documentation can be found [here](https://project-codeflare.github.io/codeflare-sdk/) +Full documentation can be found [here](https://project-codeflare.github.io/codeflare-sdk/detailed-documentation) ## Installation From 65ed743954fdccb861cbc42fe74e9b2389d54964 Mon Sep 17 00:00:00 2001 From: Srihari Date: Thu, 11 Jan 2024 19:56:15 +0530 Subject: [PATCH 111/496] Create a Ray Cluster SDK upgrade scenarios --- go.mod | 2 +- go.sum | 4 +- tests/e2e/mnist_raycluster_sdk_test.go | 40 +- tests/e2e/mnist_rayjob.py | 46 +++ tests/e2e/start_ray_cluster.py | 52 +++ tests/e2e/support.go | 50 ++- tests/upgrade/raycluster_sdk_upgrade_test.go | 396 +++++++++++++++++++ 7 files changed, 548 insertions(+), 42 deletions(-) create mode 100644 tests/e2e/mnist_rayjob.py create mode 100644 tests/e2e/start_ray_cluster.py create mode 100644 tests/upgrade/raycluster_sdk_upgrade_test.go diff --git a/go.mod b/go.mod index 08f9e651..436ed65c 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.20 require ( github.com/onsi/gomega v1.27.10 - github.com/project-codeflare/codeflare-common v0.0.0-20231110155354-042fb171fcdb + github.com/project-codeflare/codeflare-common v0.0.0-20231129165224-988ba1da9069 github.com/project-codeflare/multi-cluster-app-dispatcher v1.37.0 github.com/ray-project/kuberay/ray-operator v1.0.0 k8s.io/api v0.26.3 diff --git a/go.sum b/go.sum index d85b8eb7..0d6034b8 100644 --- a/go.sum +++ b/go.sum @@ -369,8 +369,8 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/project-codeflare/codeflare-common v0.0.0-20231110155354-042fb171fcdb h1:L2Gdr2SlvshDKZY2KK6507AwzQ1NSfRbMQuz5dOsYNM= -github.com/project-codeflare/codeflare-common v0.0.0-20231110155354-042fb171fcdb/go.mod h1:zdi2GCYJX+QyxFWyCLMoTme3NMz/aucWDJWMqKfigxk= +github.com/project-codeflare/codeflare-common v0.0.0-20231129165224-988ba1da9069 h1:81+ma1mchF/LtAGsf+poAt50kJ/fLYjoTAcZOxci1Yc= +github.com/project-codeflare/codeflare-common v0.0.0-20231129165224-988ba1da9069/go.mod h1:zdi2GCYJX+QyxFWyCLMoTme3NMz/aucWDJWMqKfigxk= github.com/project-codeflare/multi-cluster-app-dispatcher v1.37.0 h1:oyhdLdc4BgA4zcH1zlRrSrYpzuVxV5QLDbyIXrwnQqs= github.com/project-codeflare/multi-cluster-app-dispatcher v1.37.0/go.mod h1:Yge6GRNpO9YIDfeL+XOcCE9xbmfCTD5C1h5dlW87mxQ= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= diff --git a/tests/e2e/mnist_raycluster_sdk_test.go b/tests/e2e/mnist_raycluster_sdk_test.go index 9d4dc862..963a4872 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.go +++ b/tests/e2e/mnist_raycluster_sdk_test.go @@ -17,9 +17,7 @@ limitations under the License. package e2e import ( - "strings" "testing" - "time" . "github.com/onsi/gomega" . "github.com/project-codeflare/codeflare-common/support" @@ -137,7 +135,7 @@ func TestMNISTRayClusterSDK(t *testing.T) { Command: []string{ "/bin/sh", "-c", "while [ ! -f /codeflare-sdk/pyproject.toml ]; do sleep 1; done; " + - "cp /test/* . && chmod +x install-codeflare-sdk.sh && ./install-codeflare-sdk.sh && python mnist_raycluster_sdk.py " + namespace.Name, + "cp /test/* . && chmod +x install-codeflare-sdk.sh && ./install-codeflare-sdk.sh && python mnist_raycluster_sdk.py " + namespace.Name, }, VolumeMounts: []corev1.VolumeMount{ { @@ -194,40 +192,8 @@ func TestMNISTRayClusterSDK(t *testing.T) { test.Expect(err).NotTo(HaveOccurred()) test.T().Logf("Created Job %s/%s successfully", job.Namespace, job.Name) - go func() { - // Checking if pod is found and running - podName := "" - foundPod := false - for !foundPod { - pods, _ := test.Client().Core().CoreV1().Pods(namespace.Name).List(test.Ctx(), metav1.ListOptions{ - LabelSelector: "job-name=sdk", - }) - for _, pod := range pods.Items { - if strings.HasPrefix(pod.Name, "sdk-") && pod.Status.Phase == corev1.PodRunning { - podName = pod.Name - foundPod = true - test.T().Logf("Pod is running!") - break - } - } - if !foundPod { - test.T().Logf("Waiting for pod to start...") - time.Sleep(5 * time.Second) - } - } - - // Get rest config - restConfig, err := GetRestConfig(test); if err != nil { - test.T().Errorf("Error getting rest config: %v", err) - } - - // Copy codeflare-sdk to the pod - srcDir := "../.././" - dstDir := "/codeflare-sdk" - if err := CopyToPod(test, namespace.Name, podName, restConfig, srcDir, dstDir); err != nil { - test.T().Errorf("Error copying codeflare-sdk to pod: %v", err) - } - }() + // Setup the codeflare-sdk inside the pod associated to the created job + SetupCodeflareSDKInsidePod(test, namespace, job.Name) test.T().Logf("Waiting for Job %s/%s to complete", job.Namespace, job.Name) test.Eventually(Job(test, job.Namespace, job.Name), TestTimeoutLong).Should( diff --git a/tests/e2e/mnist_rayjob.py b/tests/e2e/mnist_rayjob.py new file mode 100644 index 00000000..8557a55c --- /dev/null +++ b/tests/e2e/mnist_rayjob.py @@ -0,0 +1,46 @@ +import sys + +from time import 
sleep + +from torchx.specs.api import AppState, is_terminal + +from codeflare_sdk.cluster.cluster import get_cluster +from codeflare_sdk.job.jobs import DDPJobDefinition + +namespace = sys.argv[1] + +cluster = get_cluster("mnist", namespace) + +cluster.details() + +jobdef = DDPJobDefinition( + name="mnist", + script="mnist.py", + scheduler_args={"requirements": "requirements.txt"}, +) +job = jobdef.submit(cluster) + +done = False +time = 0 +timeout = 900 +while not done: + status = job.status() + if is_terminal(status.state): + break + if not done: + print(status) + if timeout and time >= timeout: + raise TimeoutError(f"job has timed out after waiting {timeout}s") + sleep(5) + time += 5 + +print(f"Job has completed: {status.state}") + +print(job.logs()) + +cluster.down() + +if not status.state == AppState.SUCCEEDED: + exit(1) +else: + exit(0) diff --git a/tests/e2e/start_ray_cluster.py b/tests/e2e/start_ray_cluster.py new file mode 100644 index 00000000..774be8f0 --- /dev/null +++ b/tests/e2e/start_ray_cluster.py @@ -0,0 +1,52 @@ +import sys +import os + +from time import sleep + +from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration + +namespace = sys.argv[1] +ray_image = os.getenv("RAY_IMAGE") +host = os.getenv("CLUSTER_HOSTNAME") + +ingress_options = {} +if host is not None: + ingress_options = { + "ingresses": [ + { + "ingressName": "ray-dashboard", + "port": 8265, + "pathType": "Prefix", + "path": "/", + "host": host, + }, + ] + } + +cluster = Cluster( + ClusterConfiguration( + name="mnist", + namespace=namespace, + num_workers=1, + head_cpus="500m", + head_memory=2, + min_cpus="500m", + max_cpus=1, + min_memory=1, + max_memory=2, + num_gpus=0, + instascale=False, + image=ray_image, + ingress_options=ingress_options, + ) +) + +cluster.up() + +cluster.status() + +cluster.wait_ready() + +cluster.status() + +cluster.details() diff --git a/tests/e2e/support.go b/tests/e2e/support.go index 9ef8e176..c3940fe2 100644 --- a/tests/e2e/support.go +++ b/tests/e2e/support.go @@ -20,9 +20,14 @@ import ( "embed" "os" "path/filepath" + "strings" + "time" "github.com/onsi/gomega" + "github.com/project-codeflare/codeflare-common/support" + corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/api/meta" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/runtime/serializer" "k8s.io/cli-runtime/pkg/genericclioptions" @@ -33,8 +38,6 @@ import ( "k8s.io/kubectl/pkg/cmd/cp" "k8s.io/kubectl/pkg/cmd/util" "k8s.io/kubectl/pkg/scheme" - - "github.com/project-codeflare/codeflare-common/support" ) //go:embed *.py *.txt *.sh @@ -109,3 +112,46 @@ func (r restClientGetter) ToDiscoveryClient() (discovery.CachedDiscoveryInterfac func (r restClientGetter) ToRESTMapper() (meta.RESTMapper, error) { return nil, nil } + +func SetupCodeflareSDKInsidePod(test support.Test, namespace *corev1.Namespace, labelName string) { + + // Get pod name + podName := GetPodName(test, namespace, labelName) + + // Get rest config + restConfig, err := GetRestConfig(test) + if err != nil { + test.T().Errorf("Error getting rest config: %v", err) + } + + // Copy codeflare-sdk to the pod + srcDir := "../.././" + dstDir := "/codeflare-sdk" + if err := CopyToPod(test, namespace.Name, podName, restConfig, srcDir, dstDir); err != nil { + test.T().Errorf("Error copying codeflare-sdk to pod: %v", err) + } +} + +func GetPodName(test support.Test, namespace *corev1.Namespace, labelName string) string { + podName := "" + foundPod := 
false + for !foundPod { + pods, _ := test.Client().Core().CoreV1().Pods(namespace.Name).List(test.Ctx(), metav1.ListOptions{ + LabelSelector: "job-name=" + labelName, + }) + for _, pod := range pods.Items { + + if strings.HasPrefix(pod.Name, labelName+"-") && pod.Status.Phase == corev1.PodRunning { + podName = pod.Name + foundPod = true + test.T().Logf("Pod is running!") + break + } + } + if !foundPod { + test.T().Logf("Waiting for pod to start...") + time.Sleep(5 * time.Second) + } + } + return podName +} diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.go b/tests/upgrade/raycluster_sdk_upgrade_test.go new file mode 100644 index 00000000..6b3c93cf --- /dev/null +++ b/tests/upgrade/raycluster_sdk_upgrade_test.go @@ -0,0 +1,396 @@ +/* +Copyright 2023. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package upgrade + +import ( + "fmt" + "testing" + + . "github.com/onsi/gomega" + . "github.com/project-codeflare/codeflare-common/support" + mcadv1beta1 "github.com/project-codeflare/multi-cluster-app-dispatcher/pkg/apis/controller/v1beta1" + rayv1 "github.com/ray-project/kuberay/ray-operator/apis/ray/v1" + + batchv1 "k8s.io/api/batch/v1" + corev1 "k8s.io/api/core/v1" + rbacv1 "k8s.io/api/rbac/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + + . 
"github.com/project-codeflare/codeflare-sdk/tests/e2e" +) + +var ( + nsName = "test-ns-rayclusterupgrade" +) + +// Creates a Ray cluster +func TestMNISTRayClusterUp(t *testing.T) { + + test := With(t) + + // Create a namespace + namespace := CreateTestNamespaceWithName(test, nsName) + test.T().Logf("Created namespace %s successfully", namespace.Name) + + // Delete namespace only if test failed + defer func() { + if t.Failed() { + DeleteTestNamespace(test, namespace) + } else { + StoreNamespaceLogs(test, namespace) + } + }() + + // Test configuration + config := CreateConfigMap(test, namespace.Name, map[string][]byte{ + // SDK script + "start_ray_cluster.py": ReadFile(test, "start_ray_cluster.py"), + // codeflare-sdk installation script + "install-codeflare-sdk.sh": ReadFile(test, "install-codeflare-sdk.sh"), + }) + + // Create RBAC, retrieve token for user with limited rights + policyRules := []rbacv1.PolicyRule{ + { + Verbs: []string{"get", "create", "delete", "list", "patch", "update"}, + APIGroups: []string{mcadv1beta1.GroupName}, + Resources: []string{"appwrappers"}, + }, + { + Verbs: []string{"get", "list"}, + APIGroups: []string{rayv1.GroupVersion.Group}, + Resources: []string{"rayclusters", "rayclusters/status"}, + }, + { + Verbs: []string{"get", "list"}, + APIGroups: []string{"route.openshift.io"}, + Resources: []string{"routes"}, + }, + { + Verbs: []string{"get", "list"}, + APIGroups: []string{"networking.k8s.io"}, + Resources: []string{"ingresses"}, + }, + } + + sa := CreateServiceAccount(test, namespace.Name) + role := CreateRole(test, namespace.Name, policyRules) + CreateRoleBinding(test, namespace.Name, sa, role) + + job := &batchv1.Job{ + TypeMeta: metav1.TypeMeta{ + APIVersion: batchv1.SchemeGroupVersion.String(), + Kind: "Job", + }, + ObjectMeta: metav1.ObjectMeta{ + Name: "sdk", + Namespace: namespace.Name, + }, + Spec: batchv1.JobSpec{ + Completions: Ptr(int32(1)), + Parallelism: Ptr(int32(1)), + BackoffLimit: Ptr(int32(0)), + Template: corev1.PodTemplateSpec{ + Spec: corev1.PodSpec{ + Volumes: []corev1.Volume{ + { + Name: "test", + VolumeSource: corev1.VolumeSource{ + ConfigMap: &corev1.ConfigMapVolumeSource{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: config.Name, + }, + }, + }, + }, + { + Name: "codeflare-sdk", + VolumeSource: corev1.VolumeSource{ + EmptyDir: &corev1.EmptyDirVolumeSource{}, + }, + }, + { + Name: "workdir", + VolumeSource: corev1.VolumeSource{ + EmptyDir: &corev1.EmptyDirVolumeSource{}, + }, + }, + }, + Containers: []corev1.Container{ + { + Name: "test", + // FIXME: switch to base Python image once the dependency on OpenShift CLI is removed + // See https://github.com/project-codeflare/codeflare-sdk/pull/146 + Image: "quay.io/opendatahub/notebooks:jupyter-minimal-ubi8-python-3.8-4c8f26e", + Env: []corev1.EnvVar{ + {Name: "PYTHONUSERBASE", Value: "/workdir"}, + {Name: "RAY_IMAGE", Value: GetRayImage()}, + }, + Command: []string{ + "/bin/sh", "-c", + "while [ ! -f /codeflare-sdk/pyproject.toml ]; do sleep 1; done; " + + "cp /test/* . 
&& chmod +x install-codeflare-sdk.sh && ./install-codeflare-sdk.sh && python start_ray_cluster.py " + namespace.Name, + }, + VolumeMounts: []corev1.VolumeMount{ + { + Name: "test", + MountPath: "/test", + }, + { + Name: "codeflare-sdk", + MountPath: "/codeflare-sdk", + }, + { + Name: "workdir", + MountPath: "/workdir", + }, + }, + WorkingDir: "/workdir", + SecurityContext: &corev1.SecurityContext{ + AllowPrivilegeEscalation: Ptr(false), + SeccompProfile: &corev1.SeccompProfile{ + Type: "RuntimeDefault", + }, + Capabilities: &corev1.Capabilities{ + Drop: []corev1.Capability{"ALL"}, + }, + RunAsNonRoot: Ptr(true), + }, + }, + }, + RestartPolicy: corev1.RestartPolicyNever, + ServiceAccountName: sa.Name, + }, + }, + }, + } + if GetClusterType(test) == KindCluster { + // Take first KinD node and redirect pod hostname requests there + node := GetNodes(test)[0] + hostname := GetClusterHostname(test) + IP := GetNodeInternalIP(test, node) + + test.T().Logf("Setting KinD cluster hostname '%s' to node IP '%s' for SDK pod", hostname, IP) + job.Spec.Template.Spec.HostAliases = []corev1.HostAlias{ + { + IP: IP, + Hostnames: []string{hostname}, + }, + } + + // Propagate hostname into Python code as env variable + hostnameEnvVar := corev1.EnvVar{Name: "CLUSTER_HOSTNAME", Value: hostname} + job.Spec.Template.Spec.Containers[0].Env = append(job.Spec.Template.Spec.Containers[0].Env, hostnameEnvVar) + fmt.Printf("CLUSTER_HOSTNAME environment variable value: %s\n", hostname) + test.T().Logf("CLUSTER_HOSTNAME environment variable value: %s", hostname) + } + + job, err := test.Client().Core().BatchV1().Jobs(namespace.Name).Create(test.Ctx(), job, metav1.CreateOptions{}) + test.Expect(err).NotTo(HaveOccurred()) + test.T().Logf("Created Job %s/%s successfully", job.Namespace, job.Name) + + // Setup the codeflare-sdk inside the pod associated to the created job + SetupCodeflareSDKInsidePod(test, namespace, job.Name) + + test.T().Logf("Waiting for Job %s/%s to complete", job.Namespace, job.Name) + test.Eventually(Job(test, job.Namespace, job.Name), TestTimeoutLong).Should( + Or( + WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue)), + WithTransform(ConditionStatus(batchv1.JobFailed), Equal(corev1.ConditionTrue)), + )) + + // Assert the job has completed successfully + test.Expect(GetJob(test, job.Namespace, job.Name)). + To(WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue))) +} + +// Submit a Job to the Ray cluster and trains the MNIST dataset using the CodeFlare SDK. 
+func TestMnistJobSubmit(t *testing.T) { + + test := With(t) + + namespace := GetNamespaceWithName(test, nsName) + + //delete the namespace after test complete + defer DeleteTestNamespace(test, namespace) + + // Test configuration + config := CreateConfigMap(test, namespace.Name, map[string][]byte{ + // SDK script + "mnist_rayjob.py": ReadFile(test, "mnist_rayjob.py"), + // pip requirements + "requirements.txt": ReadFile(test, "mnist_pip_requirements.txt"), + // MNIST training script + "mnist.py": ReadFile(test, "mnist.py"), + // codeflare-sdk installation script + "install-codeflare-sdk.sh": ReadFile(test, "install-codeflare-sdk.sh"), + }) + + // Create RBAC, retrieve token for user with limited rights + policyRules := []rbacv1.PolicyRule{ + { + Verbs: []string{"get", "create", "delete", "list", "patch", "update"}, + APIGroups: []string{mcadv1beta1.GroupName}, + Resources: []string{"appwrappers"}, + }, + { + Verbs: []string{"get", "list"}, + APIGroups: []string{rayv1.GroupVersion.Group}, + Resources: []string{"rayclusters", "rayclusters/status"}, + }, + { + Verbs: []string{"get", "list"}, + APIGroups: []string{"route.openshift.io"}, + Resources: []string{"routes"}, + }, + { + Verbs: []string{"get", "list"}, + APIGroups: []string{"networking.k8s.io"}, + Resources: []string{"ingresses"}, + }, + } + + serviceAccount := CreateServiceAccount(test, namespace.Name) + role := CreateRole(test, namespace.Name, policyRules) + CreateRoleBinding(test, namespace.Name, serviceAccount, role) + + job := &batchv1.Job{ + TypeMeta: metav1.TypeMeta{ + APIVersion: batchv1.SchemeGroupVersion.String(), + Kind: "Job", + }, + ObjectMeta: metav1.ObjectMeta{ + Name: "rayjob", + Namespace: namespace.Name, + }, + Spec: batchv1.JobSpec{ + Completions: Ptr(int32(1)), + Parallelism: Ptr(int32(1)), + BackoffLimit: Ptr(int32(0)), + Template: corev1.PodTemplateSpec{ + Spec: corev1.PodSpec{ + Volumes: []corev1.Volume{ + { + Name: "test", + VolumeSource: corev1.VolumeSource{ + ConfigMap: &corev1.ConfigMapVolumeSource{ + LocalObjectReference: corev1.LocalObjectReference{ + Name: config.Name, + }, + }, + }, + }, + { + Name: "codeflare-sdk", + VolumeSource: corev1.VolumeSource{ + EmptyDir: &corev1.EmptyDirVolumeSource{}, + }, + }, + { + Name: "workdir", + VolumeSource: corev1.VolumeSource{ + EmptyDir: &corev1.EmptyDirVolumeSource{}, + }, + }, + }, + Containers: []corev1.Container{ + { + Name: "test", + // FIXME: switch to base Python image once the dependency on OpenShift CLI is removed + // See https://github.com/project-codeflare/codeflare-sdk/pull/146 + Image: "quay.io/opendatahub/notebooks:jupyter-minimal-ubi8-python-3.8-4c8f26e", + Env: []corev1.EnvVar{ + {Name: "PYTHONUSERBASE", Value: "/workdir"}, + {Name: "RAY_IMAGE", Value: GetRayImage()}, + }, + Command: []string{ + "/bin/sh", "-c", + "while [ ! -f /codeflare-sdk/pyproject.toml ]; do sleep 1; done; " + + "cp /test/* . 
&& chmod +x install-codeflare-sdk.sh && ./install-codeflare-sdk.sh && python mnist_rayjob.py " + namespace.Name, + }, + VolumeMounts: []corev1.VolumeMount{ + { + Name: "test", + MountPath: "/test", + }, + { + Name: "codeflare-sdk", + MountPath: "/codeflare-sdk", + }, + { + Name: "workdir", + MountPath: "/workdir", + }, + }, + WorkingDir: "/workdir", + SecurityContext: &corev1.SecurityContext{ + AllowPrivilegeEscalation: Ptr(false), + SeccompProfile: &corev1.SeccompProfile{ + Type: "RuntimeDefault", + }, + Capabilities: &corev1.Capabilities{ + Drop: []corev1.Capability{"ALL"}, + }, + RunAsNonRoot: Ptr(true), + }, + }, + }, + RestartPolicy: corev1.RestartPolicyNever, + ServiceAccountName: serviceAccount.Name, + }, + }, + }, + } + + if GetClusterType(test) == KindCluster { + // Take first KinD node and redirect pod hostname requests there + node := GetNodes(test)[0] + hostname := GetClusterHostname(test) + IP := GetNodeInternalIP(test, node) + + test.T().Logf("Setting KinD cluster hostname '%s' to node IP '%s' for SDK pod", hostname, IP) + job.Spec.Template.Spec.HostAliases = []corev1.HostAlias{ + { + IP: IP, + Hostnames: []string{hostname}, + }, + } + + // Propagate hostname into Python code as env variable + hostnameEnvVar := corev1.EnvVar{Name: "CLUSTER_HOSTNAME", Value: hostname} + job.Spec.Template.Spec.Containers[0].Env = append(job.Spec.Template.Spec.Containers[0].Env, hostnameEnvVar) + } + + job, err := test.Client().Core().BatchV1().Jobs(nsName).Create(test.Ctx(), job, metav1.CreateOptions{}) + test.Expect(err).NotTo(HaveOccurred()) + test.T().Logf("Created Job %s/%s successfully", job.Namespace, job.Name) + + // Setup the codeflare-sdk inside the pod associated to the created job + SetupCodeflareSDKInsidePod(test, namespace, job.Name) + + test.T().Logf("Waiting for Job %s/%s to complete", job.Namespace, job.Name) + test.Eventually(Job(test, job.Namespace, job.Name), TestTimeoutLong).Should( + Or( + WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue)), + WithTransform(ConditionStatus(batchv1.JobFailed), Equal(corev1.ConditionTrue)), + )) + + // Assert the job has completed successfully + test.Expect(GetJob(test, job.Namespace, job.Name)). + To(WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue))) + +} From 097fb6a784512b4202decc102e960fca4fac4cba Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Fri, 19 Jan 2024 13:47:59 +0000 Subject: [PATCH 112/496] Changes in docs for release: v0.13.0 --- .../cluster/cluster.html | 213 +++++++++++++++++- .../cluster/config.html | 5 +- .../detailed-documentation/cluster/model.html | 6 +- docs/detailed-documentation/job/jobs.html | 2 +- docs/detailed-documentation/job/ray_jobs.html | 4 +- .../utils/generate_yaml.html | 80 ++++--- .../utils/kube_api_helpers.html | 6 +- .../utils/openshift_oauth.html | 130 +++++------ 8 files changed, 320 insertions(+), 126 deletions(-) diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html index bb0ab95f..3c91c6a6 100644 --- a/docs/detailed-documentation/cluster/cluster.html +++ b/docs/detailed-documentation/cluster/cluster.html @@ -524,7 +524,7 @@

      Module codeflare_sdk.cluster.cluster

      to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc, mcad=True): + def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -534,6 +534,10 @@

      Module codeflare_sdk.cluster.cluster

      "volumeMounts" in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0] ) + if local_interactive: + ingress_domain = get_ingress_domain_from_client( + rc["metadata"]["name"], rc["metadata"]["namespace"] + ) cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], @@ -564,6 +568,8 @@

      Module codeflare_sdk.cluster.cluster

      ]["image"], local_interactive=local_interactive, mcad=mcad, + ingress_domain=ingress_domain, + ingress_options=ingress_options, ) return Cluster(cluster_config) @@ -717,7 +723,62 @@

      Module codeflare_sdk.cluster.cluster

      for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: mcad = _check_aw_exists(cluster_name, namespace) - return Cluster.from_k8_cluster_object(rc, mcad=mcad) + + try: + config_check() + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(namespace) + ingress_host = None + ingress_options = {} + for ingress in ingresses.items: + # Search for ingress with AppWrapper name as the owner + if ( + "ingress-owner" in ingress.metadata.labels + and ingress.metadata.labels["ingress-owner"] == cluster_name + ): + ingress_host = ingress.spec.rules[0].host + if ( + "ingress-options" in ingress.metadata.labels + and ingress.metadata.labels["ingress-options"] == "true" + ): + ingress_name = ingress.metadata.name + port = ( + ingress.spec.rules[0] + .http.paths[0] + .backend.service.port.number + ) + annotations = ingress.metadata.annotations + path = ingress.spec.rules[0].http.paths[0].path + ingress_class_name = ingress.spec.ingress_class_name + path_type = ingress.spec.rules[0].http.paths[0].path_type + + ingress_options = { + "ingresses": [ + { + "ingressName": ingress_name, + "port": port, + "annotations": annotations, + "ingressClassName": ingress_class_name, + "pathType": path_type, + "path": path, + "host": ingress_host, + } + ] + } + except Exception as e: + return _kube_api_error_handling(e) + # We gather the ingress domain from the host + if ingress_host is not None and ingress_options == {}: + ingress_domain = ingress_host.split(".", 1)[1] + else: + ingress_domain = None + + return Cluster.from_k8_cluster_object( + rc, + mcad=mcad, + ingress_domain=ingress_domain, + ingress_options=ingress_options, + ) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" ) @@ -766,7 +827,10 @@

      Module codeflare_sdk.cluster.cluster

      return _kube_api_error_handling(e) for route in routes["items"]: - if route["spec"]["port"]["targetPort"] == "client": + if ( + route["spec"]["port"]["targetPort"] == "client" + or route["spec"]["port"]["targetPort"] == 10001 + ): domain = route["spec"]["host"] else: try: @@ -952,7 +1016,34 @@

      Module codeflare_sdk.cluster.cluster

      ) if ray.status == CodeFlareClusterStatus.READY: ray.status = RayClusterStatus.READY - return ray + return ray + + +def get_ingress_domain_from_client(cluster_name: str, namespace: str = "default"): + if is_openshift_cluster(): + try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + route = api_instance.get_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + name=f"rayclient-{cluster_name}", + ) + return route["spec"]["host"].split(".", 1)[1] + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) + else: + try: + config_check() + api_instance = client.NetworkingV1Api(api_config_handler()) + ingress = api_instance.read_namespaced_ingress( + f"rayclient-{cluster_name}", namespace + ) + return ingress.spec.rules[0].host.split(".", 1)[1] + except Exception as e: # pragma no cover + return _kube_api_error_handling(e)
      @@ -987,7 +1078,62 @@

      Functions

      for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: mcad = _check_aw_exists(cluster_name, namespace) - return Cluster.from_k8_cluster_object(rc, mcad=mcad) + + try: + config_check() + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(namespace) + ingress_host = None + ingress_options = {} + for ingress in ingresses.items: + # Search for ingress with AppWrapper name as the owner + if ( + "ingress-owner" in ingress.metadata.labels + and ingress.metadata.labels["ingress-owner"] == cluster_name + ): + ingress_host = ingress.spec.rules[0].host + if ( + "ingress-options" in ingress.metadata.labels + and ingress.metadata.labels["ingress-options"] == "true" + ): + ingress_name = ingress.metadata.name + port = ( + ingress.spec.rules[0] + .http.paths[0] + .backend.service.port.number + ) + annotations = ingress.metadata.annotations + path = ingress.spec.rules[0].http.paths[0].path + ingress_class_name = ingress.spec.ingress_class_name + path_type = ingress.spec.rules[0].http.paths[0].path_type + + ingress_options = { + "ingresses": [ + { + "ingressName": ingress_name, + "port": port, + "annotations": annotations, + "ingressClassName": ingress_class_name, + "pathType": path_type, + "path": path, + "host": ingress_host, + } + ] + } + except Exception as e: + return _kube_api_error_handling(e) + # We gather the ingress domain from the host + if ingress_host is not None and ingress_options == {}: + ingress_domain = ingress_host.split(".", 1)[1] + else: + ingress_domain = None + + return Cluster.from_k8_cluster_object( + rc, + mcad=mcad, + ingress_domain=ingress_domain, + ingress_options=ingress_options, + ) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" ) @@ -1028,6 +1174,42 @@

      Functions

      return None
      +
+def get_ingress_domain_from_client(cluster_name: str, namespace: str = 'default')
+
      +
      +
      +
+
+Expand source code
+
+
      def get_ingress_domain_from_client(cluster_name: str, namespace: str = "default"):
      +    if is_openshift_cluster():
      +        try:
      +            config_check()
      +            api_instance = client.CustomObjectsApi(api_config_handler())
      +            route = api_instance.get_namespaced_custom_object(
      +                group="route.openshift.io",
      +                version="v1",
      +                namespace=namespace,
      +                plural="routes",
      +                name=f"rayclient-{cluster_name}",
      +            )
      +            return route["spec"]["host"].split(".", 1)[1]
      +        except Exception as e:  # pragma no cover
      +            return _kube_api_error_handling(e)
      +    else:
      +        try:
      +            config_check()
      +            api_instance = client.NetworkingV1Api(api_config_handler())
      +            ingress = api_instance.read_namespaced_ingress(
      +                f"rayclient-{cluster_name}", namespace
      +            )
      +            return ingress.spec.rules[0].host.split(".", 1)[1]
      +        except Exception as e:  # pragma no cover
      +            return _kube_api_error_handling(e)
      +
      +
      def list_all_clusters(namespace: str, print_to_console: bool = True)
      @@ -1530,7 +1712,7 @@

      Classes

      to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc, mcad=True): + def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -1540,6 +1722,10 @@

      Classes

      "volumeMounts" in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0] ) + if local_interactive: + ingress_domain = get_ingress_domain_from_client( + rc["metadata"]["name"], rc["metadata"]["namespace"] + ) cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], @@ -1570,6 +1756,8 @@

      Classes

      ]["image"], local_interactive=local_interactive, mcad=mcad, + ingress_domain=ingress_domain, + ingress_options=ingress_options, ) return Cluster(cluster_config) @@ -1946,7 +2134,7 @@

      Methods

      -def from_k8_cluster_object(rc, mcad=True) +def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={})
      @@ -1954,7 +2142,7 @@

      Methods

      Expand source code -
      def from_k8_cluster_object(rc, mcad=True):
      +
      def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}):
           machine_types = (
               rc["metadata"]["labels"]["orderedinstance"].split("_")
               if "orderedinstance" in rc["metadata"]["labels"]
      @@ -1964,6 +2152,10 @@ 

      Methods

      "volumeMounts" in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0] ) + if local_interactive: + ingress_domain = get_ingress_domain_from_client( + rc["metadata"]["name"], rc["metadata"]["namespace"] + ) cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], @@ -1994,6 +2186,8 @@

      Methods

      ]["image"], local_interactive=local_interactive, mcad=mcad, + ingress_domain=ingress_domain, + ingress_options=ingress_options, ) return Cluster(cluster_config)
      @@ -2057,7 +2251,7 @@

      Methods

-def list_jobs(self) ‑> List
+def list_jobs(self) ‑> List[~T]

      This method accesses the head ray node in your cluster and lists the running jobs.

      @@ -2325,6 +2519,7 @@

      Index

      diff --git a/docs/detailed-documentation/cluster/config.html b/docs/detailed-documentation/cluster/config.html index a99747fe..a4f2d662 100644 --- a/docs/detailed-documentation/cluster/config.html +++ b/docs/detailed-documentation/cluster/config.html @@ -100,7 +100,7 @@

      Classes

      class ClusterConfiguration -(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = '', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False, ingress_options: dict = <factory>, ingress_domain: str = None) +(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = '', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False, ingress_options: dict = <factory>, ingress_domain: str = None)

      This dataclass is used to specify resource requirements and other details, and @@ -109,8 +109,7 @@

      Classes

      Expand source code -
      @dataclass
      -class ClusterConfiguration:
      +
      class ClusterConfiguration:
           """
           This dataclass is used to specify resource requirements and other details, and
           is passed in as an argument when creating a Cluster object.
      diff --git a/docs/detailed-documentation/cluster/model.html b/docs/detailed-documentation/cluster/model.html
      index 8ac5b7c0..3832494e 100644
      --- a/docs/detailed-documentation/cluster/model.html
      +++ b/docs/detailed-documentation/cluster/model.html
      @@ -145,8 +145,7 @@ 

      Classes

      Expand source code -
      @dataclass
      -class AppWrapper:
      +
      class AppWrapper:
           """
           For storing information about an AppWrapper.
           """
      @@ -299,8 +298,7 @@ 

      Class variables

      Expand source code -
      @dataclass
      -class RayCluster:
      +
      class RayCluster:
           """
           For storing information about a Ray cluster.
           """
      diff --git a/docs/detailed-documentation/job/jobs.html b/docs/detailed-documentation/job/jobs.html
      index 4e06013b..98e59e66 100644
      --- a/docs/detailed-documentation/job/jobs.html
      +++ b/docs/detailed-documentation/job/jobs.html
      @@ -327,7 +327,7 @@ 

      Methods

      class DDPJobDefinition -(script: Optional[str] = None, m: Optional[str] = None, script_args: Optional[List[str]] = None, name: Optional[str] = None, cpu: Optional[int] = None, gpu: Optional[int] = None, memMB: Optional[int] = None, h: Optional[str] = None, j: Optional[str] = None, env: Optional[Dict[str, str]] = None, max_retries: int = 0, mounts: Optional[List[str]] = None, rdzv_port: int = 29500, rdzv_backend: str = None, scheduler_args: Optional[Dict[str, str]] = None, image: Optional[str] = None, workspace: Optional[str] = 'file:///home/runner/work/codeflare-sdk') +(script: Optional[str] = None, m: Optional[str] = None, script_args: Optional[List[str]] = None, name: Optional[str] = None, cpu: Optional[int] = None, gpu: Optional[int] = None, memMB: Optional[int] = None, h: Optional[str] = None, j: Optional[str] = None, env: Optional[Dict[str, str]] = None, max_retries: int = 0, mounts: Optional[List[str]] = None, rdzv_port: int = 29500, rdzv_backend: str = None, scheduler_args: Optional[Dict[str, str]] = None, image: Optional[str] = None, workspace: Optional[str] = 'file:///home/runner/work/codeflare-sdk/codeflare-sdk')
      diff --git a/docs/detailed-documentation/job/ray_jobs.html b/docs/detailed-documentation/job/ray_jobs.html index 01fa5fb5..f0d10cd8 100644 --- a/docs/detailed-documentation/job/ray_jobs.html +++ b/docs/detailed-documentation/job/ray_jobs.html @@ -191,7 +191,7 @@

      Classes

class RayJobClient
-(address: Optional[str] = None, create_cluster_if_needed: bool = False, cookies: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, Any]] = None, verify: Union[str, bool, ForwardRef(None)] = True)
+(address: Optional[str] = None, create_cluster_if_needed: bool = False, cookies: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, Any]] = None, verify: Union[str, bool, None] = True)

      A class that functions as a wrapper for the Ray Job Submission Client.

      @@ -461,7 +461,7 @@

      Methods

-def submit_job(self, entrypoint: str, job_id: Optional[str] = None, runtime_env: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, str]] = None, submission_id: Optional[str] = None, entrypoint_num_cpus: Union[int, float, ForwardRef(None)] = None, entrypoint_num_gpus: Union[int, float, ForwardRef(None)] = None, entrypoint_resources: Optional[Dict[str, float]] = None) ‑> str
+def submit_job(self, entrypoint: str, job_id: Optional[str] = None, runtime_env: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, str]] = None, submission_id: Optional[str] = None, entrypoint_num_cpus: Union[int, float, None] = None, entrypoint_num_gpus: Union[int, float, None] = None, entrypoint_resources: Optional[Dict[str, float]] = None) ‑> str

      Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.

      diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index 04f95bc9..91f4bab1 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -60,8 +60,6 @@

      Module codeflare_sdk.utils.generate_yaml

      from base64 import b64encode from urllib3.util import parse_url -from .kube_api_helpers import _get_api_host - def read_template(template): with open(template, "r") as stream: @@ -95,9 +93,8 @@

      Module codeflare_sdk.utils.generate_yaml

      return True else: return False - except client.ApiException as e: # pragma: no cover - print(f"Error detecting cluster type defaulting to Kubernetes: {e}") - return False + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) def update_dashboard_route(route_item, cluster_name, namespace): @@ -160,28 +157,45 @@

      Module codeflare_sdk.utils.generate_yaml

      raise ValueError( f"Error: 'port' is not of type int for ingress item at index {index}" ) - if ingress_option["port"] == 8265: + if ingress_option is not None: metadata["name"] = ingress_option["ingressName"] metadata["namespace"] = namespace - if "annotations" not in ingress_option.keys(): + metadata["labels"]["ingress-owner"] = cluster_name + metadata["labels"]["ingress-options"] = "true" + if ( + "annotations" not in ingress_option.keys() + or ingress_option["annotations"] is None + ): del metadata["annotations"] else: metadata["annotations"] = ingress_option["annotations"] - if "path" not in ingress_option.keys(): + if ( + "path" not in ingress_option.keys() + or ingress_option["path"] is None + ): del spec["rules"][0]["http"]["paths"][0]["path"] else: spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[ "path" ] - if "pathType" not in ingress_option.keys(): + if ( + "pathType" not in ingress_option.keys() + or ingress_option["pathType"] is None + ): spec["rules"][0]["http"]["paths"][0][ "pathType" ] = "ImplementationSpecific" - if "host" not in ingress_option.keys(): + if ( + "host" not in ingress_option.keys() + or ingress_option["host"] is None + ): del spec["rules"][0]["host"] else: spec["rules"][0]["host"] = ingress_option["host"] - if "ingressClassName" not in ingress_option.keys(): + if ( + "ingressClassName" not in ingress_option.keys() + or ingress_option["ingressClassName"] is None + ): del spec["ingressClassName"] else: spec["ingressClassName"] = ingress_option["ingressClassName"] @@ -192,6 +206,7 @@

      Module codeflare_sdk.utils.generate_yaml

      else: spec["ingressClassName"] = "nginx" metadata["name"] = gen_dashboard_ingress_name(cluster_name) + metadata["labels"]["ingress-owner"] = cluster_name metadata["namespace"] = namespace spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ "name" @@ -570,10 +585,6 @@

      Module codeflare_sdk.utils.generate_yaml

      tls_secret_name = f"{cluster_name}-proxy-tls-secret" tls_volume_name = "proxy-tls-secret" port_name = "oauth-proxy" - host = _get_api_host(k8_client) - host = host.replace( - "api.", f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps." - ) oauth_sidecar = _create_oauth_sidecar_object( namespace, tls_mount_location, @@ -909,10 +920,6 @@

      Functions

     tls_secret_name = f"{cluster_name}-proxy-tls-secret"
     tls_volume_name = "proxy-tls-secret"
     port_name = "oauth-proxy"
-    host = _get_api_host(k8_client)
-    host = host.replace(
-        "api.", f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps."
-    )
     oauth_sidecar = _create_oauth_sidecar_object(
         namespace,
         tls_mount_location,
@@ -1096,9 +1103,8 @@

      Functions

                     return True
         else:
             return False
-    except client.ApiException as e:  # pragma: no cover
-        print(f"Error detecting cluster type defaulting to Kubernetes: {e}")
-        return False
+    except Exception as e:  # pragma: no cover
+        return _kube_api_error_handling(e)
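      The change above funnels any failure in the OpenShift check through the SDK's Kubernetes error handler instead of printing and returning False. As a rough, standalone sketch of the detection idea (assumed names, not the SDK's exact implementation), a check for the route.openshift.io API group could look like this:

      ```
      from kubernetes import client, config

      def looks_like_openshift() -> bool:
          # Assumed helper name; treats the presence of the route.openshift.io
          # API group as an indicator that the cluster is OpenShift.
          config.load_kube_config()
          try:
              for group in client.ApisApi().get_api_versions().groups:
                  if group.name == "route.openshift.io":
                      return True
              return False
          except client.ApiException:
              # Fall back to plain-Kubernetes behaviour if the lookup fails.
              return False
      ```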
      @@ -1268,28 +1274,45 @@

      Functions

      raise ValueError( f"Error: 'port' is not of type int for ingress item at index {index}" ) - if ingress_option["port"] == 8265: + if ingress_option is not None: metadata["name"] = ingress_option["ingressName"] metadata["namespace"] = namespace - if "annotations" not in ingress_option.keys(): + metadata["labels"]["ingress-owner"] = cluster_name + metadata["labels"]["ingress-options"] = "true" + if ( + "annotations" not in ingress_option.keys() + or ingress_option["annotations"] is None + ): del metadata["annotations"] else: metadata["annotations"] = ingress_option["annotations"] - if "path" not in ingress_option.keys(): + if ( + "path" not in ingress_option.keys() + or ingress_option["path"] is None + ): del spec["rules"][0]["http"]["paths"][0]["path"] else: spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[ "path" ] - if "pathType" not in ingress_option.keys(): + if ( + "pathType" not in ingress_option.keys() + or ingress_option["pathType"] is None + ): spec["rules"][0]["http"]["paths"][0][ "pathType" ] = "ImplementationSpecific" - if "host" not in ingress_option.keys(): + if ( + "host" not in ingress_option.keys() + or ingress_option["host"] is None + ): del spec["rules"][0]["host"] else: spec["rules"][0]["host"] = ingress_option["host"] - if "ingressClassName" not in ingress_option.keys(): + if ( + "ingressClassName" not in ingress_option.keys() + or ingress_option["ingressClassName"] is None + ): del spec["ingressClassName"] else: spec["ingressClassName"] = ingress_option["ingressClassName"] @@ -1300,6 +1323,7 @@

      Functions

     else:
         spec["ingressClassName"] = "nginx"
     metadata["name"] = gen_dashboard_ingress_name(cluster_name)
+    metadata["labels"]["ingress-owner"] = cluster_name
     metadata["namespace"] = namespace
     spec["rules"][0]["http"]["paths"][0]["backend"]["service"][
         "name"
     ]
diff --git a/docs/detailed-documentation/utils/kube_api_helpers.html b/docs/detailed-documentation/utils/kube_api_helpers.html
index 4105a4c5..58fcf7fc 100644
--- a/docs/detailed-documentation/utils/kube_api_helpers.html
+++ b/docs/detailed-documentation/utils/kube_api_helpers.html
@@ -77,11 +77,7 @@

      Module codeflare_sdk.utils.kube_api_helpers

         raise PermissionError(perm_msg)
     elif e.reason == "Conflict":
         raise FileExistsError(exists_msg)
-    raise e
-
-
-def _get_api_host(api_client: client.ApiClient):  # pragma: no cover
-    return parse_url(api_client.configuration.host).host
+    raise e
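      For orientation, the helper above maps Kubernetes ApiException reasons onto built-in Python exceptions. A simplified, self-contained sketch of that pattern (the wrapper below is illustrative; its messages and the example call are assumptions):

      ```
      from kubernetes import client, config

      def kube_api_error_handling(e: client.ApiException):
          # Map common API failure reasons to friendlier built-in exceptions.
          if e.reason in ("Unauthorized", "Forbidden"):
              raise PermissionError("Action not permitted") from e
          if e.reason == "Conflict":
              raise FileExistsError("Resource already exists") from e
          raise e

      def list_pods(namespace: str):
          config.load_kube_config()
          try:
              return client.CoreV1Api().list_namespaced_pod(namespace)
          except client.ApiException as e:
              return kube_api_error_handling(e)
      ```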
diff --git a/docs/detailed-documentation/utils/openshift_oauth.html b/docs/detailed-documentation/utils/openshift_oauth.html
index db346904..8dc82d03 100644
--- a/docs/detailed-documentation/utils/openshift_oauth.html
+++ b/docs/detailed-documentation/utils/openshift_oauth.html
@@ -27,41 +27,42 @@

    Module codeflare_sdk.utils.openshift_oauth

    Expand source code
    from urllib3.util import parse_url
    -from .generate_yaml import gen_dashboard_ingress_name
    -from .kube_api_helpers import _get_api_host
    -from base64 import b64decode
    +import yaml
     
     from ..cluster.auth import config_check, api_config_handler
     
     from kubernetes import client
    +from kubernetes import dynamic
    +
    +
    +def _route_api_getter():
    +    return dynamic.DynamicClient(
    +        api_config_handler() or client.ApiClient()
    +    ).resources.get(api_version="route.openshift.io/v1", kind="Route")
     
     
     def create_openshift_oauth_objects(cluster_name, namespace):
         config_check()
    -    api_client = api_config_handler() or client.ApiClient()
         oauth_port = 8443
         oauth_sa_name = f"{cluster_name}-oauth-proxy"
         tls_secret_name = _gen_tls_secret_name(cluster_name)
         service_name = f"{cluster_name}-oauth"
         port_name = "oauth-proxy"
    -    host = _get_api_host(api_client)
     
    -    # replace "^api" with the expected host
    -    host = f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps" + host.lstrip(
    -        "api"
    -    )
    -
    -    _create_or_replace_oauth_sa(namespace, oauth_sa_name, host)
    +    _create_or_replace_oauth_sa(namespace, oauth_sa_name, cluster_name)
         _create_or_replace_oauth_service_obj(
             cluster_name, namespace, oauth_port, tls_secret_name, service_name, port_name
         )
    -    _create_or_replace_oauth_ingress_object(
    -        cluster_name, namespace, service_name, port_name, host
    +    _create_or_replace_oauth_route_object(
    +        cluster_name,
    +        namespace,
    +        service_name,
    +        port_name,
         )
         _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name)
     
     
    -def _create_or_replace_oauth_sa(namespace, oauth_sa_name, host):
    +def _create_or_replace_oauth_sa(namespace, oauth_sa_name, cluster_name):
         oauth_sa = client.V1ServiceAccount(
             api_version="v1",
             kind="ServiceAccount",
    @@ -69,7 +70,10 @@ 

    Module codeflare_sdk.utils.openshift_oauth

             name=oauth_sa_name,
             namespace=namespace,
             annotations={
-                "serviceaccounts.openshift.io/oauth-redirecturi.first": f"https://{host}"
+                "serviceaccounts.openshift.io/oauth-redirectreference.first": '{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"'
+                + "ray-dashboard-"
+                + cluster_name
+                + '"}}'
             },
         ),
     )
@@ -126,15 +130,14 @@

    Module codeflare_sdk.utils.openshift_oauth

     # for an existing cluster before calling this => the objects should never be deleted twice
     oauth_sa_name = f"{cluster_name}-oauth-proxy"
     service_name = f"{cluster_name}-oauth"
+    v1_routes = _route_api_getter()
     client.CoreV1Api(api_config_handler()).delete_namespaced_service_account(
         name=oauth_sa_name, namespace=namespace
     )
     client.CoreV1Api(api_config_handler()).delete_namespaced_service(
         name=service_name, namespace=namespace
     )
-    client.NetworkingV1Api(api_config_handler()).delete_namespaced_ingress(
-        name=f"{cluster_name}-ingress", namespace=namespace
-    )
+    v1_routes.delete(name=f"ray-dashboard-{cluster_name}", namespace=namespace)
     client.RbacAuthorizationV1Api(api_config_handler()).delete_cluster_role_binding(
         name=f"{cluster_name}-rb"
     )
@@ -189,55 +192,39 @@

    Module codeflare_sdk.utils.openshift_oauth

    raise e -def _create_or_replace_oauth_ingress_object( +def _create_or_replace_oauth_route_object( cluster_name: str, namespace: str, service_name: str, port_name: str, - host: str, -) -> client.V1Ingress: - ingress = client.V1Ingress( - api_version="networking.k8s.io/v1", - kind="Ingress", - metadata=client.V1ObjectMeta( - annotations={"route.openshift.io/termination": "passthrough"}, - name=f"{cluster_name}-ingress", - namespace=namespace, - ), - spec=client.V1IngressSpec( - rules=[ - client.V1IngressRule( - host=host, - http=client.V1HTTPIngressRuleValue( - paths=[ - client.V1HTTPIngressPath( - backend=client.V1IngressBackend( - service=client.V1IngressServiceBackend( - name=service_name, - port=client.V1ServiceBackendPort( - name=port_name - ), - ) - ), - path_type="ImplementationSpecific", - ) - ] - ), - ) - ] - ), - ) +): + route = f""" + apiVersion: route.openshift.io/v1 + kind: Route + metadata: + name: ray-dashboard-{cluster_name} + namespace: {namespace} + spec: + port: + targetPort: {port_name} + tls: + termination: passthrough + to: + kind: Service + name: {service_name} + """ + route_data = yaml.safe_load(route) + v1_routes = _route_api_getter() try: - client.NetworkingV1Api(api_config_handler()).create_namespaced_ingress( - namespace=namespace, body=ingress + existing_route = v1_routes.get( + name=f"ray-dashboard-{cluster_name}", namespace=namespace ) - except client.ApiException as e: - if e.reason == "Conflict": - client.NetworkingV1Api(api_config_handler()).replace_namespaced_ingress( - namespace=namespace, body=ingress, name=f"{cluster_name}-ingress" - ) - else: - raise e
+        route_data["metadata"]["resourceVersion"] = existing["metadata"][
+            "resourceVersion"
+        ]
+        v1_routes.replace(body=route_data)
+    except dynamic.client.ApiException:
+        v1_routes.create(body=route_data)
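     Outside the SDK, the same create-or-replace pattern with the kubernetes dynamic client can be sketched as follows; the route name, namespace and backing service are placeholders, not values from this patch:

     ```
     import yaml
     from kubernetes import client, config, dynamic

     config.load_kube_config()
     routes = dynamic.DynamicClient(client.ApiClient()).resources.get(
         api_version="route.openshift.io/v1", kind="Route"
     )

     route_data = yaml.safe_load(
         """
         apiVersion: route.openshift.io/v1
         kind: Route
         metadata:
           name: ray-dashboard-example
           namespace: default
         spec:
           port:
             targetPort: oauth-proxy
           tls:
             termination: passthrough
           to:
             kind: Service
             name: example-oauth
         """
     )

     try:
         existing = routes.get(name="ray-dashboard-example", namespace="default")
         # Replacing requires carrying over the live object's resourceVersion.
         route_data["metadata"]["resourceVersion"] = existing["metadata"]["resourceVersion"]
         routes.replace(body=route_data)
     except dynamic.client.ApiException:
         routes.create(body=route_data)
     ```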
    @@ -258,25 +245,21 @@

    Functions

    def create_openshift_oauth_objects(cluster_name, namespace):
         config_check()
    -    api_client = api_config_handler() or client.ApiClient()
         oauth_port = 8443
         oauth_sa_name = f"{cluster_name}-oauth-proxy"
         tls_secret_name = _gen_tls_secret_name(cluster_name)
         service_name = f"{cluster_name}-oauth"
         port_name = "oauth-proxy"
    -    host = _get_api_host(api_client)
    -
    -    # replace "^api" with the expected host
    -    host = f"{gen_dashboard_ingress_name(cluster_name)}-{namespace}.apps" + host.lstrip(
    -        "api"
    -    )
     
    -    _create_or_replace_oauth_sa(namespace, oauth_sa_name, host)
    +    _create_or_replace_oauth_sa(namespace, oauth_sa_name, cluster_name)
         _create_or_replace_oauth_service_obj(
             cluster_name, namespace, oauth_port, tls_secret_name, service_name, port_name
         )
    -    _create_or_replace_oauth_ingress_object(
    -        cluster_name, namespace, service_name, port_name, host
    +    _create_or_replace_oauth_route_object(
    +        cluster_name,
    +        namespace,
    +        service_name,
    +        port_name,
         )
         _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name)
    @@ -295,15 +278,14 @@

    Functions

    # for an existing cluster before calling this => the objects should never be deleted twice oauth_sa_name = f"{cluster_name}-oauth-proxy" service_name = f"{cluster_name}-oauth" + v1_routes = _route_api_getter() client.CoreV1Api(api_config_handler()).delete_namespaced_service_account( name=oauth_sa_name, namespace=namespace ) client.CoreV1Api(api_config_handler()).delete_namespaced_service( name=service_name, namespace=namespace ) - client.NetworkingV1Api(api_config_handler()).delete_namespaced_ingress( - name=f"{cluster_name}-ingress", namespace=namespace - ) + v1_routes.delete(name=f"ray-dashboard-{cluster_name}", namespace=namespace) client.RbacAuthorizationV1Api(api_config_handler()).delete_cluster_role_binding( name=f"{cluster_name}-rb" ) From 2a128da2d81e19c1f63c2da66f8a9967ae6e4345 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 17 Jan 2024 11:56:14 +0000 Subject: [PATCH 113/496] Adjust e2e test ClusterConfiguration min_memory --- tests/e2e/mnist_raycluster_sdk.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e/mnist_raycluster_sdk.py b/tests/e2e/mnist_raycluster_sdk.py index d46c6e4c..0ded85e4 100644 --- a/tests/e2e/mnist_raycluster_sdk.py +++ b/tests/e2e/mnist_raycluster_sdk.py @@ -37,7 +37,7 @@ head_memory=2, min_cpus="500m", max_cpus=1, - min_memory=0.5, + min_memory=1, max_memory=2, num_gpus=0, instascale=False, From e7d52551226549cdd02a280ba0b4ba42f8933c9a Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 19 Jan 2024 15:19:48 +0000 Subject: [PATCH 114/496] Added directory creation check for cases where mcad=False --- src/codeflare_sdk/utils/generate_yaml.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 494234fa..bfbce859 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -619,6 +619,11 @@ def _create_oauth_sidecar_object( def write_components(user_yaml: dict, output_file_name: str): + # Create the directory if it doesn't exist + directory_path = os.path.dirname(output_file_name) + if not os.path.exists(directory_path): + os.makedirs(directory_path) + components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") open(output_file_name, "w").close() with open(output_file_name, "a") as outfile: From 62be50e23e7ecba074e540c4715f0dd9f4ca33a0 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 22 Jan 2024 11:44:55 +0000 Subject: [PATCH 115/496] Updated ray v1alpha1 to v1 --- .../notebook-ex-outputs/gptfttest.yaml | 2 +- .../notebook-ex-outputs/instascaletest.yaml | 2 +- .../notebook-ex-outputs/interactivetest.yaml | 2 +- .../notebook-ex-outputs/jobtest.yaml | 2 +- .../notebook-ex-outputs/raytest.yaml | 2 +- src/codeflare_sdk/cluster/cluster.py | 10 +++++----- src/codeflare_sdk/cluster/model.py | 2 +- .../templates/base-template.yaml | 2 +- tests/test-case-bad.yaml | 2 +- tests/test-case-no-mcad.yamls | 2 +- tests/test-case-prio.yaml | 2 +- tests/test-case.yaml | 2 +- tests/unit_test.py | 20 +++++++++---------- 13 files changed, 26 insertions(+), 26 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml index e65a68d2..62737232 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml @@ -29,7 +29,7 @@ spec: memory: 8G nvidia.com/gpu: 1 generictemplate: - apiVersion: ray.io/v1alpha1 + 
apiVersion: ray.io/v1 kind: RayCluster metadata: labels: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml index 14e4b8c9..8cb96a79 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml @@ -29,7 +29,7 @@ spec: memory: 8G nvidia.com/gpu: 1 generictemplate: - apiVersion: ray.io/v1alpha1 + apiVersion: ray.io/v1 kind: RayCluster metadata: labels: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index d44b6340..786ef039 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -29,7 +29,7 @@ spec: memory: 8G nvidia.com/gpu: 1 generictemplate: - apiVersion: ray.io/v1alpha1 + apiVersion: ray.io/v1 kind: RayCluster metadata: labels: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index a5859efb..60a4b7b2 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -27,7 +27,7 @@ spec: memory: 4G nvidia.com/gpu: 0 generictemplate: - apiVersion: ray.io/v1alpha1 + apiVersion: ray.io/v1 kind: RayCluster metadata: labels: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index 3a25aafd..53e4be9a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -27,7 +27,7 @@ spec: memory: 4G nvidia.com/gpu: 0 generictemplate: - apiVersion: ray.io/v1alpha1 + apiVersion: ray.io/v1 kind: RayCluster metadata: labels: diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 4a5f69fb..607e59f3 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -557,7 +557,7 @@ def _component_resources_up( if resource["kind"] == "RayCluster": api_instance.create_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", body=resource, @@ -594,7 +594,7 @@ def _component_resources_down( if resource["kind"] == "RayCluster": api_instance.delete_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", name=self.app_wrapper_name, @@ -681,7 +681,7 @@ def get_cluster(cluster_name: str, namespace: str = "default"): api_instance = client.CustomObjectsApi(api_config_handler()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", ) @@ -838,7 +838,7 @@ def _ray_cluster_status(name, namespace="default") -> Optional[RayCluster]: api_instance = client.CustomObjectsApi(api_config_handler()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", ) @@ -858,7 +858,7 @@ def _get_ray_clusters(namespace="default") -> List[RayCluster]: api_instance = client.CustomObjectsApi(api_config_handler()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, 
plural="rayclusters", ) diff --git a/src/codeflare_sdk/cluster/model.py b/src/codeflare_sdk/cluster/model.py index fb8873e4..2e1abaf7 100644 --- a/src/codeflare_sdk/cluster/model.py +++ b/src/codeflare_sdk/cluster/model.py @@ -27,7 +27,7 @@ class RayClusterStatus(Enum): Defines the possible reportable states of a Ray cluster. """ - # https://github.com/ray-project/kuberay/blob/master/ray-operator/apis/ray/v1alpha1/raycluster_types.go#L95 + # https://github.com/ray-project/kuberay/blob/master/ray-operator/apis/ray/v1/raycluster_types.go#L112-L117 READY = "ready" UNHEALTHY = "unhealthy" FAILED = "failed" diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 1e99040c..16a3dc2a 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -37,7 +37,7 @@ spec: # The resource requests and limits in this config are too small for production! # For an example with more realistic resource configuration, see # ray-cluster.autoscaler.large.yaml. - apiVersion: ray.io/v1alpha1 + apiVersion: ray.io/v1 kind: RayCluster metadata: labels: diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index 68cfec34..6e969e01 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -29,7 +29,7 @@ spec: memory: 5G nvidia.com/gpu: 7 generictemplate: - apiVersion: ray.io/v1alpha1 + apiVersion: ray.io/v1 kind: RayCluster metadata: labels: diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 4be18dc6..38109dfc 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -1,5 +1,5 @@ --- -apiVersion: ray.io/v1alpha1 +apiVersion: ray.io/v1 kind: RayCluster metadata: labels: diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 72c73083..9f907135 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -29,7 +29,7 @@ spec: memory: 5G nvidia.com/gpu: 7 generictemplate: - apiVersion: ray.io/v1alpha1 + apiVersion: ray.io/v1 kind: RayCluster metadata: labels: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 8b0677cf..f7e287cf 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -28,7 +28,7 @@ spec: memory: 5G nvidia.com/gpu: 7 generictemplate: - apiVersion: ray.io/v1alpha1 + apiVersion: ray.io/v1 kind: RayCluster metadata: labels: diff --git a/tests/unit_test.py b/tests/unit_test.py index a7b5d9a6..e0a3c385 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -397,7 +397,7 @@ def arg_check_apply_effect(group, version, namespace, plural, body, *args): assert body == aw elif plural == "rayclusters": assert group == "ray.io" - assert version == "v1alpha1" + assert version == "v1" with open(f"{aw_dir}unit-test-cluster-ray.yaml") as f: yamls = yaml.load_all(f, Loader=yaml.FullLoader) for resource in yamls: @@ -432,7 +432,7 @@ def arg_check_del_effect(group, version, namespace, plural, name, *args): assert name == "unit-test-cluster" elif plural == "rayclusters": assert group == "ray.io" - assert version == "v1alpha1" + assert version == "v1" assert name == "unit-test-cluster-ray" elif plural == "ingresses": assert group == "networking.k8s.io" @@ -533,7 +533,7 @@ def test_aw_status(mocker): def rc_status_fields(group, version, namespace, plural, *args): assert group == "ray.io" - assert version == "v1alpha1" + assert version == "v1" assert namespace == "test-ns" assert plural == "rayclusters" assert args == tuple() @@ -913,7 +913,7 @@ def 
get_ray_obj(group, version, namespace, plural, cls=None): api_obj = { "items": [ { - "apiVersion": "ray.io/v1alpha1", + "apiVersion": "ray.io/v1", "kind": "RayCluster", "metadata": { "creationTimestamp": "2023-02-22T16:26:07Z", @@ -926,7 +926,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): }, "managedFields": [ { - "apiVersion": "ray.io/v1alpha1", + "apiVersion": "ray.io/v1", "fieldsType": "FieldsV1", "fieldsV1": { "f:metadata": { @@ -986,7 +986,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "time": "2023-02-22T16:26:07Z", }, { - "apiVersion": "ray.io/v1alpha1", + "apiVersion": "ray.io/v1", "fieldsType": "FieldsV1", "fieldsV1": { "f:status": { @@ -1186,7 +1186,7 @@ def get_aw_obj(group, version, namespace, plural): "kind": "AppWrapper", "metadata": { "annotations": { - "kubectl.kubernetes.io/last-applied-configuration": '{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest1","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1alpha1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest1","controller-tools.k8s.io":"1.0"},"name":"quicktest1","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' + "kubectl.kubernetes.io/last-applied-configuration": 
'{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest1","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest1","controller-tools.k8s.io":"1.0"},"name":"quicktest1","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' }, "creationTimestamp": "2023-02-22T16:26:07Z", "generation": 4, @@ -1280,7 +1280,7 @@ def get_aw_obj(group, version, namespace, plural): }, ], "generictemplate": { - "apiVersion": "ray.io/v1alpha1", + "apiVersion": "ray.io/v1", "kind": "RayCluster", "metadata": { "labels": { @@ -1514,7 +1514,7 @@ def get_aw_obj(group, version, namespace, plural): "kind": "AppWrapper", "metadata": { "annotations": { - "kubectl.kubernetes.io/last-applied-configuration": 
'{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest2","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1alpha1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest2","controller-tools.k8s.io":"1.0"},"name":"quicktest2","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' + "kubectl.kubernetes.io/last-applied-configuration": 
'{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest2","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest2","controller-tools.k8s.io":"1.0"},"name":"quicktest2","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' }, "creationTimestamp": "2023-02-22T16:26:07Z", "generation": 4, @@ -1608,7 +1608,7 @@ def get_aw_obj(group, version, namespace, plural): }, ], "generictemplate": { - "apiVersion": "ray.io/v1alpha1", + "apiVersion": "ray.io/v1", "kind": "RayCluster", "metadata": { "labels": { From 9bb5d1c08305501c8c0924d80d87e4e70ad2c5c9 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 26 Jan 2024 12:29:52 +0000 Subject: [PATCH 116/496] Update cluster-configuration doc for clarity --- docs/cluster-configuration.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index 35e8c149..24d63a1e 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -22,7 +22,7 @@ cluster = Cluster(ClusterConfiguration( image="quay.io/project-codeflare/ray:latest-py39-cu118", # Mandatory Field instascale=False, # Default False machine_types=["m5.xlarge", "g4dn.xlarge"], - ingress_domain="example.com" # Default None, Mandatory for 
Kubernetes Clusters + ingress_domain="example.com" # Default None, Mandatory for Vanilla Kubernetes Clusters - ingress_domain is ignored on OpenShift Clusters as a route is created. )) ``` @@ -32,8 +32,8 @@ From there a user can call `cluster.up()` and `cluster.down()` to create and rem In cases where `mcad=False` a yaml file will be created with the individual Ray Cluster, Route/Ingress and Secret included.
    The Ray Cluster and service will be created by KubeRay directly and the other components will be individually created. -## Ray Cluster Configuration in a Kubernetes environment -To create a Ray Cluster using the CodeFlare SDK in a Kubernetes environment an `ingress_domain` must be passed in the Cluster Configuration. +## Ray Cluster Configuration in a Vanilla Kubernetes environment (Non-OpenShift) +To create a Ray Cluster using the CodeFlare SDK in a Vanilla Kubernetes environment an `ingress_domain` must be passed in the Cluster Configuration. This is used for the creation of the Ray Dashboard and Client ingresses. `ingress_options` can be passed to create a custom Ray Dashboard ingress, `ingress_domain` is still a required variable for the Client ingress. From a36f34a6b11719fe062b8e1fb5c18806c2ff5029 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 26 Jan 2024 14:17:16 +0000 Subject: [PATCH 117/496] Added local_interactive to cluster-configuration doc --- docs/cluster-configuration.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index 24d63a1e..2ad614a6 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -23,8 +23,10 @@ cluster = Cluster(ClusterConfiguration( instascale=False, # Default False machine_types=["m5.xlarge", "g4dn.xlarge"], ingress_domain="example.com" # Default None, Mandatory for Vanilla Kubernetes Clusters - ingress_domain is ignored on OpenShift Clusters as a route is created. + local_interactive=False, # Default False )) ``` +Note: On OpenShift, the `ingress_domain` is only required when `local_interactive` is enabled. - This may change soon. Upon creating a cluster configuration with `mcad=True` an appwrapper will be created featuring the Ray Cluster and any Routes, Ingresses or Secrets that are needed to be created along side it.
    From there a user can call `cluster.up()` and `cluster.down()` to create and remove the appwrapper thus creating and removing the Ray Cluster. @@ -36,7 +38,7 @@ The Ray Cluster and service will be created by KubeRay directly and the other co To create a Ray Cluster using the CodeFlare SDK in a Vanilla Kubernetes environment an `ingress_domain` must be passed in the Cluster Configuration. This is used for the creation of the Ray Dashboard and Client ingresses. -`ingress_options` can be passed to create a custom Ray Dashboard ingress, `ingress_domain` is still a required variable for the Client ingress. +`ingress_options` can be passed to create a custom Ray Dashboard ingress, `ingress_domain` is still a required variable for the Client route/ingress. An example of `ingress_options` would look like this. ``` From cfb7031f1e5f11838e5ec5ca3db228536b344b57 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 2 Feb 2024 14:03:56 +0000 Subject: [PATCH 118/496] Display Code Coverage failed message --- .github/workflows/unit-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index d62c0eff..495bf487 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -28,4 +28,4 @@ jobs: run: | coverage run -m --source=src pytest -v tests/unit_test.py coverage=$(coverage report -m | tail -1 | tail -c 4 | head -c 2) - if (( $coverage < 90 )); then exit 1; else echo "Coverage passed, ${coverage}%"; fi + if (( $coverage < 90 )); then echo "Coverage failed at ${coverage}%"; exit 1; else echo "Coverage passed, ${coverage}%"; fi From c8b1158086a7741dc94f47c0321ac08553ddeeea Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Mon, 29 Jan 2024 17:12:03 +0000 Subject: [PATCH 119/496] Convert Go e2e test to Python Test --- poetry.lock | 120 +++-------------- tests/e2e/mnist_raycluster_sdk.py | 9 +- tests/e2e/mnist_raycluster_sdk_test.py | 122 ++++++++++++++++++ ..._pip_requirements.txt => requirements.txt} | 0 tests/e2e/support.py | 14 ++ 5 files changed, 158 insertions(+), 107 deletions(-) create mode 100644 tests/e2e/mnist_raycluster_sdk_test.py rename tests/e2e/{mnist_pip_requirements.txt => requirements.txt} (100%) create mode 100644 tests/e2e/support.py diff --git a/poetry.lock b/poetry.lock index f52cd9c9..9a421aa7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "aiohttp" version = "3.9.1" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -101,7 +100,6 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aiohttp-cors" version = "0.7.0" description = "CORS support for aiohttp" -category = "main" optional = false python-versions = "*" files = [ @@ -116,7 +114,6 @@ aiohttp = ">=1.1" name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -131,7 +128,6 @@ frozenlist = ">=1.1.0" name = "ansicon" version = "1.89.0" description = "Python wrapper for loading Jason Hood's ANSICON" -category = "main" optional = false python-versions = "*" files = [ @@ -143,7 +139,6 @@ files = [ name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -155,7 +150,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -174,7 +168,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -209,7 +202,6 @@ typecheck = ["mypy"] name = "blessed" version = "1.20.0" description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." -category = "main" optional = false python-versions = ">=2.7" files = [ @@ -226,7 +218,6 @@ wcwidth = ">=0.1.4" name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -238,7 +229,6 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -250,7 +240,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -315,7 +304,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -415,7 +403,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -430,7 +417,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "codeflare-torchx" version = "0.6.0.dev1" description = "TorchX SDK and Components" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -459,7 +445,6 @@ ray = ["ray (>=1.12.1)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -471,7 +456,6 @@ files = [ name = "colorful" version = "0.5.5" description = "Terminal string styling done right, in Python." -category = "main" optional = false python-versions = "*" files = [ @@ -486,7 +470,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "commonmark" version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" -category = "main" optional = false python-versions = "*" files = [ @@ -501,7 +484,6 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -574,7 +556,6 @@ toml = ["tomli"] name = "cryptography" version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -616,7 +597,6 @@ tox = ["tox"] name = "distlib" version = "0.3.7" description = "Distribution utilities" -category = "main" optional = false python-versions = "*" files = [ @@ -628,7 +608,6 @@ files = [ name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -650,7 +629,6 @@ ssh = ["paramiko (>=2.4.3)"] name = "docstring-parser" version = "0.8.1" description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -661,7 +639,6 @@ files = [ name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -676,7 +653,6 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "main" optional = false python-versions = "*" files = [ @@ -691,7 +667,6 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "filelock" version = "3.12.4" description = "A platform independent file lock." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -708,7 +683,6 @@ typing = ["typing-extensions (>=4.7.1)"] name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -779,7 +753,6 @@ files = [ name = "fsspec" version = "2023.9.2" description = "File-system specification" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -815,7 +788,6 @@ tqdm = ["tqdm"] name = "google-api-core" version = "2.15.0" description = "Google API client core library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -838,7 +810,6 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-auth" version = "2.23.3" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -862,7 +833,6 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "googleapis-common-protos" version = "1.62.0" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -880,7 +850,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "gpustat" version = "1.1.1" description = "An utility to monitor NVIDIA GPU status and usage" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -900,7 +869,6 @@ test = ["mockito (>=1.2.1)", "pytest (>=5.4.1)", "pytest-runner"] name = "grpcio" version = "1.60.0" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -967,7 +935,6 @@ protobuf = ["grpcio-tools (>=1.60.0)"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -979,7 +946,6 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -999,7 +965,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.1.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1018,7 +983,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1030,7 +994,6 @@ files = [ name = "jinxed" version = "1.2.1" description = "Jinxed Terminal Library" -category = "main" optional = false python-versions = "*" files = [ @@ -1045,7 +1008,6 @@ ansicon = {version = "*", markers = "platform_system == \"Windows\""} name = "jsonschema" version = "4.19.1" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1069,7 +1031,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1085,7 +1046,6 @@ referencing = ">=0.28.0" name = "kubernetes" version = "26.1.0" description = "Kubernetes python client" -category = "main" optional = false 
python-versions = ">=3.6" files = [ @@ -1103,7 +1063,7 @@ requests-oauthlib = "*" setuptools = ">=21.0.0" six = ">=1.9.0" urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.0 || >=0.43.0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] @@ -1112,7 +1072,6 @@ adal = ["adal (>=1.0.2)"] name = "mako" version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1132,7 +1091,6 @@ testing = ["pytest"] name = "markdown" version = "3.5" description = "Python implementation of John Gruber's Markdown." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1151,7 +1109,6 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1175,6 +1132,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ 
-1211,7 +1178,6 @@ files = [ name = "msgpack" version = "1.0.7" description = "MessagePack serializer" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1277,7 +1243,6 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1361,7 +1326,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1373,7 +1337,6 @@ files = [ name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1411,7 +1374,6 @@ files = [ name = "nvidia-ml-py" version = "12.535.133" description = "Python Bindings for the NVIDIA Management Library" -category = "main" optional = false python-versions = "*" files = [ @@ -1423,7 +1385,6 @@ files = [ name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1440,7 +1401,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "opencensus" version = "0.11.3" description = "A stats collection and distributed tracing framework" -category = "main" optional = false python-versions = "*" files = [ @@ -1456,7 +1416,6 @@ opencensus-context = ">=0.1.3" name = "opencensus-context" version = "0.1.3" description = "OpenCensus Runtime Context" -category = "main" optional = false python-versions = "*" files = [ @@ -1468,7 +1427,6 @@ files = [ name = "openshift-client" version = "1.0.18" description = "OpenShift python client" -category = "main" optional = false python-versions = "*" files = [ @@ -1485,7 +1443,6 @@ six = "*" name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1497,7 +1454,6 @@ files = [ name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1531,8 +1487,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -1565,7 +1521,6 @@ xml = ["lxml (>=4.6.3)"] name = "paramiko" version = "3.3.1" description = "SSH2 protocol library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1587,7 +1542,6 @@ invoke = ["invoke (>=2.0)"] name = "pdoc3" version = "0.10.0" description = "Auto-generate API documentation for Python projects." -category = "dev" optional = false python-versions = ">= 3.6" files = [ @@ -1603,7 +1557,6 @@ markdown = ">=3.0" name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1615,7 +1568,6 @@ files = [ name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1631,7 +1583,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1647,7 +1598,6 @@ testing = ["pytest", "pytest-benchmark"] name = "prometheus-client" version = "0.19.0" description = "Python client for the Prometheus monitoring system." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1662,7 +1612,6 @@ twisted = ["twisted"] name = "protobuf" version = "4.24.4" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1685,7 +1634,6 @@ files = [ name = "psutil" version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -1714,7 +1662,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "py-spy" version = "0.3.14" description = "Sampling profiler for Python programs" -category = "main" optional = false python-versions = "*" files = [ @@ -1731,7 +1678,6 @@ files = [ name = "pyarrow" version = "14.0.1" description = "Python library for Apache Arrow" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1780,7 +1726,6 @@ numpy = ">=1.16.6" name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1792,7 +1737,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1807,7 +1751,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1819,7 +1762,6 @@ files = [ name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1872,7 +1814,6 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1887,7 +1828,6 @@ plugins = ["importlib-metadata"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1914,7 +1854,6 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyre-extensions" version = "0.0.30" description = "Type system extensions for use with the pyre type checker" -category = "main" optional = false python-versions = "*" files = [ @@ -1930,7 +1869,6 @@ typing-inspect = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1953,7 +1891,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1971,7 +1908,6 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -1986,7 +1922,6 @@ six = ">=1.5" name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -1998,7 +1933,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" files = [ @@ -2022,7 +1956,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2082,7 +2015,6 @@ files = [ name = "ray" version = "2.7.0" description = "Ray provides a simple, universal API for building distributed applications." -category = "main" optional = false python-versions = "*" files = [ @@ -2123,15 +2055,15 @@ frozenlist = "*" fsspec = {version = "*", optional = true, markers = "extra == \"data\""} gpustat = {version = ">=1.0.0", optional = true, markers = "extra == \"default\""} grpcio = [ - {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\""}, - {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\""}, + {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\" and extra == \"default\""}, + {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""}, ] jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" numpy = [ {version = ">=1.16", markers = "python_version < \"3.9\""}, - {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, {version = ">=1.20", optional = true, markers = "extra == \"data\""}, + {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, ] opencensus = {version = "*", optional = true, markers = "extra == \"default\""} packaging = "*" @@ -2164,7 +2096,6 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. name = "referencing" version = "0.30.2" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2180,7 +2111,6 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2202,7 +2132,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2221,7 +2150,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "12.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" optional = false python-versions = ">=3.6.3,<4.0.0" files = [ @@ -2241,7 +2169,6 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] name = "rpds-py" version = "0.10.4" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2350,7 +2277,6 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -2365,7 +2291,6 @@ pyasn1 = ">=0.1.3" name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2382,7 +2307,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2394,7 +2318,6 @@ files = [ name = "smart-open" version = "6.4.0" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2416,7 +2339,6 @@ webhdfs = ["requests"] name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2431,7 +2353,6 @@ widechars = ["wcwidth"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2443,7 +2364,6 @@ files = [ name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2455,7 +2375,6 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." -category = "main" optional = false python-versions = "*" files = [ @@ -2471,7 +2390,6 @@ typing-extensions = ">=3.7.4" name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ @@ -2483,7 +2401,6 @@ files = [ name = "urllib3" version = "1.26.17" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -2500,7 +2417,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "virtualenv" version = "20.21.0" description = "Virtual Python Environment builder" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2521,7 +2437,6 @@ test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess name = "wcwidth" version = "0.2.12" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -2533,7 +2448,6 @@ files = [ name = "websocket-client" version = "1.6.4" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2550,7 +2464,6 @@ test = ["websockets"] name = "yarl" version = "1.9.4" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2654,7 +2567,6 @@ multidict = ">=4.0" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ diff --git a/tests/e2e/mnist_raycluster_sdk.py b/tests/e2e/mnist_raycluster_sdk.py index 0ded85e4..a513212a 100644 --- a/tests/e2e/mnist_raycluster_sdk.py +++ b/tests/e2e/mnist_raycluster_sdk.py @@ -11,7 +11,7 @@ from codeflare_sdk.job.jobs import DDPJobDefinition namespace = sys.argv[1] -ray_image = os.getenv("RAY_IMAGE") +ray_image = "quay.io/project-codeflare/ray:latest-py39-cu118" host = os.getenv("CLUSTER_HOSTNAME") ingress_options = {} @@ -24,6 +24,9 @@ "pathType": "Prefix", "path": "/", "host": host, + "annotations": { + "nginx.ingress.kubernetes.io/proxy-body-size": "100M", + } }, ] } @@ -59,8 +62,8 @@ jobdef = DDPJobDefinition( name="mnist", - script="mnist.py", - scheduler_args={"requirements": "requirements.txt"}, + script="./tests/e2e/mnist.py", + scheduler_args={"requirements": "./tests/e2e/requirements.txt"}, ) job = jobdef.submit(cluster) diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py new file mode 100644 index 00000000..44ec61a5 --- /dev/null +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -0,0 +1,122 @@ +import base64 +import logging +import os +import sys +from kubernetes import client, config +from kubernetes.client import V1Job, V1ObjectMeta, V1JobSpec, V1PodTemplateSpec, V1PodSpec, V1Container, V1VolumeMount, V1Volume, V1ConfigMapVolumeSource, V1EmptyDirVolumeSource, V1EnvVar, V1SecurityContext, V1SeccompProfile, V1Capabilities +from kubernetes.client.rest import ApiException +import time +import subprocess + +import pytest + +from support import random_choice, read_file + +class TestMNISTRayClusterSDK: + def setup_method(self): + # Load the kube config from the environment or Kube config file. 
+ config.load_kube_config() + + # Initialize Kubernetes client + self.api_instance = client.CoreV1Api() + self.batch_api = client.BatchV1Api() + self.cmap = client.V1ConfigMap() + + def teardown_method(self): + if hasattr(self, 'namespace'): + self.api_instance.delete_namespace(self.namespace) + if hasattr(self, 'configmap'): + self.api_instance.delete_namespaced_config_map(self.configmap.metadata.name, self.namespace) + + + def test_mnist_ray_cluster_sdk(self): + namespace = self.create_test_namespace() + + file_paths = [ + "./tests/e2e/mnist_raycluster_sdk_test.py", + "./tests/e2e/requirements.txt", + "./tests/e2e/mnist.py", + "./tests/e2e/install-codeflare-sdk.sh" + ] + self.create_config_map(namespace, file_paths) + + self.run_mnist_raycluster_sdk() + + + def create_test_namespace(self): + self.namespace = f"test-ns-{random_choice()}" + namespace_body = client.V1Namespace(metadata=client.V1ObjectMeta(name=self.namespace)) + self.api_instance.create_namespace(namespace_body) + return self.namespace + + def create_config_map(self, namespace, file_paths): + data = {os.path.basename(path): read_file(path) for path in file_paths} + binary_data = {key: base64.b64encode(value).decode('utf-8') for key, value in data.items()} + config_map = client.V1ConfigMap( + api_version="v1", + kind="ConfigMap", + metadata=client.V1ObjectMeta( + generate_name="config-", + namespace=namespace, + ), + binary_data=binary_data, + immutable=True, + ) + # config_map = client.V1ConfigMap(data=data) + self.api_instance.create_namespaced_config_map(namespace=namespace, body=config_map) + + def run_mnist_raycluster_sdk(self): + script_path = './tests/e2e/mnist_raycluster_sdk.py' + result = subprocess.run(['python', script_path, self.namespace]) + output = result.stdout + errors = result.stderr + if result.returncode != 0: + raise subprocess.CalledProcessError(result.returncode, 'python', output=output, stderr=errors) + return output + +# # Specifically used on KinD clusters +# def configure_pods(self): +# hostname = os.getenv('CLUSTER_HOSTNAME') +# node = self.get_first_node() +# node_ip = self.get_node_internal_ip(node) +# host_alias = client.V1HostAlias(ip=node_ip, hostnames=[hostname]) + +# pods = self.find_mnist_head_pod(self.namespace) +# for pod in pods: +# container = pod.spec.containers[0] +# if not pod.spec.host_aliases: +# pod.spec.host_aliases = [] +# pod.spec.host_aliases.append(host_alias) +# if not container.env: +# container.env = [] +# container.env.append(hostname) + + + + # def get_node_internal_ip(node): + # for address in node.status.addresses: + # if address.type == "InternalIP": + # ip = address.address + # return ip + + # def get_first_node(self): + # try: + # # List all nodes in the cluster + # nodes = self.api_instance.list_node() + # except ApiException as e: + # pytest.fail(f"Exception when calling CoreV1Api->list_node: {e}") + # return nodes.items[0] + + # def find_mnist_head_pod(self, namespace): + # try: + # # List all pods in the specified namespace + # pods = self.v1.list_namespaced_pod(namespace) + # except ApiException as e: + # print(f"Exception when calling CoreV1Api->list_namespaced_pod: {e}") + # return None + + # for pod in pods.items: + # if pod.metadata.name.startswith("mnist-head"): + # return pod + # print("No 'mnist-head' pod found in the namespace") + # return None diff --git a/tests/e2e/mnist_pip_requirements.txt b/tests/e2e/requirements.txt similarity index 100% rename from tests/e2e/mnist_pip_requirements.txt rename to tests/e2e/requirements.txt diff --git 
a/tests/e2e/support.py b/tests/e2e/support.py new file mode 100644 index 00000000..67ae34dc --- /dev/null +++ b/tests/e2e/support.py @@ -0,0 +1,14 @@ +import random +import string + +def read_file(file_name): + try: + with open(file_name, 'rb') as file: + return file.read() + except IOError as e: + raise e + + +alphabet = string.ascii_lowercase + string.digits +def random_choice(): + return ''.join(random.choices(alphabet, k=5)) From 1ab60e851d33cc255096c4f62cd1f75c42a51d6d Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 30 Jan 2024 16:38:33 +0000 Subject: [PATCH 120/496] Added SDK script directly to e2e test file --- ...rements.txt => mnist_pip_requirements.txt} | 0 tests/e2e/mnist_raycluster_sdk.py | 4 +- tests/e2e/mnist_raycluster_sdk_test.py | 226 +++++++++++------- 3 files changed, 137 insertions(+), 93 deletions(-) rename tests/e2e/{requirements.txt => mnist_pip_requirements.txt} (100%) diff --git a/tests/e2e/requirements.txt b/tests/e2e/mnist_pip_requirements.txt similarity index 100% rename from tests/e2e/requirements.txt rename to tests/e2e/mnist_pip_requirements.txt diff --git a/tests/e2e/mnist_raycluster_sdk.py b/tests/e2e/mnist_raycluster_sdk.py index a513212a..d9117d84 100644 --- a/tests/e2e/mnist_raycluster_sdk.py +++ b/tests/e2e/mnist_raycluster_sdk.py @@ -25,7 +25,7 @@ "path": "/", "host": host, "annotations": { - "nginx.ingress.kubernetes.io/proxy-body-size": "100M", + "nginx.ingress.kubernetes.io/proxy-body-size": "10M", } }, ] @@ -63,7 +63,7 @@ jobdef = DDPJobDefinition( name="mnist", script="./tests/e2e/mnist.py", - scheduler_args={"requirements": "./tests/e2e/requirements.txt"}, + scheduler_args={"requirements": "./tests/e2e/mnist_pip_requirements.txt"}, ) job = jobdef.submit(cluster) diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py index 44ec61a5..e0e5e32f 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.py +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -1,16 +1,26 @@ -import base64 -import logging -import os -import sys from kubernetes import client, config -from kubernetes.client import V1Job, V1ObjectMeta, V1JobSpec, V1PodTemplateSpec, V1PodSpec, V1Container, V1VolumeMount, V1Volume, V1ConfigMapVolumeSource, V1EmptyDirVolumeSource, V1EnvVar, V1SecurityContext, V1SeccompProfile, V1Capabilities -from kubernetes.client.rest import ApiException -import time +import kubernetes.client import subprocess +import sys +import os + +from time import sleep + +import ray + +from torchx.specs.api import AppState, is_terminal + +from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration +from codeflare_sdk.job.jobs import DDPJobDefinition + import pytest -from support import random_choice, read_file +from support import random_choice + +# Creates a Ray cluster, and trains the MNIST dataset using the CodeFlare SDK. +# Asserts creation of AppWrapper, RayCluster, and successful completion of the training job. 
+# Covers successfull installation of CodeFlare-SDK class TestMNISTRayClusterSDK: def setup_method(self): @@ -19,8 +29,7 @@ def setup_method(self): # Initialize Kubernetes client self.api_instance = client.CoreV1Api() - self.batch_api = client.BatchV1Api() - self.cmap = client.V1ConfigMap() + self.custom_api = kubernetes.client.CustomObjectsApi(self.api_instance.api_client) def teardown_method(self): if hasattr(self, 'namespace'): @@ -28,95 +37,130 @@ def teardown_method(self): if hasattr(self, 'configmap'): self.api_instance.delete_namespaced_config_map(self.configmap.metadata.name, self.namespace) - def test_mnist_ray_cluster_sdk(self): - namespace = self.create_test_namespace() - - file_paths = [ - "./tests/e2e/mnist_raycluster_sdk_test.py", - "./tests/e2e/requirements.txt", - "./tests/e2e/mnist.py", - "./tests/e2e/install-codeflare-sdk.sh" - ] - self.create_config_map(namespace, file_paths) - + self.create_test_namespace() self.run_mnist_raycluster_sdk() - def create_test_namespace(self): self.namespace = f"test-ns-{random_choice()}" namespace_body = client.V1Namespace(metadata=client.V1ObjectMeta(name=self.namespace)) self.api_instance.create_namespace(namespace_body) return self.namespace - def create_config_map(self, namespace, file_paths): - data = {os.path.basename(path): read_file(path) for path in file_paths} - binary_data = {key: base64.b64encode(value).decode('utf-8') for key, value in data.items()} - config_map = client.V1ConfigMap( - api_version="v1", - kind="ConfigMap", - metadata=client.V1ObjectMeta( - generate_name="config-", - namespace=namespace, - ), - binary_data=binary_data, - immutable=True, + def run_mnist_raycluster_sdk(self): + ray_image = "quay.io/project-codeflare/ray:latest-py39-cu118" + host = os.getenv("CLUSTER_HOSTNAME") + + ingress_options = {} + if host is not None: + ingress_options = { + "ingresses": [ + { + "ingressName": "ray-dashboard", + "port": 8265, + "pathType": "Prefix", + "path": "/", + "host": host, + "annotations": { + "nginx.ingress.kubernetes.io/proxy-body-size": "10M", + } + }, + ] + } + + cluster = Cluster( + ClusterConfiguration( + name="mnist", + namespace=self.namespace, + num_workers=1, + head_cpus="500m", + head_memory=2, + min_cpus="500m", + max_cpus=1, + min_memory=1, + max_memory=2, + num_gpus=0, + instascale=False, + image=ray_image, + ingress_options=ingress_options, + ) ) - # config_map = client.V1ConfigMap(data=data) - self.api_instance.create_namespaced_config_map(namespace=namespace, body=config_map) - def run_mnist_raycluster_sdk(self): - script_path = './tests/e2e/mnist_raycluster_sdk.py' - result = subprocess.run(['python', script_path, self.namespace]) - output = result.stdout - errors = result.stderr - if result.returncode != 0: - raise subprocess.CalledProcessError(result.returncode, 'python', output=output, stderr=errors) - return output - -# # Specifically used on KinD clusters -# def configure_pods(self): -# hostname = os.getenv('CLUSTER_HOSTNAME') -# node = self.get_first_node() -# node_ip = self.get_node_internal_ip(node) -# host_alias = client.V1HostAlias(ip=node_ip, hostnames=[hostname]) - -# pods = self.find_mnist_head_pod(self.namespace) -# for pod in pods: -# container = pod.spec.containers[0] -# if not pod.spec.host_aliases: -# pod.spec.host_aliases = [] -# pod.spec.host_aliases.append(host_alias) -# if not container.env: -# container.env = [] -# container.env.append(hostname) - - - - # def get_node_internal_ip(node): - # for address in node.status.addresses: - # if address.type == "InternalIP": - # ip = 
address.address - # return ip - - # def get_first_node(self): - # try: - # # List all nodes in the cluster - # nodes = self.api_instance.list_node() - # except ApiException as e: - # pytest.fail(f"Exception when calling CoreV1Api->list_node: {e}") - # return nodes.items[0] - - # def find_mnist_head_pod(self, namespace): - # try: - # # List all pods in the specified namespace - # pods = self.v1.list_namespaced_pod(namespace) - # except ApiException as e: - # print(f"Exception when calling CoreV1Api->list_namespaced_pod: {e}") - # return None - - # for pod in pods.items: - # if pod.metadata.name.startswith("mnist-head"): - # return pod - # print("No 'mnist-head' pod found in the namespace") - # return None + + cluster.up() + self.assert_appwrapper_exists() + + cluster.status() + + cluster.wait_ready() + self.assert_raycluster_exists() + + cluster.status() + + cluster.details() + + jobdef = DDPJobDefinition( + name="mnist", + script="./tests/e2e/mnist.py", + scheduler_args={"requirements": "./tests/e2e/mnist_pip_requirements.txt"}, + ) + job = jobdef.submit(cluster) + + done = False + time = 0 + timeout = 900 + while not done: + status = job.status() + if is_terminal(status.state): + break + if not done: + print(status) + if timeout and time >= timeout: + raise TimeoutError(f"job has timed out after waiting {timeout}s") + sleep(5) + time += 5 + + print(job.status()) + self.assert_job_completion(status) + + print(job.logs()) + + cluster.down() + + + # if not status.state == AppState.SUCCEEDED: + + # script_path = './tests/e2e/mnist_raycluster_sdk.py' + # result = subprocess.run(['python', script_path, self.namespace]) + # output = result.stdout + # errors = result.stderr + # if result.returncode != 0: + # raise subprocess.CalledProcessError(result.returncode, 'python', output=output, stderr=errors) + # return output + + + def assert_appwrapper_exists(self): + try: + self.custom_api.get_namespaced_custom_object("workload.codeflare.dev", "v1beta1", self.namespace, "appwrappers", "mnist") + print(f"AppWrapper 'mnist' has been created in the namespace: '{self.namespace}'") + assert True + except Exception as e: + print(f"AppWrapper 'mnist' has not been created. Error: {e}") + assert False + + def assert_raycluster_exists(self): + try: + self.custom_api.get_namespaced_custom_object("ray.io", "v1", self.namespace, "rayclusters", "mnist") + print(f"RayCluster 'mnist' created successfully in the namespace: '{self.namespace}'") + assert True + except Exception as e: + print(f"RayCluster 'mnist' has not been created. 
Error: {e}") + assert False + + def assert_job_completion(self, status): + if status.state == AppState.SUCCEEDED: + print(f"Job has completed: '{status.state}'") + assert True + else: + print(f"Job has completed: '{status.state}'") + assert False From c4a1065d728a1ea67e4f3d13129fe86dbaa9cfaa Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 30 Jan 2024 17:00:20 +0000 Subject: [PATCH 121/496] Clean-up e2e test files --- tests/e2e/mnist_raycluster_sdk.py | 93 ----------- tests/e2e/mnist_raycluster_sdk_test.go | 208 ------------------------- tests/e2e/mnist_raycluster_sdk_test.py | 16 +- 3 files changed, 1 insertion(+), 316 deletions(-) delete mode 100644 tests/e2e/mnist_raycluster_sdk.py delete mode 100644 tests/e2e/mnist_raycluster_sdk_test.go diff --git a/tests/e2e/mnist_raycluster_sdk.py b/tests/e2e/mnist_raycluster_sdk.py deleted file mode 100644 index d9117d84..00000000 --- a/tests/e2e/mnist_raycluster_sdk.py +++ /dev/null @@ -1,93 +0,0 @@ -import sys -import os - -from time import sleep - -import ray - -from torchx.specs.api import AppState, is_terminal - -from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration -from codeflare_sdk.job.jobs import DDPJobDefinition - -namespace = sys.argv[1] -ray_image = "quay.io/project-codeflare/ray:latest-py39-cu118" -host = os.getenv("CLUSTER_HOSTNAME") - -ingress_options = {} -if host is not None: - ingress_options = { - "ingresses": [ - { - "ingressName": "ray-dashboard", - "port": 8265, - "pathType": "Prefix", - "path": "/", - "host": host, - "annotations": { - "nginx.ingress.kubernetes.io/proxy-body-size": "10M", - } - }, - ] - } - -cluster = Cluster( - ClusterConfiguration( - name="mnist", - namespace=namespace, - num_workers=1, - head_cpus="500m", - head_memory=2, - min_cpus="500m", - max_cpus=1, - min_memory=1, - max_memory=2, - num_gpus=0, - instascale=False, - image=ray_image, - ingress_options=ingress_options, - ) -) - - -cluster.up() - -cluster.status() - -cluster.wait_ready() - -cluster.status() - -cluster.details() - -jobdef = DDPJobDefinition( - name="mnist", - script="./tests/e2e/mnist.py", - scheduler_args={"requirements": "./tests/e2e/mnist_pip_requirements.txt"}, -) -job = jobdef.submit(cluster) - -done = False -time = 0 -timeout = 900 -while not done: - status = job.status() - if is_terminal(status.state): - break - if not done: - print(status) - if timeout and time >= timeout: - raise TimeoutError(f"job has timed out after waiting {timeout}s") - sleep(5) - time += 5 - -print(f"Job has completed: {status.state}") - -print(job.logs()) - -cluster.down() - -if not status.state == AppState.SUCCEEDED: - exit(1) -else: - exit(0) diff --git a/tests/e2e/mnist_raycluster_sdk_test.go b/tests/e2e/mnist_raycluster_sdk_test.go deleted file mode 100644 index 963a4872..00000000 --- a/tests/e2e/mnist_raycluster_sdk_test.go +++ /dev/null @@ -1,208 +0,0 @@ -/* -Copyright 2023. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package e2e - -import ( - "testing" - - . "github.com/onsi/gomega" - . 
"github.com/project-codeflare/codeflare-common/support" - mcadv1beta1 "github.com/project-codeflare/multi-cluster-app-dispatcher/pkg/apis/controller/v1beta1" - rayv1 "github.com/ray-project/kuberay/ray-operator/apis/ray/v1" - - batchv1 "k8s.io/api/batch/v1" - corev1 "k8s.io/api/core/v1" - rbacv1 "k8s.io/api/rbac/v1" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" -) - -// Creates a Ray cluster, and trains the MNIST dataset using the CodeFlare SDK. -// Asserts successful completion of the training job. -// -// This covers the installation of the CodeFlare SDK, as well as the RBAC required -// for the SDK to successfully perform requests to the cluster, on behalf of the -// impersonated user. -func TestMNISTRayClusterSDK(t *testing.T) { - test := With(t) - test.T().Parallel() - - // Create a namespace - namespace := test.NewTestNamespace() - - // Test configuration - config := CreateConfigMap(test, namespace.Name, map[string][]byte{ - // SDK script - "mnist_raycluster_sdk.py": ReadFile(test, "mnist_raycluster_sdk.py"), - // pip requirements - "requirements.txt": ReadFile(test, "mnist_pip_requirements.txt"), - // MNIST training script - "mnist.py": ReadFile(test, "mnist.py"), - // codeflare-sdk installation script - "install-codeflare-sdk.sh": ReadFile(test, "install-codeflare-sdk.sh"), - }) - - // Create RBAC, retrieve token for user with limited rights - policyRules := []rbacv1.PolicyRule{ - { - Verbs: []string{"get", "create", "delete", "list", "patch", "update"}, - APIGroups: []string{mcadv1beta1.GroupName}, - Resources: []string{"appwrappers"}, - }, - { - Verbs: []string{"get", "list"}, - APIGroups: []string{rayv1.GroupVersion.Group}, - Resources: []string{"rayclusters", "rayclusters/status"}, - }, - { - Verbs: []string{"get", "list"}, - APIGroups: []string{"route.openshift.io"}, - Resources: []string{"routes"}, - }, - { - Verbs: []string{"get", "list"}, - APIGroups: []string{"networking.k8s.io"}, - Resources: []string{"ingresses"}, - }, - } - - sa := CreateServiceAccount(test, namespace.Name) - role := CreateRole(test, namespace.Name, policyRules) - CreateRoleBinding(test, namespace.Name, sa, role) - - job := &batchv1.Job{ - TypeMeta: metav1.TypeMeta{ - APIVersion: batchv1.SchemeGroupVersion.String(), - Kind: "Job", - }, - ObjectMeta: metav1.ObjectMeta{ - Name: "sdk", - Namespace: namespace.Name, - }, - Spec: batchv1.JobSpec{ - Completions: Ptr(int32(1)), - Parallelism: Ptr(int32(1)), - BackoffLimit: Ptr(int32(0)), - Template: corev1.PodTemplateSpec{ - Spec: corev1.PodSpec{ - Volumes: []corev1.Volume{ - { - Name: "test", - VolumeSource: corev1.VolumeSource{ - ConfigMap: &corev1.ConfigMapVolumeSource{ - LocalObjectReference: corev1.LocalObjectReference{ - Name: config.Name, - }, - }, - }, - }, - { - Name: "codeflare-sdk", - VolumeSource: corev1.VolumeSource{ - EmptyDir: &corev1.EmptyDirVolumeSource{}, - }, - }, - { - Name: "workdir", - VolumeSource: corev1.VolumeSource{ - EmptyDir: &corev1.EmptyDirVolumeSource{}, - }, - }, - }, - Containers: []corev1.Container{ - { - Name: "test", - // FIXME: switch to base Python image once the dependency on OpenShift CLI is removed - // See https://github.com/project-codeflare/codeflare-sdk/pull/146 - Image: "quay.io/opendatahub/notebooks:jupyter-minimal-ubi8-python-3.8-4c8f26e", - Env: []corev1.EnvVar{ - {Name: "PYTHONUSERBASE", Value: "/workdir"}, - {Name: "RAY_IMAGE", Value: GetRayImage()}, - }, - Command: []string{ - "/bin/sh", "-c", - "while [ ! 
-f /codeflare-sdk/pyproject.toml ]; do sleep 1; done; " + - "cp /test/* . && chmod +x install-codeflare-sdk.sh && ./install-codeflare-sdk.sh && python mnist_raycluster_sdk.py " + namespace.Name, - }, - VolumeMounts: []corev1.VolumeMount{ - { - Name: "test", - MountPath: "/test", - }, - { - Name: "codeflare-sdk", - MountPath: "/codeflare-sdk", - }, - { - Name: "workdir", - MountPath: "/workdir", - }, - }, - WorkingDir: "/workdir", - SecurityContext: &corev1.SecurityContext{ - AllowPrivilegeEscalation: Ptr(false), - SeccompProfile: &corev1.SeccompProfile{ - Type: "RuntimeDefault", - }, - Capabilities: &corev1.Capabilities{ - Drop: []corev1.Capability{"ALL"}, - }, - RunAsNonRoot: Ptr(true), - }, - }, - }, - RestartPolicy: corev1.RestartPolicyNever, - ServiceAccountName: sa.Name, - }, - }, - }, - } - if GetClusterType(test) == KindCluster { - // Take first KinD node and redirect pod hostname requests there - node := GetNodes(test)[0] - hostname := GetClusterHostname(test) - IP := GetNodeInternalIP(test, node) - - test.T().Logf("Setting KinD cluster hostname '%s' to node IP '%s' for SDK pod", hostname, IP) - job.Spec.Template.Spec.HostAliases = []corev1.HostAlias{ - { - IP: IP, - Hostnames: []string{hostname}, - }, - } - - // Propagate hostname into Python code as env variable - hostnameEnvVar := corev1.EnvVar{Name: "CLUSTER_HOSTNAME", Value: hostname} - job.Spec.Template.Spec.Containers[0].Env = append(job.Spec.Template.Spec.Containers[0].Env, hostnameEnvVar) - } - job, err := test.Client().Core().BatchV1().Jobs(namespace.Name).Create(test.Ctx(), job, metav1.CreateOptions{}) - test.Expect(err).NotTo(HaveOccurred()) - test.T().Logf("Created Job %s/%s successfully", job.Namespace, job.Name) - - // Setup the codeflare-sdk inside the pod associated to the created job - SetupCodeflareSDKInsidePod(test, namespace, job.Name) - - test.T().Logf("Waiting for Job %s/%s to complete", job.Namespace, job.Name) - test.Eventually(Job(test, job.Namespace, job.Name), TestTimeoutLong).Should( - Or( - WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue)), - WithTransform(ConditionStatus(batchv1.JobFailed), Equal(corev1.ConditionTrue)), - )) - - // Assert the job has completed successfully - test.Expect(GetJob(test, job.Namespace, job.Name)). 
- To(WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue))) -} diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py index e0e5e32f..33525d98 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.py +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -1,8 +1,6 @@ from kubernetes import client, config import kubernetes.client -import subprocess -import sys import os from time import sleep @@ -86,7 +84,6 @@ def run_mnist_raycluster_sdk(self): ) ) - cluster.up() self.assert_appwrapper_exists() @@ -127,18 +124,7 @@ def run_mnist_raycluster_sdk(self): cluster.down() - - # if not status.state == AppState.SUCCEEDED: - - # script_path = './tests/e2e/mnist_raycluster_sdk.py' - # result = subprocess.run(['python', script_path, self.namespace]) - # output = result.stdout - # errors = result.stderr - # if result.returncode != 0: - # raise subprocess.CalledProcessError(result.returncode, 'python', output=output, stderr=errors) - # return output - - + # Assertions def assert_appwrapper_exists(self): try: self.custom_api.get_namespaced_custom_object("workload.codeflare.dev", "v1beta1", self.namespace, "appwrappers", "mnist") From cd9466ef23b1080da832e2cb20ec4fadda85fae5 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 30 Jan 2024 17:26:33 +0000 Subject: [PATCH 122/496] Update e2e workflow for Poetry PyTest run --- .github/workflows/e2e_tests.yaml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index a89f5c21..d0b808fa 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -103,7 +103,11 @@ jobs: echo "CODEFLARE_TEST_OUTPUT_DIR=${CODEFLARE_TEST_OUTPUT_DIR}" >> $GITHUB_ENV set -euo pipefail - go test -timeout 30m -v ./tests/e2e -json 2>&1 | tee ${CODEFLARE_TEST_OUTPUT_DIR}/gotest.log | gotestfmt + pip install poetry + poetry config virtualenvs.create false + poetry lock --no-update + poetry install --with test,docs + poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py --json-report --json-report-file=${CODEFLARE_TEST_OUTPUT_DIR}/pytest_report.json 2>&1 | tee ${CODEFLARE_TEST_OUTPUT_DIR}/pytest.log - name: Print CodeFlare operator logs if: always() && steps.deploy.outcome == 'success' From 003b8f86bf0b5b4466d42da6bfd148c149ea3938 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 30 Jan 2024 17:57:23 +0000 Subject: [PATCH 123/496] Adjust proxy-body-size value --- tests/e2e/mnist_raycluster_sdk_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py index 33525d98..2f678790 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.py +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -60,7 +60,7 @@ def run_mnist_raycluster_sdk(self): "path": "/", "host": host, "annotations": { - "nginx.ingress.kubernetes.io/proxy-body-size": "10M", + "nginx.ingress.kubernetes.io/proxy-body-size": "100M", } }, ] From 2c944d54cd717dc48f50ddedd413572460e257de Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 31 Jan 2024 11:02:17 +0000 Subject: [PATCH 124/496] Env var for custom Ray Image --- .github/workflows/e2e_tests.yaml | 1 - poetry.lock | 1 + tests/e2e/mnist_raycluster_sdk_test.py | 43 +++++++++++++++++++------- tests/e2e/support.py | 15 +++++---- 4 files changed, 39 insertions(+), 21 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index d0b808fa..bf30c878 100644 --- 
a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -104,7 +104,6 @@ jobs: set -euo pipefail pip install poetry - poetry config virtualenvs.create false poetry lock --no-update poetry install --with test,docs poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py --json-report --json-report-file=${CODEFLARE_TEST_OUTPUT_DIR}/pytest_report.json 2>&1 | tee ${CODEFLARE_TEST_OUTPUT_DIR}/pytest.log diff --git a/poetry.lock b/poetry.lock index 9a421aa7..5496961b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1977,6 +1977,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py index 2f678790..9b8c3d0a 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.py +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -14,12 +14,13 @@ import pytest -from support import random_choice +from support import random_choice, get_ray_image # Creates a Ray cluster, and trains the MNIST dataset using the CodeFlare SDK. # Asserts creation of AppWrapper, RayCluster, and successful completion of the training job. # Covers successfull installation of CodeFlare-SDK + class TestMNISTRayClusterSDK: def setup_method(self): # Load the kube config from the environment or Kube config file. 
@@ -27,13 +28,17 @@ def setup_method(self): # Initialize Kubernetes client self.api_instance = client.CoreV1Api() - self.custom_api = kubernetes.client.CustomObjectsApi(self.api_instance.api_client) + self.custom_api = kubernetes.client.CustomObjectsApi( + self.api_instance.api_client + ) def teardown_method(self): - if hasattr(self, 'namespace'): + if hasattr(self, "namespace"): self.api_instance.delete_namespace(self.namespace) - if hasattr(self, 'configmap'): - self.api_instance.delete_namespaced_config_map(self.configmap.metadata.name, self.namespace) + if hasattr(self, "configmap"): + self.api_instance.delete_namespaced_config_map( + self.configmap.metadata.name, self.namespace + ) def test_mnist_ray_cluster_sdk(self): self.create_test_namespace() @@ -41,12 +46,14 @@ def test_mnist_ray_cluster_sdk(self): def create_test_namespace(self): self.namespace = f"test-ns-{random_choice()}" - namespace_body = client.V1Namespace(metadata=client.V1ObjectMeta(name=self.namespace)) + namespace_body = client.V1Namespace( + metadata=client.V1ObjectMeta(name=self.namespace) + ) self.api_instance.create_namespace(namespace_body) return self.namespace def run_mnist_raycluster_sdk(self): - ray_image = "quay.io/project-codeflare/ray:latest-py39-cu118" + ray_image = get_ray_image() host = os.getenv("CLUSTER_HOSTNAME") ingress_options = {} @@ -61,7 +68,7 @@ def run_mnist_raycluster_sdk(self): "host": host, "annotations": { "nginx.ingress.kubernetes.io/proxy-body-size": "100M", - } + }, }, ] } @@ -127,8 +134,16 @@ def run_mnist_raycluster_sdk(self): # Assertions def assert_appwrapper_exists(self): try: - self.custom_api.get_namespaced_custom_object("workload.codeflare.dev", "v1beta1", self.namespace, "appwrappers", "mnist") - print(f"AppWrapper 'mnist' has been created in the namespace: '{self.namespace}'") + self.custom_api.get_namespaced_custom_object( + "workload.codeflare.dev", + "v1beta1", + self.namespace, + "appwrappers", + "mnist", + ) + print( + f"AppWrapper 'mnist' has been created in the namespace: '{self.namespace}'" + ) assert True except Exception as e: print(f"AppWrapper 'mnist' has not been created. Error: {e}") @@ -136,8 +151,12 @@ def assert_appwrapper_exists(self): def assert_raycluster_exists(self): try: - self.custom_api.get_namespaced_custom_object("ray.io", "v1", self.namespace, "rayclusters", "mnist") - print(f"RayCluster 'mnist' created successfully in the namespace: '{self.namespace}'") + self.custom_api.get_namespaced_custom_object( + "ray.io", "v1", self.namespace, "rayclusters", "mnist" + ) + print( + f"RayCluster 'mnist' created successfully in the namespace: '{self.namespace}'" + ) assert True except Exception as e: print(f"RayCluster 'mnist' has not been created. 
Error: {e}") diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 67ae34dc..303b03c8 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -1,14 +1,13 @@ +import os import random import string -def read_file(file_name): - try: - with open(file_name, 'rb') as file: - return file.read() - except IOError as e: - raise e + +def get_ray_image(): + default_ray_image = "quay.io/project-codeflare/ray:latest-py39-cu118" + return os.getenv("RAY_IMAGE", default_ray_image) -alphabet = string.ascii_lowercase + string.digits def random_choice(): - return ''.join(random.choices(alphabet, k=5)) + alphabet = string.ascii_lowercase + string.digits + return "".join(random.choices(alphabet, k=5)) From d9b725da92bbb57625c52bc0acabeb1bdf3537ca Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 31 Jan 2024 14:55:30 +0000 Subject: [PATCH 125/496] Adjust e2e workflow output logs --- .github/workflows/e2e_tests.yaml | 4 ++-- tests/e2e/mnist_raycluster_sdk_test.py | 4 ---- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index bf30c878..55f8fba6 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -104,9 +104,9 @@ jobs: set -euo pipefail pip install poetry - poetry lock --no-update poetry install --with test,docs - poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py --json-report --json-report-file=${CODEFLARE_TEST_OUTPUT_DIR}/pytest_report.json 2>&1 | tee ${CODEFLARE_TEST_OUTPUT_DIR}/pytest.log + echo "Running e2e tests..." + poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 - name: Print CodeFlare operator logs if: always() && steps.deploy.outcome == 'success' diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py index 9b8c3d0a..26f76b60 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.py +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -35,10 +35,6 @@ def setup_method(self): def teardown_method(self): if hasattr(self, "namespace"): self.api_instance.delete_namespace(self.namespace) - if hasattr(self, "configmap"): - self.api_instance.delete_namespaced_config_map( - self.configmap.metadata.name, self.namespace - ) def test_mnist_ray_cluster_sdk(self): self.create_test_namespace() From 45c834279c96e2d0a6a982dc37e80b9ddfa9178e Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 6 Feb 2024 17:15:00 +0000 Subject: [PATCH 126/496] Add sdk-user with limited permissions to run e2e test --- .github/workflows/e2e_tests.yaml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 55f8fba6..9b22e058 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -93,6 +93,23 @@ jobs: cd .. 
+ - name: Add user to KinD + uses: ./common/github-actions/kind-add-user + with: + user-name: sdk-user + + - name: Configure RBAC for sdk user with limited permissions + run: | + kubectl create clusterrole list-ingresses --verb=get,list --resource=ingresses + kubectl create clusterrolebinding sdk-user-list-ingresses --clusterrole=list-ingresses --user=sdk-user + kubectl create clusterrole appwrapper-creator --verb=get,list,create,delete,patch --resource=appwrappers + kubectl create clusterrolebinding sdk-user-appwrapper-creator --clusterrole=appwrapper-creator --user=sdk-user + kubectl create clusterrole namespace-creator --verb=get,list,create,delete,patch --resource=namespaces + kubectl create clusterrolebinding sdk-user-namespace-creator --clusterrole=namespace-creator --user=sdk-user + kubectl create clusterrole list-rayclusters --verb=get,list --resource=rayclusters + kubectl create clusterrolebinding sdk-user-list-rayclusters --clusterrole=list-rayclusters --user=sdk-user + kubectl config use-context sdk-user + - name: Run e2e tests run: | From ae3912c9c263fd349de354883b44a6c39b04e7b9 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 7 Feb 2024 10:27:02 +0000 Subject: [PATCH 127/496] Local e2e Testing documentation --- README.md | 7 ++++++- docs/e2e.md | 38 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 docs/e2e.md diff --git a/README.md b/README.md index 8228b685..088b2d3d 100644 --- a/README.md +++ b/README.md @@ -51,10 +51,15 @@ To build the codeflare-sdk pre-commit image run `podman build -f .github/build/C ### Testing - To install codeflare-sdk in editable mode, run `pip install -e .` from the repo root. -- To run the unit tests, run `pytest -v tests/unit_test.py` - Any new test functions/scripts can be added into the `tests` folder - NOTE: Functional tests coming soon, will live in `tests/func_test.py` +#### Unit Testing +- To run the unit tests, run `pytest -v tests/unit_test.py` + +#### Local e2e Testing +- Please follow the [e2e documentation](https://github.com/project-codeflare/codeflare-sdk/blob/main/docs/e2e.md) + #### Code Coverage - Run tests with the following command: `coverage run -m --source=src pytest tests/unit_test.py` diff --git a/docs/e2e.md b/docs/e2e.md new file mode 100644 index 00000000..c956b248 --- /dev/null +++ b/docs/e2e.md @@ -0,0 +1,38 @@ +# Running e2e tests locally +#### Pre-requisites +- We recommend using Python 3.9, along with Poetry. + +## On KinD clusters +Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127.0.0.1 kind`. This will map your localhost IP address to the KinD cluster's hostname.
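+For illustration only, the `/etc/hosts` entry described above is the single line below:
+```
+127.0.0.1 kind
+```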
This is already performed on [GitHub Actions](https://github.com/project-codeflare/codeflare-common/blob/1edd775e2d4088a5a0bfddafb06ff3a773231c08/github-actions/kind/action.yml#L70-L72) + +- Setup Phase: + - Pull the [codeflare-operator repo](https://github.com/project-codeflare/codeflare-operator) and run the following make targets: + ``` + make kind-e2e + export CLUSTER_HOSTNAME=kind + export CODEFLARE_TEST_TIMEOUT_LONG=20m + make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.1.0 + make setup-e2e + ``` +- Test Phase: + - Once we have the codeflare-operator and kuberay-operator running and ready, we can run the e2e test on the codeflare-sdk repository: + ``` + poetry install --with test,docs + poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py + ``` + + + +## On OpenShift clusters +- Setup Phase: + - Pull the [codeflare-operator repo](https://github.com/project-codeflare/codeflare-operator) and run the following make targets: + ``` + make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.1.0 + make setup-e2e + ``` +- Test Phase: + - Once we have the codeflare-operator and kuberay-operator running and ready, we can run the e2e test on the codeflare-sdk repository: + ``` + poetry install --with test,docs + poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py + ``` From 660f328ea0b42153bc2f7115e3d3d1b0e270a182 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 7 Feb 2024 10:39:06 +0000 Subject: [PATCH 128/496] Add sdk-user to e2e documentation --- docs/e2e.md | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/docs/e2e.md b/docs/e2e.md index c956b248..ce04c769 100644 --- a/docs/e2e.md +++ b/docs/e2e.md @@ -14,6 +14,47 @@ Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127 make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.1.0 make setup-e2e ``` + + - **(Optional)** - Create and add `sdk-user` with limited permissions to the cluster to run through the e2e tests: + ``` + # Get KinD certificates + docker cp kind-control-plane:/etc/kubernetes/pki/ca.crt . + docker cp kind-control-plane:/etc/kubernetes/pki/ca.key . 
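+ # These two files are the KinD cluster's CA certificate and key; the openssl commands below use them to sign the new sdk-user certificate.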
+ + # Generate certificates for new user + openssl genrsa -out user.key 2048 + openssl req -new -key user.key -out user.csr -subj '/CN=sdk-user/O=tenant' + openssl x509 -req -in user.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out user.crt -days 360 + + # Add generated certificated to KinD context + user_crt=$(base64 --wrap=0 user.crt) + user_key=$(base64 --wrap=0 user.key) + yq eval -i ".contexts += {\"context\": {\"cluster\": \"kind-kind\", \"user\": \"sdk-user\"}, \"name\": \"sdk-user\"}" $HOME/.kube/config + yq eval -i ".users += {\"name\": \"sdk-user\", \"user\": {\"client-certificate-data\": \"$user_crt\", \"client-key-data\": \"$user_key\"}}" $HOME/.kube/config + cat $HOME/.kube/config + + # Cleanup + rm ca.crt + rm ca.srl + rm ca.key + rm user.crt + rm user.key + rm user.csr + + # Add RBAC permissions to sdk-user + kubectl create clusterrole list-ingresses --verb=get,list --resource=ingresses + kubectl create clusterrolebinding sdk-user-list-ingresses --clusterrole=list-ingresses --user=sdk-user + kubectl create clusterrole appwrapper-creator --verb=get,list,create,delete,patch --resource=appwrappers + kubectl create clusterrolebinding sdk-user-appwrapper-creator --clusterrole=appwrapper-creator --user=sdk-user + kubectl create clusterrole namespace-creator --verb=get,list,create,delete,patch --resource=namespaces + kubectl create clusterrolebinding sdk-user-namespace-creator --clusterrole=namespace-creator --user=sdk-user + kubectl create clusterrole list-rayclusters --verb=get,list --resource=rayclusters + kubectl create clusterrolebinding sdk-user-list-rayclusters --clusterrole=list-rayclusters --user=sdk-user + kubectl config use-context sdk-user + + ``` + + - Test Phase: - Once we have the codeflare-operator and kuberay-operator running and ready, we can run the e2e test on the codeflare-sdk repository: ``` From 9e6088ac66e6fd4498d638af2613f0908f4c19b5 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Thu, 1 Feb 2024 18:21:20 +0000 Subject: [PATCH 129/496] Fix cluster.status() for Routes --- src/codeflare_sdk/cluster/cluster.py | 62 +++++++++++++++++++--------- tests/unit_test.py | 1 + 2 files changed, 43 insertions(+), 20 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 607e59f3..aff3ea6b 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -902,25 +902,47 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: status = RayClusterStatus(rc["status"]["state"].lower()) else: status = RayClusterStatus.UNKNOWN - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress(rc["metadata"]["namespace"]) - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) - ray_ingress = None - for ingress in ingresses.items: - annotations = ingress.metadata.annotations - protocol = "http" - if ( - ingress.metadata.name == f"ray-dashboard-{rc['metadata']['name']}" - or ingress.metadata.name.startswith(f"{rc['metadata']['name']}-ingress") - ): - if annotations == None: - protocol = "http" - elif "route.openshift.io/termination" in annotations: - protocol = "https" - ray_ingress = f"{protocol}://{ingress.spec.rules[0].host}" + config_check() + dashboard_url = None + if is_openshift_cluster(): + try: + api_instance = client.CustomObjectsApi(api_config_handler()) + routes = api_instance.list_namespaced_custom_object( + group="route.openshift.io", + version="v1", + 
namespace=rc["metadata"]["namespace"], + plural="routes", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + for route in routes["items"]: + rc_name = rc["metadata"]["name"] + if route["metadata"]["name"] == f"ray-dashboard-{rc_name}" or route[ + "metadata" + ]["name"].startswith(f"{rc_name}-ingress"): + protocol = "https" if route["spec"].get("tls") else "http" + dashboard_url = f"{protocol}://{route['spec']['host']}" + else: + try: + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress( + rc["metadata"]["namespace"] + ) + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{rc['metadata']['name']}" + or ingress.metadata.name.startswith(f"{rc['metadata']['name']}-ingress") + ): + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + dashboard_url = f"{protocol}://{ingress.spec.rules[0].host}" return RayCluster( name=rc["metadata"]["name"], @@ -947,7 +969,7 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: head_gpu=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ "resources" ]["limits"]["nvidia.com/gpu"], - dashboard=ray_ingress, + dashboard=dashboard_url, ) diff --git a/tests/unit_test.py b/tests/unit_test.py index e0a3c385..035789cf 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1903,6 +1903,7 @@ def route_retrieval(group, version, namespace, plural, name): def test_list_clusters(mocker, capsys): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", side_effect=get_obj_none, From ca65dddfa55e1e442618866c85740add695b8030 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 6 Feb 2024 14:45:15 +0000 Subject: [PATCH 130/496] Add unit tests for rc routes --- tests/unit_test.py | 48 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/tests/unit_test.py b/tests/unit_test.py index 035789cf..fc9ecde2 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -32,6 +32,7 @@ from codeflare_sdk.cluster.cluster import ( Cluster, ClusterConfiguration, + _map_to_ray_cluster, list_all_clusters, list_all_queued, _copy_to_ray, @@ -1901,6 +1902,53 @@ def route_retrieval(group, version, namespace, plural, name): } +def test_map_to_ray_cluster(mocker): + mocker.patch("kubernetes.config.load_kube_config") + + mocker.patch( + "codeflare_sdk.cluster.cluster.is_openshift_cluster", return_value=True + ) + + mock_api_client = mocker.MagicMock(spec=client.ApiClient) + mocker.patch( + "codeflare_sdk.cluster.auth.api_config_handler", return_value=mock_api_client + ) + + mock_routes = { + "items": [ + { + "apiVersion": "route.openshift.io/v1", + "kind": "Route", + "metadata": { + "name": "ray-dashboard-quicktest", + "namespace": "ns", + }, + "spec": {"host": "ray-dashboard-quicktest"}, + }, + ] + } + + def custom_side_effect(group, version, namespace, plural, **kwargs): + if plural == "routes": + return mock_routes + elif plural == "rayclusters": + return get_ray_obj("ray.io", "v1", "ns", "rayclusters") + + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=custom_side_effect, + ) + + rc = 
get_ray_obj("ray.io", "v1", "ns", "rayclusters")["items"][0] + rc_name = rc["metadata"]["name"] + rc_dashboard = f"http://ray-dashboard-{rc_name}" + + result = _map_to_ray_cluster(rc) + + assert result is not None + assert result.dashboard == rc_dashboard + + def test_list_clusters(mocker, capsys): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch("kubernetes.client.ApisApi.get_api_versions") From cfc708d589db2c9373250aa915c84e6b835fdae8 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 7 Feb 2024 16:56:07 +0000 Subject: [PATCH 131/496] Added astefanutti to approvers & reviewers --- OWNERS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/OWNERS b/OWNERS index 6ef2c689..f3fcc7ee 100644 --- a/OWNERS +++ b/OWNERS @@ -1,4 +1,5 @@ approvers: + - astefanutti - Bobbins228 - dimakis - Fiona-Waters @@ -7,6 +8,7 @@ approvers: - MichaelClifford reviewers: - anishasthana + - astefanutti - Bobbins228 - ChristianZaccaria - dimakis From 3a15f464fde5a2f055cab50329ad07ebdcab46ea Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 8 Feb 2024 14:47:48 +0000 Subject: [PATCH 132/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index 59d64b37..c1490035 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 93% - 93% + 90% + 90% From 5f0fc09476b140d5f2eb2a9c880b538667147bd2 Mon Sep 17 00:00:00 2001 From: Eoin Gallinagh Date: Fri, 9 Feb 2024 13:06:24 +0000 Subject: [PATCH 133/496] add: simplified import statements for the codeflare-sdk --- src/codeflare_sdk/__init__.py | 18 ++++++++++++++++++ src/codeflare_sdk/cluster/__init__.py | 18 ++++++++++++++++++ src/codeflare_sdk/job/__init__.py | 3 +++ 3 files changed, 39 insertions(+) diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index e69de29b..80bd36c6 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -0,0 +1,18 @@ +from .cluster import ( + Authentication, + KubeConfiguration, + TokenAuthentication, + KubeConfigFileAuthentication, + AWManager, + Cluster, + ClusterConfiguration, + RayClusterStatus, + AppWrapperStatus, + CodeFlareClusterStatus, + RayCluster, + AppWrapper, +) + +from .job import JobDefinition, Job, DDPJobDefinition, DDPJob, RayJobClient + +from .utils import generate_cert diff --git a/src/codeflare_sdk/cluster/__init__.py b/src/codeflare_sdk/cluster/__init__.py index e69de29b..73950a5c 100644 --- a/src/codeflare_sdk/cluster/__init__.py +++ b/src/codeflare_sdk/cluster/__init__.py @@ -0,0 +1,18 @@ +from .auth import ( + Authentication, + KubeConfiguration, + TokenAuthentication, + KubeConfigFileAuthentication, +) + +from .model import ( + RayClusterStatus, + AppWrapperStatus, + CodeFlareClusterStatus, + RayCluster, + AppWrapper, +) + +from .cluster import Cluster, ClusterConfiguration + +from .awload import AWManager diff --git a/src/codeflare_sdk/job/__init__.py b/src/codeflare_sdk/job/__init__.py index e69de29b..c5b5819a 100644 --- a/src/codeflare_sdk/job/__init__.py +++ b/src/codeflare_sdk/job/__init__.py @@ -0,0 +1,3 @@ +from .jobs import JobDefinition, Job, DDPJobDefinition, DDPJob + +from .ray_jobs import RayJobClient From d987838c417fb97d8ed0aa15f206f1574b755734 Mon Sep 17 00:00:00 2001 From: oksanabaza Date: Mon, 12 Feb 2024 16:55:48 +0000 Subject: [PATCH 134/496] Login method error return --- src/codeflare_sdk/cluster/auth.py | 5 ++--- src/codeflare_sdk/utils/kube_api_helpers.py | 4 +++- 2 files changed, 5 
insertions(+), 4 deletions(-) diff --git a/src/codeflare_sdk/cluster/auth.py b/src/codeflare_sdk/cluster/auth.py index 1015a801..36233a0e 100644 --- a/src/codeflare_sdk/cluster/auth.py +++ b/src/codeflare_sdk/cluster/auth.py @@ -119,9 +119,8 @@ def login(self) -> str: client.AuthenticationApi(api_client).get_api_group() config_path = None return "Logged into %s" % self.server - except client.ApiException: # pragma: no cover - api_client = None - print("Authentication Error please provide the correct token + server") + except client.ApiException as e: + _kube_api_error_handling(e) def logout(self) -> str: """ diff --git a/src/codeflare_sdk/utils/kube_api_helpers.py b/src/codeflare_sdk/utils/kube_api_helpers.py index 17cf6dbe..efa1d2b6 100644 --- a/src/codeflare_sdk/utils/kube_api_helpers.py +++ b/src/codeflare_sdk/utils/kube_api_helpers.py @@ -43,7 +43,9 @@ def _kube_api_error_handling( print(nf_msg) return elif e.reason == "Unauthorized" or e.reason == "Forbidden": - raise PermissionError(perm_msg) + if print_error: + print(perm_msg) + return elif e.reason == "Conflict": raise FileExistsError(exists_msg) raise e From 91356f4b30db03ce650a88f1872d563c950a950a Mon Sep 17 00:00:00 2001 From: Srihari Date: Tue, 23 Jan 2024 11:52:55 +0530 Subject: [PATCH 135/496] Add RayCLuster Oauth Authentication test --- .github/workflows/e2e_tests.yaml | 6 +- docs/e2e.md | 9 +- poetry.lock | 123 +++++++++++++++- pyproject.toml | 6 + tests/e2e/mnist_raycluster_sdk_oauth_test.py | 144 +++++++++++++++++++ tests/e2e/mnist_raycluster_sdk_test.py | 26 +--- tests/e2e/support.py | 22 +++ 7 files changed, 305 insertions(+), 31 deletions(-) create mode 100644 tests/e2e/mnist_raycluster_sdk_oauth_test.py diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 9b22e058..59dbec86 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -112,10 +112,6 @@ jobs: - name: Run e2e tests run: | - export CODEFLARE_TEST_TIMEOUT_SHORT=1m - export CODEFLARE_TEST_TIMEOUT_MEDIUM=5m - export CODEFLARE_TEST_TIMEOUT_LONG=15m - export CODEFLARE_TEST_OUTPUT_DIR=${{ env.TEMP_DIR }} echo "CODEFLARE_TEST_OUTPUT_DIR=${CODEFLARE_TEST_OUTPUT_DIR}" >> $GITHUB_ENV @@ -123,7 +119,7 @@ jobs: pip install poetry poetry install --with test,docs echo "Running e2e tests..." - poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 + poetry run pytest -v -s ./tests/e2e -m kind > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 - name: Print CodeFlare operator logs if: always() && steps.deploy.outcome == 'success' diff --git a/docs/e2e.md b/docs/e2e.md index ce04c769..469647ca 100644 --- a/docs/e2e.md +++ b/docs/e2e.md @@ -10,7 +10,6 @@ Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127 ``` make kind-e2e export CLUSTER_HOSTNAME=kind - export CODEFLARE_TEST_TIMEOUT_LONG=20m make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.1.0 make setup-e2e ``` @@ -77,3 +76,11 @@ Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127 poetry install --with test,docs poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py ``` + - To run the multiple tests based on the cluster environment, we can run the e2e tests by marking -m with cluster environment (kind or openshift) + ``` + poetry run pytest -v -s ./tests/e2e -m openshift + ``` + - By default tests configured with timeout of `15 minutes`. 
If necessary, we can override the timeout using `--timeout` option + ``` + poetry run pytest -v -s ./tests/e2e -m openshift --timeout=1200 + ``` diff --git a/poetry.lock b/poetry.lock index 5496961b..a9db5ff6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "aiohttp" version = "3.9.1" description = "Async http client/server framework (asyncio)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -100,6 +101,7 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aiohttp-cors" version = "0.7.0" description = "CORS support for aiohttp" +category = "main" optional = false python-versions = "*" files = [ @@ -114,6 +116,7 @@ aiohttp = ">=1.1" name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -128,6 +131,7 @@ frozenlist = ">=1.1.0" name = "ansicon" version = "1.89.0" description = "Python wrapper for loading Jason Hood's ANSICON" +category = "main" optional = false python-versions = "*" files = [ @@ -139,6 +143,7 @@ files = [ name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -150,6 +155,7 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -168,6 +174,7 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -202,6 +209,7 @@ typecheck = ["mypy"] name = "blessed" version = "1.20.0" description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." +category = "main" optional = false python-versions = ">=2.7" files = [ @@ -218,6 +226,7 @@ wcwidth = ">=0.1.4" name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -229,6 +238,7 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -240,6 +250,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -304,6 +315,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -403,6 +415,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -417,6 +430,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "codeflare-torchx" version = "0.6.0.dev1" description = "TorchX SDK and Components" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -445,6 +459,7 @@ ray = ["ray (>=1.12.1)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -456,6 +471,7 @@ files = [ name = "colorful" version = "0.5.5" description = "Terminal string styling done right, in Python." +category = "main" optional = false python-versions = "*" files = [ @@ -470,6 +486,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "commonmark" version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" +category = "main" optional = false python-versions = "*" files = [ @@ -484,6 +501,7 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -556,6 +574,7 @@ toml = ["tomli"] name = "cryptography" version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -597,6 +616,7 @@ tox = ["tox"] name = "distlib" version = "0.3.7" description = "Distribution utilities" +category = "main" optional = false python-versions = "*" files = [ @@ -608,6 +628,7 @@ files = [ name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -629,6 +650,7 @@ ssh = ["paramiko (>=2.4.3)"] name = "docstring-parser" version = "0.8.1" description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -639,6 +661,7 @@ files = [ name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -653,6 +676,7 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" +category = "main" optional = false python-versions = "*" files = [ @@ -667,6 +691,7 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "filelock" version = "3.12.4" description = "A platform independent file lock." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -683,6 +708,7 @@ typing = ["typing-extensions (>=4.7.1)"] name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -753,6 +779,7 @@ files = [ name = "fsspec" version = "2023.9.2" description = "File-system specification" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -788,6 +815,7 @@ tqdm = ["tqdm"] name = "google-api-core" version = "2.15.0" description = "Google API client core library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -810,6 +838,7 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-auth" version = "2.23.3" description = "Google Authentication Library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -833,6 +862,7 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "googleapis-common-protos" version = "1.62.0" description = "Common protobufs used in Google APIs" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -850,6 +880,7 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "gpustat" version = "1.1.1" description = "An utility to monitor NVIDIA GPU status and usage" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -869,6 +900,7 @@ test = ["mockito (>=1.2.1)", "pytest (>=5.4.1)", "pytest-runner"] name = "grpcio" version = "1.60.0" description = "HTTP/2-based RPC framework" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -935,6 +967,7 @@ protobuf = ["grpcio-tools (>=1.60.0)"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -946,6 +979,7 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -965,6 +999,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.1.0" description = "Read resources from Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -983,6 +1018,7 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -994,6 +1030,7 @@ files = [ name = "jinxed" version = "1.2.1" description = "Jinxed Terminal Library" +category = "main" optional = false python-versions = "*" files = [ @@ -1008,6 +1045,7 @@ ansicon = {version = "*", markers = "platform_system == \"Windows\""} name = "jsonschema" version = "4.19.1" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1031,6 +1069,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1046,6 +1085,7 @@ referencing = ">=0.28.0" name = "kubernetes" version = "26.1.0" description = "Kubernetes python client" +category = "main" optional = false 
python-versions = ">=3.6" files = [ @@ -1063,7 +1103,7 @@ requests-oauthlib = "*" setuptools = ">=21.0.0" six = ">=1.9.0" urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.0 || >=0.43.0" [package.extras] adal = ["adal (>=1.0.2)"] @@ -1072,6 +1112,7 @@ adal = ["adal (>=1.0.2)"] name = "mako" version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1091,6 +1132,7 @@ testing = ["pytest"] name = "markdown" version = "3.5" description = "Python implementation of John Gruber's Markdown." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1109,6 +1151,7 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1178,6 +1221,7 @@ files = [ name = "msgpack" version = "1.0.7" description = "MessagePack serializer" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1243,6 +1287,7 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1326,6 +1371,7 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1337,6 +1383,7 @@ files = [ name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1374,6 +1421,7 @@ files = [ name = "nvidia-ml-py" version = "12.535.133" description = "Python Bindings for the NVIDIA Management Library" +category = "main" optional = false python-versions = "*" files = [ @@ -1385,6 +1433,7 @@ files = [ name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1401,6 +1450,7 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "opencensus" version = "0.11.3" description = "A stats collection and distributed tracing framework" +category = "main" optional = false python-versions = "*" files = [ @@ -1416,6 +1466,7 @@ opencensus-context = ">=0.1.3" name = "opencensus-context" version = "0.1.3" description = "OpenCensus Runtime Context" +category = "main" optional = false python-versions = "*" files = [ @@ -1427,6 +1478,7 @@ files = [ name = "openshift-client" version = "1.0.18" description = "OpenShift python client" +category = "main" optional = false python-versions = "*" files = [ @@ -1443,6 +1495,7 @@ six = "*" name = "packaging" version = "23.2" description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1454,6 +1507,7 @@ files = [ name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1487,8 +1541,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= 
\"3.10\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, - {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -1521,6 +1575,7 @@ xml = ["lxml (>=4.6.3)"] name = "paramiko" version = "3.3.1" description = "SSH2 protocol library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1542,6 +1597,7 @@ invoke = ["invoke (>=2.0)"] name = "pdoc3" version = "0.10.0" description = "Auto-generate API documentation for Python projects." +category = "dev" optional = false python-versions = ">= 3.6" files = [ @@ -1557,6 +1613,7 @@ markdown = ">=3.0" name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1568,6 +1625,7 @@ files = [ name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1583,6 +1641,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1598,6 +1657,7 @@ testing = ["pytest", "pytest-benchmark"] name = "prometheus-client" version = "0.19.0" description = "Python client for the Prometheus monitoring system." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1612,6 +1672,7 @@ twisted = ["twisted"] name = "protobuf" version = "4.24.4" description = "" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1634,6 +1695,7 @@ files = [ name = "psutil" version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -1662,6 +1724,7 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "py-spy" version = "0.3.14" description = "Sampling profiler for Python programs" +category = "main" optional = false python-versions = "*" files = [ @@ -1678,6 +1741,7 @@ files = [ name = "pyarrow" version = "14.0.1" description = "Python library for Apache Arrow" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1726,6 +1790,7 @@ numpy = ">=1.16.6" name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1737,6 +1802,7 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1751,6 +1817,7 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1762,6 +1829,7 @@ files = [ name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1814,6 +1882,7 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1828,6 +1897,7 @@ plugins = ["importlib-metadata"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1854,6 +1924,7 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyre-extensions" version = "0.0.30" description = "Type system extensions for use with the pyre type checker" +category = "main" optional = false python-versions = "*" files = [ @@ -1869,6 +1940,7 @@ typing-inspect = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1891,6 +1963,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1904,10 +1977,26 @@ pytest = ">=5.0" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] +[[package]] +name = "pytest-timeout" +version = "2.2.0" +description = "pytest plugin to abort hanging tests" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"}, + {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"}, +] + +[package.dependencies] +pytest = ">=5.0.0" + [[package]] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" +category = "main" optional = false 
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -1922,6 +2011,7 @@ six = ">=1.5" name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" files = [ @@ -1933,6 +2023,7 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" +category = "main" optional = false python-versions = "*" files = [ @@ -1956,6 +2047,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2016,6 +2108,7 @@ files = [ name = "ray" version = "2.7.0" description = "Ray provides a simple, universal API for building distributed applications." +category = "main" optional = false python-versions = "*" files = [ @@ -2063,8 +2156,8 @@ jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" numpy = [ {version = ">=1.16", markers = "python_version < \"3.9\""}, - {version = ">=1.20", optional = true, markers = "extra == \"data\""}, {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, + {version = ">=1.20", optional = true, markers = "extra == \"data\""}, ] opencensus = {version = "*", optional = true, markers = "extra == \"default\""} packaging = "*" @@ -2097,6 +2190,7 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. name = "referencing" version = "0.30.2" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2112,6 +2206,7 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2133,6 +2228,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2151,6 +2247,7 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "12.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" optional = false python-versions = ">=3.6.3,<4.0.0" files = [ @@ -2170,6 +2267,7 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] name = "rpds-py" version = "0.10.4" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2278,6 +2376,7 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" +category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -2292,6 +2391,7 @@ pyasn1 = ">=0.1.3" name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2308,6 +2408,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2319,6 +2420,7 @@ files = [ name = "smart-open" version = "6.4.0" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" +category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2340,6 +2442,7 @@ webhdfs = ["requests"] name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2354,6 +2457,7 @@ widechars = ["wcwidth"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2365,6 +2469,7 @@ files = [ name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2376,6 +2481,7 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." +category = "main" optional = false python-versions = "*" files = [ @@ -2391,6 +2497,7 @@ typing-extensions = ">=3.7.4" name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" +category = "main" optional = false python-versions = ">=2" files = [ @@ -2402,6 +2509,7 @@ files = [ name = "urllib3" version = "1.26.17" description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -2418,6 +2526,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "virtualenv" version = "20.21.0" description = "Virtual Python Environment builder" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2438,6 +2547,7 @@ test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess name = "wcwidth" version = "0.2.12" description = "Measures the displayed width of unicode strings in a terminal" +category = "main" optional = false python-versions = "*" files = [ @@ -2449,6 +2559,7 @@ files = [ name = "websocket-client" version = "1.6.4" description = "WebSocket client for Python with low level API options" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2465,6 +2576,7 @@ test = ["websockets"] name = "yarl" version = "1.9.4" description = "Yet another URL library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2568,6 +2680,7 @@ multidict = ">=4.0" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2582,4 +2695,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "75531b507aa2ee5b0514864aa92fbb127ef52a4faf119b5affdfbc3c694c5b03" +content-hash = "e7fa79bd035b5bffcb1668c0a8cf8fa4e869a614b934a09100ab1d53338fe11b" diff --git a/pyproject.toml b/pyproject.toml index 66dd6cd0..e262b527 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,9 +43,15 @@ optional = true pytest = "7.4.0" coverage = "7.2.7" pytest-mock = "3.11.1" +pytest-timeout = "2.2.0" [tool.pytest.ini_options] filterwarnings = [ "ignore::DeprecationWarning:pkg_resources", "ignore:pkg_resources is deprecated as an API:DeprecationWarning", ] +markers = [ + "kind", + "openshift" +] +timeout = 900 diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py new file mode 100644 index 00000000..0cee617d --- /dev/null +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -0,0 +1,144 @@ +import requests + +from time import sleep + +from torchx.specs.api import AppState, is_terminal + +from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration +from codeflare_sdk.job.jobs import DDPJobDefinition + +import pytest + +from support import * + +# This test Creates a Ray cluster with openshift_oauth enable and covers the Ray Job submission with authentication and without authentication functionality + + +@pytest.mark.openshift +class TestRayClusterSDKOauth: + def setup_method(self): + initialize_kubernetes_client(self) + + def teardown_method(self): + delete_namespace(self) + + def test_mnist_ray_cluster_sdk_auth(self): + self.setup_method() + create_namespace(self) + self.run_mnist_raycluster_sdk_oauth() + + def run_mnist_raycluster_sdk_oauth(self): + ray_image = get_ray_image() + + cluster = Cluster( + ClusterConfiguration( + name="mnist", + namespace=self.namespace, + num_workers=1, + head_cpus="500m", + head_memory=2, + min_cpus="500m", + max_cpus=1, + min_memory=1, + max_memory=2, + num_gpus=0, + instascale=False, + image=ray_image, + openshift_oauth=True, + ) + ) + + cluster.up() + self.assert_appwrapper_exists() + + cluster.status() + + cluster.wait_ready() + + cluster.status() + + cluster.details() + + 
self.assert_jobsubmit_withoutLogin(cluster) + + self.assert_jobsubmit_withlogin(cluster) + + # Assertions + + def assert_jobsubmit_withoutLogin(self, cluster): + dashboard_url = cluster.cluster_dashboard_uri() + jobdata = { + "entrypoint": "python mnist.py", + "runtime_env": { + "working_dir": "./tests/e2e/", + "pip": "mnist_pip_requirements.txt", + }, + } + try: + response = requests.post( + dashboard_url + "/api/jobs/", verify=False, json=jobdata + ) + if response.status_code == 403: + assert True + else: + response.raise_for_status() + assert False + + except Exception as e: + print(f"An unexpected error occurred. Error: {e}") + assert False + + def assert_jobsubmit_withlogin(self, cluster): + self.assert_appwrapper_exists() + jobdef = DDPJobDefinition( + name="mnist", + script="./tests/e2e/mnist.py", + scheduler_args={"requirements": "./tests/e2e/mnist_pip_requirements.txt"}, + ) + job = jobdef.submit(cluster) + + done = False + time = 0 + timeout = 900 + while not done: + status = job.status() + if is_terminal(status.state): + break + if not done: + print(status) + if timeout and time >= timeout: + raise TimeoutError(f"job has timed out after waiting {timeout}s") + sleep(5) + time += 5 + + print(job.status()) + self.assert_job_completion(status) + + print(job.logs()) + + cluster.down() + + def assert_appwrapper_exists(self): + try: + self.custom_api.get_namespaced_custom_object( + "workload.codeflare.dev", + "v1beta1", + self.namespace, + "appwrappers", + "mnist", + ) + print( + f"AppWrapper 'mnist' has been created in the namespace: '{self.namespace}'" + ) + assert True + except Exception as e: + print(f"AppWrapper 'mnist' has not been created. Error: {e}") + assert False + + def assert_job_completion(self, status): + if status.state == AppState.SUCCEEDED: + print(f"Job has completed: '{status.state}'") + assert True + else: + print(f"Job has completed: '{status.state}'") + assert False diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py index 26f76b60..76a5e260 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.py +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -14,40 +14,26 @@ import pytest -from support import random_choice, get_ray_image +from support import * # Creates a Ray cluster, and trains the MNIST dataset using the CodeFlare SDK. # Asserts creation of AppWrapper, RayCluster, and successful completion of the training job. # Covers successfull installation of CodeFlare-SDK +@pytest.mark.kind +@pytest.mark.openshift class TestMNISTRayClusterSDK: def setup_method(self): - # Load the kube config from the environment or Kube config file. 
- config.load_kube_config() - - # Initialize Kubernetes client - self.api_instance = client.CoreV1Api() - self.custom_api = kubernetes.client.CustomObjectsApi( - self.api_instance.api_client - ) + initialize_kubernetes_client(self) def teardown_method(self): - if hasattr(self, "namespace"): - self.api_instance.delete_namespace(self.namespace) + delete_namespace(self) def test_mnist_ray_cluster_sdk(self): - self.create_test_namespace() + create_namespace(self) self.run_mnist_raycluster_sdk() - def create_test_namespace(self): - self.namespace = f"test-ns-{random_choice()}" - namespace_body = client.V1Namespace( - metadata=client.V1ObjectMeta(name=self.namespace) - ) - self.api_instance.create_namespace(namespace_body) - return self.namespace - def run_mnist_raycluster_sdk(self): ray_image = get_ray_image() host = os.getenv("CLUSTER_HOSTNAME") diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 303b03c8..1ab45c19 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -1,6 +1,8 @@ import os import random import string +from kubernetes import client, config +import kubernetes.client def get_ray_image(): @@ -11,3 +13,23 @@ def get_ray_image(): def random_choice(): alphabet = string.ascii_lowercase + string.digits return "".join(random.choices(alphabet, k=5)) + + +def create_namespace(self): + self.namespace = f"test-ns-{random_choice()}" + namespace_body = client.V1Namespace( + metadata=client.V1ObjectMeta(name=self.namespace) + ) + self.api_instance.create_namespace(namespace_body) + + +def delete_namespace(self): + if hasattr(self, "namespace"): + self.api_instance.delete_namespace(self.namespace) + + +def initialize_kubernetes_client(self): + config.load_kube_config() + # Initialize Kubernetes client + self.api_instance = client.CoreV1Api() + self.custom_api = kubernetes.client.CustomObjectsApi(self.api_instance.api_client) From debb96123b305e1c47cbea3d540aa06e40d839e0 Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Thu, 15 Feb 2024 13:34:34 +0100 Subject: [PATCH 136/496] Fix broken link in README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 088b2d3d..0ea7f694 100644 --- a/README.md +++ b/README.md @@ -58,7 +58,7 @@ To build the codeflare-sdk pre-commit image run `podman build -f .github/build/C - To run the unit tests, run `pytest -v tests/unit_test.py` #### Local e2e Testing -- Please follow the [e2e documentation](https://https://github.com/project-codeflare/codeflare-sdk/blob/main/docs/e2e.md) +- Please follow the [e2e documentation](https://github.com/project-codeflare/codeflare-sdk/blob/main/docs/e2e.md) #### Code Coverage From a3c57ed0466645bbe85dae7c15a14354fb119cc4 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Mon, 12 Feb 2024 09:27:11 +0000 Subject: [PATCH 137/496] Fix component output logs in e2e workflow --- .github/workflows/e2e_tests.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 59dbec86..7efb9937 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -121,6 +121,8 @@ jobs: echo "Running e2e tests..." 
poetry run pytest -v -s ./tests/e2e -m kind > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 + kubectl config use-context kind-cluster + - name: Print CodeFlare operator logs if: always() && steps.deploy.outcome == 'success' run: | From 6a74c617fce641c7afe9d4ec09a0668144b2a50d Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Tue, 20 Feb 2024 17:09:58 +0000 Subject: [PATCH 138/496] Changes in docs for release: v0.14.0 --- docs/detailed-documentation/cluster/auth.html | 15 ++-- .../cluster/cluster.html | 78 ++++++++++++------- .../detailed-documentation/cluster/index.html | 23 ++++++ .../detailed-documentation/cluster/model.html | 4 +- docs/detailed-documentation/index.html | 23 ++++++ docs/detailed-documentation/job/index.html | 8 ++ .../utils/generate_yaml.html | 10 +++ .../utils/kube_api_helpers.html | 4 +- 8 files changed, 125 insertions(+), 40 deletions(-) diff --git a/docs/detailed-documentation/cluster/auth.html b/docs/detailed-documentation/cluster/auth.html index f1d8ad00..be649e78 100644 --- a/docs/detailed-documentation/cluster/auth.html +++ b/docs/detailed-documentation/cluster/auth.html @@ -152,9 +152,8 @@

    Module codeflare_sdk.cluster.auth

    client.AuthenticationApi(api_client).get_api_group() config_path = None return "Logged into %s" % self.server - except client.ApiException: # pragma: no cover - api_client = None - print("Authentication Error please provide the correct token + server") + except client.ApiException as e: + _kube_api_error_handling(e) def logout(self) -> str: """ @@ -580,9 +579,8 @@

    Methods

    client.AuthenticationApi(api_client).get_api_group() config_path = None return "Logged into %s" % self.server - except client.ApiException: # pragma: no cover - api_client = None - print("Authentication Error please provide the correct token + server") + except client.ApiException as e: + _kube_api_error_handling(e) def logout(self) -> str: """ @@ -637,9 +635,8 @@

    Methods

    client.AuthenticationApi(api_client).get_api_group() config_path = None return "Logged into %s" % self.server - except client.ApiException: # pragma: no cover - api_client = None - print("Authentication Error please provide the correct token + server") + except client.ApiException as e: + _kube_api_error_handling(e)
    diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html index 3c91c6a6..07e44e4b 100644 --- a/docs/detailed-documentation/cluster/cluster.html +++ b/docs/detailed-documentation/cluster/cluster.html @@ -589,7 +589,7 @@

    Module codeflare_sdk.cluster.cluster

    if resource["kind"] == "RayCluster": api_instance.create_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", body=resource, @@ -626,7 +626,7 @@

    Module codeflare_sdk.cluster.cluster

    if resource["kind"] == "RayCluster": api_instance.delete_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", name=self.app_wrapper_name, @@ -713,7 +713,7 @@

    Module codeflare_sdk.cluster.cluster

    api_instance = client.CustomObjectsApi(api_config_handler()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", ) @@ -870,7 +870,7 @@

    Module codeflare_sdk.cluster.cluster

    api_instance = client.CustomObjectsApi(api_config_handler()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", ) @@ -890,7 +890,7 @@

    Module codeflare_sdk.cluster.cluster

    api_instance = client.CustomObjectsApi(api_config_handler()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", ) @@ -934,25 +934,47 @@

    Module codeflare_sdk.cluster.cluster

    status = RayClusterStatus(rc["status"]["state"].lower()) else: status = RayClusterStatus.UNKNOWN - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress(rc["metadata"]["namespace"]) - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) - ray_ingress = None - for ingress in ingresses.items: - annotations = ingress.metadata.annotations - protocol = "http" - if ( - ingress.metadata.name == f"ray-dashboard-{rc['metadata']['name']}" - or ingress.metadata.name.startswith(f"{rc['metadata']['name']}-ingress") - ): - if annotations == None: - protocol = "http" - elif "route.openshift.io/termination" in annotations: - protocol = "https" - ray_ingress = f"{protocol}://{ingress.spec.rules[0].host}" + config_check() + dashboard_url = None + if is_openshift_cluster(): + try: + api_instance = client.CustomObjectsApi(api_config_handler()) + routes = api_instance.list_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=rc["metadata"]["namespace"], + plural="routes", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + for route in routes["items"]: + rc_name = rc["metadata"]["name"] + if route["metadata"]["name"] == f"ray-dashboard-{rc_name}" or route[ + "metadata" + ]["name"].startswith(f"{rc_name}-ingress"): + protocol = "https" if route["spec"].get("tls") else "http" + dashboard_url = f"{protocol}://{route['spec']['host']}" + else: + try: + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress( + rc["metadata"]["namespace"] + ) + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) + for ingress in ingresses.items: + annotations = ingress.metadata.annotations + protocol = "http" + if ( + ingress.metadata.name == f"ray-dashboard-{rc['metadata']['name']}" + or ingress.metadata.name.startswith(f"{rc['metadata']['name']}-ingress") + ): + if annotations == None: + protocol = "http" + elif "route.openshift.io/termination" in annotations: + protocol = "https" + dashboard_url = f"{protocol}://{ingress.spec.rules[0].host}" return RayCluster( name=rc["metadata"]["name"], @@ -979,7 +1001,7 @@

    Module codeflare_sdk.cluster.cluster

    head_gpu=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ "resources" ]["limits"]["nvidia.com/gpu"], - dashboard=ray_ingress, + dashboard=dashboard_url, ) @@ -1068,7 +1090,7 @@

    Functions

    api_instance = client.CustomObjectsApi(api_config_handler()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", ) @@ -1777,7 +1799,7 @@

    Classes

    if resource["kind"] == "RayCluster": api_instance.create_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", body=resource, @@ -1814,7 +1836,7 @@

    Classes

    if resource["kind"] == "RayCluster": api_instance.delete_namespaced_custom_object( group="ray.io", - version="v1alpha1", + version="v1", namespace=namespace, plural="rayclusters", name=self.app_wrapper_name, diff --git a/docs/detailed-documentation/cluster/index.html b/docs/detailed-documentation/cluster/index.html index a6027e6f..8454c875 100644 --- a/docs/detailed-documentation/cluster/index.html +++ b/docs/detailed-documentation/cluster/index.html @@ -22,6 +22,29 @@

    Module codeflare_sdk.cluster

    +
    + +Expand source code + +
    +from .auth import (
    +    Authentication,
    +    KubeConfiguration,
    +    TokenAuthentication,
    +    KubeConfigFileAuthentication,
    +)
    +
    +from .model import (
    +    RayClusterStatus,
    +    AppWrapperStatus,
    +    CodeFlareClusterStatus,
    +    RayCluster,
    +    AppWrapper,
    +)
    +
    +from .cluster import Cluster, ClusterConfiguration
    +
    +from .awload import AWManager
    +

    Sub-modules

    diff --git a/docs/detailed-documentation/cluster/model.html b/docs/detailed-documentation/cluster/model.html index 3832494e..ff8a0071 100644 --- a/docs/detailed-documentation/cluster/model.html +++ b/docs/detailed-documentation/cluster/model.html @@ -60,7 +60,7 @@

    Module codeflare_sdk.cluster.model

    Defines the possible reportable states of a Ray cluster. """ - # https://github.com/ray-project/kuberay/blob/master/ray-operator/apis/ray/v1alpha1/raycluster_types.go#L95 + # https://github.com/ray-project/kuberay/blob/master/ray-operator/apis/ray/v1/raycluster_types.go#L112-L117 READY = "ready" UNHEALTHY = "unhealthy" FAILED = "failed" @@ -383,7 +383,7 @@

    Class variables

    Defines the possible reportable states of a Ray cluster. """ - # https://github.com/ray-project/kuberay/blob/master/ray-operator/apis/ray/v1alpha1/raycluster_types.go#L95 + # https://github.com/ray-project/kuberay/blob/master/ray-operator/apis/ray/v1/raycluster_types.go#L112-L117 READY = "ready" UNHEALTHY = "unhealthy" FAILED = "failed" diff --git a/docs/detailed-documentation/index.html b/docs/detailed-documentation/index.html index bd408f76..4895f0a9 100644 --- a/docs/detailed-documentation/index.html +++ b/docs/detailed-documentation/index.html @@ -22,6 +22,29 @@

    Package codeflare_sdk

    +
    + +Expand source code + +
    +from .cluster import (
    +    Authentication,
    +    KubeConfiguration,
    +    TokenAuthentication,
    +    KubeConfigFileAuthentication,
    +    AWManager,
    +    Cluster,
    +    ClusterConfiguration,
    +    RayClusterStatus,
    +    AppWrapperStatus,
    +    CodeFlareClusterStatus,
    +    RayCluster,
    +    AppWrapper,
    +)
    +
    +from .job import JobDefinition, Job, DDPJobDefinition, DDPJob, RayJobClient
    +
    +from .utils import generate_cert
    +

    Sub-modules

    diff --git a/docs/detailed-documentation/job/index.html b/docs/detailed-documentation/job/index.html index f4bfe93f..514cb895 100644 --- a/docs/detailed-documentation/job/index.html +++ b/docs/detailed-documentation/job/index.html @@ -22,6 +22,14 @@

    Module codeflare_sdk.job

    +
    + +Expand source code + +
    +from .jobs import JobDefinition, Job, DDPJobDefinition, DDPJob
    +
    +from .ray_jobs import RayJobClient
    +

    Sub-modules

    diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index 91f4bab1..c2643556 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -650,6 +650,11 @@

    Module codeflare_sdk.utils.generate_yaml

    def write_components(user_yaml: dict, output_file_name: str): + # Create the directory if it doesn't exist + directory_path = os.path.dirname(output_file_name) + if not os.path.exists(directory_path): + os.makedirs(directory_path) + components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") open(output_file_name, "w").close() with open(output_file_name, "a") as outfile: @@ -1638,6 +1643,11 @@

    Functions

    Expand source code
    def write_components(user_yaml: dict, output_file_name: str):
    +    # Create the directory if it doesn't exist
    +    directory_path = os.path.dirname(output_file_name)
    +    if not os.path.exists(directory_path):
    +        os.makedirs(directory_path)
    +
         components = user_yaml.get("spec", "resources")["resources"].get("GenericItems")
         open(output_file_name, "w").close()
         with open(output_file_name, "a") as outfile:
    diff --git a/docs/detailed-documentation/utils/kube_api_helpers.html b/docs/detailed-documentation/utils/kube_api_helpers.html
    index 58fcf7fc..6bf6fe81 100644
    --- a/docs/detailed-documentation/utils/kube_api_helpers.html
    +++ b/docs/detailed-documentation/utils/kube_api_helpers.html
    @@ -74,7 +74,9 @@ 

    Module codeflare_sdk.utils.kube_api_helpers

    print(nf_msg) return elif e.reason == "Unauthorized" or e.reason == "Forbidden": - raise PermissionError(perm_msg) + if print_error: + print(perm_msg) + return elif e.reason == "Conflict": raise FileExistsError(exists_msg) raise e
    From ca5b1982a970f9cb31a549910003c7e19584ca61 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Tue, 20 Feb 2024 12:06:15 +0000 Subject: [PATCH 139/496] replacing passthrough with edge/reencrypt --- src/codeflare_sdk/templates/base-template.yaml | 2 ++ src/codeflare_sdk/utils/openshift_oauth.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 16a3dc2a..960d7383 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -325,6 +325,8 @@ spec: name: deployment-name-head-svc port: targetPort: dashboard + tls: + termination: edge - replicas: 1 generictemplate: apiVersion: networking.k8s.io/v1 diff --git a/src/codeflare_sdk/utils/openshift_oauth.py b/src/codeflare_sdk/utils/openshift_oauth.py index 37038c56..facd8411 100644 --- a/src/codeflare_sdk/utils/openshift_oauth.py +++ b/src/codeflare_sdk/utils/openshift_oauth.py @@ -180,7 +180,7 @@ def _create_or_replace_oauth_route_object( port: targetPort: {port_name} tls: - termination: passthrough + termination: reencrypt to: kind: Service name: {service_name} From 46822a2020e1dd13a8ecbda97a82839cb2d6b136 Mon Sep 17 00:00:00 2001 From: Kevin Date: Mon, 19 Feb 2024 17:13:13 -0500 Subject: [PATCH 140/496] odh configmap configuration to default template I also changed the removal of raycluster tls objects so it is done by name rather than all at once Signed-off-by: Kevin --- .../templates/base-template.yaml | 52 +++++++++++++++++++ src/codeflare_sdk/utils/generate_yaml.py | 42 +++++++-------- 2 files changed, 73 insertions(+), 21 deletions(-) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 960d7383..0f142c71 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -157,6 +157,18 @@ spec: - name: server-cert mountPath: "/home/ray/workspace/tls" readOnly: true + - mountPath: /etc/pki/tls/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt initContainers: - command: - sh @@ -181,6 +193,20 @@ spec: optional: false - name: server-cert emptyDir: {} + - name: odh-trusted-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: ca-bundle.crt + path: odh-custom-ca-bundle.crt + optional: true + - name: odh-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + optional: true workerGroupSpecs: # the pod replicas in this group typed worker - replicas: 3 @@ -277,6 +303,18 @@ spec: - name: server-cert mountPath: "/home/ray/workspace/tls" readOnly: true + - mountPath: /etc/pki/tls/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt volumes: - name: ca-vol secret: @@ -284,6 +322,20 @@ spec: optional: false - name: server-cert emptyDir: {} + - name: odh-trusted-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: ca-bundle.crt + path: 
odh-custom-ca-bundle.crt + optional: true + - name: odh-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + optional: true - replicas: 1 generictemplate: apiVersion: networking.k8s.io/v1 diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index bfbce859..1a1212eb 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -17,6 +17,7 @@ (in the cluster sub-module) for AppWrapper generation. """ +import typing import yaml import sys import os @@ -466,35 +467,34 @@ def enable_local_interactive(resources, cluster_name, namespace, ingress_domain) ][0].get("command")[2] = command +def del_from_list_by_name(l: list, target: typing.List[str]): + for item in l: + if item["name"] in ["ca-vol", "server-cert"]: + l.remove(item) + + def disable_raycluster_tls(resources): generic_template_spec = resources["GenericItems"][0]["generictemplate"]["spec"] - if "volumes" in generic_template_spec["headGroupSpec"]["template"]["spec"]: - del generic_template_spec["headGroupSpec"]["template"]["spec"]["volumes"] + del_from_list_by_name( + generic_template_spec["headGroupSpec"]["template"]["spec"].get("volumes", []), + ["ca-vol", "server-cert"], + ) - if ( - "volumeMounts" - in generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"][0] - ): - del generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"][0][ - "volumeMounts" - ] + c: dict + for c in generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"]: + del_from_list_by_name(c.get("volumeMounts", []), ["ca-vol", "server-cert"]) if "initContainers" in generic_template_spec["headGroupSpec"]["template"]["spec"]: del generic_template_spec["headGroupSpec"]["template"]["spec"]["initContainers"] - if "volumes" in generic_template_spec["workerGroupSpecs"][0]["template"]["spec"]: - del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"]["volumes"] - - if ( - "volumeMounts" - in generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0] - ): - del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["volumeMounts"] + for workerGroup in generic_template_spec.get("workerGroupSpecs"): + del_from_list_by_name( + workerGroup["template"]["spec"].get("volumes", []), + ["ca-vol", "server-cert"], + ) + for c in workerGroup["template"]["spec"].get("containers", []): + del_from_list_by_name(c.get("volumeMounts", []), ["ca-vol", "server-cert"]) del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ "initContainers" From 9dd120be78f7dc4d33881d438075856d75815be6 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 20 Feb 2024 11:54:32 +0000 Subject: [PATCH 141/496] Altered tests so they pass --- tests/test-case-no-mcad.yamls | 66 +++++++++++++++++++++++++++++++++++ tests/test-case-prio.yaml | 66 +++++++++++++++++++++++++++++++++++ tests/test-case.yaml | 66 +++++++++++++++++++++++++++++++++++ tests/unit_test.py | 21 ++++++++++- 4 files changed, 218 insertions(+), 1 deletion(-) diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 38109dfc..180c4c5b 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -77,8 +77,41 @@ spec: cpu: 2 memory: 8G nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true + - mountPath: /etc/pki/tls/certs + name: odh-trusted-ca-cert + subPath: 
odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt imagePullSecrets: - name: unit-test-pull-secret + volumes: + - emptyDir: {} + name: server-cert + - configMap: + items: + - key: ca-bundle.crt + path: odh-custom-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert rayVersion: 2.7.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster-ray @@ -136,8 +169,41 @@ spec: cpu: 3 memory: 5G nvidia.com/gpu: 7 + volumeMounts: + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true + - mountPath: /etc/pki/tls/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt imagePullSecrets: - name: unit-test-pull-secret + volumes: + - emptyDir: {} + name: server-cert + - configMap: + items: + - key: ca-bundle.crt + path: odh-custom-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert --- apiVersion: networking.k8s.io/v1 kind: Ingress diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 9f907135..77ec3bbe 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -107,9 +107,42 @@ spec: cpu: 2 memory: 8G nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true + - mountPath: /etc/pki/tls/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt imagePullSecrets: - name: unit-test-pull-secret priorityClassName: default + volumes: + - emptyDir: {} + name: server-cert + - configMap: + items: + - key: ca-bundle.crt + path: odh-custom-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert rayVersion: 2.7.0 workerGroupSpecs: - groupName: small-group-prio-test-cluster @@ -167,9 +200,42 @@ spec: cpu: 3 memory: 5G nvidia.com/gpu: 7 + volumeMounts: + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true + - mountPath: /etc/pki/tls/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt imagePullSecrets: - name: unit-test-pull-secret priorityClassName: default + volumes: + - emptyDir: {} + name: server-cert + - configMap: + items: + - 
key: ca-bundle.crt + path: odh-custom-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert replicas: 1 - generictemplate: apiVersion: networking.k8s.io/v1 diff --git a/tests/test-case.yaml b/tests/test-case.yaml index f7e287cf..73fdd42d 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -106,8 +106,41 @@ spec: cpu: 2 memory: 8G nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true + - mountPath: /etc/pki/tls/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt imagePullSecrets: - name: unit-test-pull-secret + volumes: + - emptyDir: {} + name: server-cert + - configMap: + items: + - key: ca-bundle.crt + path: odh-custom-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert rayVersion: 2.7.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster @@ -165,8 +198,41 @@ spec: cpu: 3 memory: 5G nvidia.com/gpu: 7 + volumeMounts: + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true + - mountPath: /etc/pki/tls/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs + name: odh-ca-cert + subPath: odh-ca-bundle.crt imagePullSecrets: - name: unit-test-pull-secret + volumes: + - emptyDir: {} + name: server-cert + - configMap: + items: + - key: ca-bundle.crt + path: odh-custom-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert replicas: 1 - generictemplate: apiVersion: networking.k8s.io/v1 diff --git a/tests/unit_test.py b/tests/unit_test.py index fc9ecde2..dd9fad63 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -2709,10 +2709,26 @@ def test_enable_local_interactive(mocker): volumes = [ { "name": "ca-vol", - "secret": {"secretName": f"ca-secret-{cluster_name}"}, + "secret": {"secretName": "ca-secret-test-enable-local"}, "optional": False, }, {"name": "server-cert", "emptyDir": {}}, + { + "name": "odh-trusted-ca-cert", + "configMap": { + "name": "odh-trusted-ca-bundle", + "items": [{"key": "ca-bundle.crt", "path": "odh-custom-ca-bundle.crt"}], + "optional": True, + }, + }, + { + "name": "odh-ca-cert", + "configMap": { + "name": "odh-trusted-ca-bundle", + "items": [{"key": "odh-ca-bundle.crt", "path": "odh-ca-bundle.crt"}], + "optional": True, + }, + }, ] tls_env = [ {"name": "RAY_USE_TLS", "value": "1"}, @@ -2740,6 +2756,9 @@ def test_enable_local_interactive(mocker): head_group_spec["template"]["spec"]["initContainers"][0]["volumeMounts"] == volume_mounts ) + print(head_group_spec["template"]["spec"]["volumes"]) + print("----------------") + print(volumes) assert 
head_group_spec["template"]["spec"]["volumes"] == volumes # 2. workerGroupSpec has the initContainers command to generated TLS cert from the mounted CA cert. From 07f79ade2e90b27635ff2267d1dc2e80f8957d22 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 20 Feb 2024 16:01:36 +0000 Subject: [PATCH 142/496] Specificed mount paths and fixed unit tests --- src/codeflare_sdk/templates/base-template.yaml | 16 ++++++++-------- tests/test-case-no-mcad.yamls | 16 ++++++++-------- tests/test-case-prio.yaml | 16 ++++++++-------- tests/test-case.yaml | 16 ++++++++-------- 4 files changed, 32 insertions(+), 32 deletions(-) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 0f142c71..1cb65d5a 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -157,16 +157,16 @@ spec: - name: server-cert mountPath: "/home/ray/workspace/tls" readOnly: true - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt initContainers: @@ -303,16 +303,16 @@ spec: - name: server-cert mountPath: "/home/ray/workspace/tls" readOnly: true - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt volumes: diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 180c4c5b..d17e979a 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -81,16 +81,16 @@ spec: - mountPath: /home/ray/workspace/tls name: server-cert readOnly: true - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt imagePullSecrets: @@ -173,16 +173,16 @@ spec: - mountPath: /home/ray/workspace/tls name: server-cert readOnly: true - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt name: 
odh-ca-cert subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt imagePullSecrets: diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 77ec3bbe..1eb8ad48 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -111,16 +111,16 @@ spec: - mountPath: /home/ray/workspace/tls name: server-cert readOnly: true - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt imagePullSecrets: @@ -204,16 +204,16 @@ spec: - mountPath: /home/ray/workspace/tls name: server-cert readOnly: true - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt imagePullSecrets: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 73fdd42d..563ebf90 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -110,16 +110,16 @@ spec: - mountPath: /home/ray/workspace/tls name: server-cert readOnly: true - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt imagePullSecrets: @@ -202,16 +202,16 @@ spec: - mountPath: /home/ray/workspace/tls name: server-cert readOnly: true - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt imagePullSecrets: From 8cbb7aa8cfc2996dccb57c19437344fbc83387a4 Mon Sep 17 00:00:00 2001 From: Kevin Date: Tue, 20 Feb 2024 11:40:50 -0500 Subject: [PATCH 143/496] fix remove by name function to not be in place Signed-off-by: Kevin --- .../templates/base-template.yaml | 4 +-- src/codeflare_sdk/utils/generate_yaml.py | 26 +++++++++++-------- tests/test-case-no-mcad.yamls | 10 ------- 
tests/test-case-prio.yaml | 10 ------- tests/test-case.yaml | 10 ------- tests/unit_test.py | 3 --- 6 files changed, 17 insertions(+), 46 deletions(-) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 1cb65d5a..d2c6074f 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -198,7 +198,7 @@ spec: name: odh-trusted-ca-bundle items: - key: ca-bundle.crt - path: odh-custom-ca-bundle.crt + path: odh-trusted-ca-bundle.crt optional: true - name: odh-ca-cert configMap: @@ -327,7 +327,7 @@ spec: name: odh-trusted-ca-bundle items: - key: ca-bundle.crt - path: odh-custom-ca-bundle.crt + path: odh-trusted-ca-bundle.crt optional: true - name: odh-ca-cert configMap: diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 1a1212eb..49380523 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -467,34 +467,38 @@ def enable_local_interactive(resources, cluster_name, namespace, ingress_domain) ][0].get("command")[2] = command -def del_from_list_by_name(l: list, target: typing.List[str]): - for item in l: - if item["name"] in ["ca-vol", "server-cert"]: - l.remove(item) +def del_from_list_by_name(l: list, target: typing.List[str]) -> list: + return [x for x in l if x["name"] not in target] def disable_raycluster_tls(resources): generic_template_spec = resources["GenericItems"][0]["generictemplate"]["spec"] - del_from_list_by_name( - generic_template_spec["headGroupSpec"]["template"]["spec"].get("volumes", []), + headGroupTemplateSpec = generic_template_spec["headGroupSpec"]["template"]["spec"] + headGroupTemplateSpec["volumes"] = del_from_list_by_name( + headGroupTemplateSpec.get("volumes", []), ["ca-vol", "server-cert"], ) c: dict for c in generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"]: - del_from_list_by_name(c.get("volumeMounts", []), ["ca-vol", "server-cert"]) + c["volumeMounts"] = del_from_list_by_name( + c.get("volumeMounts", []), ["ca-vol", "server-cert"] + ) if "initContainers" in generic_template_spec["headGroupSpec"]["template"]["spec"]: del generic_template_spec["headGroupSpec"]["template"]["spec"]["initContainers"] - for workerGroup in generic_template_spec.get("workerGroupSpecs"): - del_from_list_by_name( - workerGroup["template"]["spec"].get("volumes", []), + for workerGroup in generic_template_spec.get("workerGroupSpecs", []): + workerGroupSpec = workerGroup["template"]["spec"] + workerGroupSpec["volumes"] = del_from_list_by_name( + workerGroupSpec.get("volumes", []), ["ca-vol", "server-cert"], ) for c in workerGroup["template"]["spec"].get("containers", []): - del_from_list_by_name(c.get("volumeMounts", []), ["ca-vol", "server-cert"]) + c["volumeMounts"] = del_from_list_by_name( + c.get("volumeMounts", []), ["ca-vol", "server-cert"] + ) del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ "initContainers" diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index d17e979a..dd05c8db 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -78,9 +78,6 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -96,8 +93,6 @@ spec: imagePullSecrets: - name: unit-test-pull-secret volumes: - - emptyDir: 
{} - name: server-cert - configMap: items: - key: ca-bundle.crt @@ -170,9 +165,6 @@ spec: memory: 5G nvidia.com/gpu: 7 volumeMounts: - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -188,8 +180,6 @@ spec: imagePullSecrets: - name: unit-test-pull-secret volumes: - - emptyDir: {} - name: server-cert - configMap: items: - key: ca-bundle.crt diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 1eb8ad48..dc8eb2a7 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -108,9 +108,6 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -127,8 +124,6 @@ spec: - name: unit-test-pull-secret priorityClassName: default volumes: - - emptyDir: {} - name: server-cert - configMap: items: - key: ca-bundle.crt @@ -201,9 +196,6 @@ spec: memory: 5G nvidia.com/gpu: 7 volumeMounts: - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -220,8 +212,6 @@ spec: - name: unit-test-pull-secret priorityClassName: default volumes: - - emptyDir: {} - name: server-cert - configMap: items: - key: ca-bundle.crt diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 563ebf90..c88c04ab 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -107,9 +107,6 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -125,8 +122,6 @@ spec: imagePullSecrets: - name: unit-test-pull-secret volumes: - - emptyDir: {} - name: server-cert - configMap: items: - key: ca-bundle.crt @@ -199,9 +194,6 @@ spec: memory: 5G nvidia.com/gpu: 7 volumeMounts: - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -217,8 +209,6 @@ spec: imagePullSecrets: - name: unit-test-pull-secret volumes: - - emptyDir: {} - name: server-cert - configMap: items: - key: ca-bundle.crt diff --git a/tests/unit_test.py b/tests/unit_test.py index dd9fad63..d3ef11e1 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -2756,9 +2756,6 @@ def test_enable_local_interactive(mocker): head_group_spec["template"]["spec"]["initContainers"][0]["volumeMounts"] == volume_mounts ) - print(head_group_spec["template"]["spec"]["volumes"]) - print("----------------") - print(volumes) assert head_group_spec["template"]["spec"]["volumes"] == volumes # 2. workerGroupSpec has the initContainers command to generated TLS cert from the mounted CA cert. 
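The `del_from_list_by_name` rewrite in the patch above swaps in-place `list.remove` calls, which can skip elements when items are removed during iteration, for a comprehension that returns a filtered copy which callers assign back. A minimal sketch of the pattern, using made-up volume entries rather than the SDK's real templates:

```python
def del_from_list_by_name(l: list, target: list) -> list:
    # Build a new list instead of mutating `l` while iterating over it,
    # which would silently skip the element following each removal.
    return [x for x in l if x["name"] not in target]


volumes = [{"name": "ca-vol"}, {"name": "server-cert"}, {"name": "odh-ca-cert"}]
volumes = del_from_list_by_name(volumes, ["ca-vol", "server-cert"])
print(volumes)  # [{'name': 'odh-ca-cert'}]
```

Assigning the result back is how `disable_raycluster_tls` now uses the helper for both the `volumes` and `volumeMounts` lists.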
From 8f891648c08d69095d366139f81ac250ed73d8a8 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 22 Feb 2024 11:18:34 +0000 Subject: [PATCH 144/496] Fixed unit tests --- tests/test-case-no-mcad.yamls | 4 ++-- tests/test-case-prio.yaml | 4 ++-- tests/test-case.yaml | 4 ++-- tests/unit_test.py | 4 +++- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index dd05c8db..299ff9a8 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -96,7 +96,7 @@ spec: - configMap: items: - key: ca-bundle.crt - path: odh-custom-ca-bundle.crt + path: odh-trusted-ca-bundle.crt name: odh-trusted-ca-bundle optional: true name: odh-trusted-ca-cert @@ -183,7 +183,7 @@ spec: - configMap: items: - key: ca-bundle.crt - path: odh-custom-ca-bundle.crt + path: odh-trusted-ca-bundle.crt name: odh-trusted-ca-bundle optional: true name: odh-trusted-ca-cert diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index dc8eb2a7..1e72c442 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -127,7 +127,7 @@ spec: - configMap: items: - key: ca-bundle.crt - path: odh-custom-ca-bundle.crt + path: odh-trusted-ca-bundle.crt name: odh-trusted-ca-bundle optional: true name: odh-trusted-ca-cert @@ -215,7 +215,7 @@ spec: - configMap: items: - key: ca-bundle.crt - path: odh-custom-ca-bundle.crt + path: odh-trusted-ca-bundle.crt name: odh-trusted-ca-bundle optional: true name: odh-trusted-ca-cert diff --git a/tests/test-case.yaml b/tests/test-case.yaml index c88c04ab..76285209 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -125,7 +125,7 @@ spec: - configMap: items: - key: ca-bundle.crt - path: odh-custom-ca-bundle.crt + path: odh-trusted-ca-bundle.crt name: odh-trusted-ca-bundle optional: true name: odh-trusted-ca-cert @@ -212,7 +212,7 @@ spec: - configMap: items: - key: ca-bundle.crt - path: odh-custom-ca-bundle.crt + path: odh-trusted-ca-bundle.crt name: odh-trusted-ca-bundle optional: true name: odh-trusted-ca-cert diff --git a/tests/unit_test.py b/tests/unit_test.py index d3ef11e1..3fde4960 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -2717,7 +2717,9 @@ def test_enable_local_interactive(mocker): "name": "odh-trusted-ca-cert", "configMap": { "name": "odh-trusted-ca-bundle", - "items": [{"key": "ca-bundle.crt", "path": "odh-custom-ca-bundle.crt"}], + "items": [ + {"key": "ca-bundle.crt", "path": "odh-trusted-ca-bundle.crt"} + ], "optional": True, }, }, From 2452a108a605939f3d3f4d8148dab283bf7e8451 Mon Sep 17 00:00:00 2001 From: Mark Campbell Date: Thu, 22 Feb 2024 16:56:38 +0000 Subject: [PATCH 145/496] Updated codeflare-torchx version (#468) --- poetry.lock | 12 ++++++------ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index a9db5ff6..45756ec3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry and should not be changed by hand. 
[[package]] name = "aiohttp" @@ -428,13 +428,13 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [[package]] name = "codeflare-torchx" -version = "0.6.0.dev1" +version = "0.6.0.dev2" description = "TorchX SDK and Components" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "codeflare_torchx-0.6.0.dev1-py3-none-any.whl", hash = "sha256:a03bf570c775cb7fc7787a436ef5a76c726669e287af510513c5bc03033d1fca"}, + {file = "codeflare_torchx-0.6.0.dev2-py3-none-any.whl", hash = "sha256:d303efffb9b1e105390ed672a3358de40174146530929df83c7d7af27372fbcc"}, ] [package.dependencies] @@ -2149,8 +2149,8 @@ frozenlist = "*" fsspec = {version = "*", optional = true, markers = "extra == \"data\""} gpustat = {version = ">=1.0.0", optional = true, markers = "extra == \"default\""} grpcio = [ - {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\" and extra == \"default\""}, - {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""}, + {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\""}, + {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\""}, ] jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" @@ -2695,4 +2695,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "e7fa79bd035b5bffcb1668c0a8cf8fa4e869a614b934a09100ab1d53338fe11b" +content-hash = "33f4e77105de697fd6fc15537180df77ab057e3d2e5db27d7a9a2f8cc9f69a03" diff --git a/pyproject.toml b/pyproject.toml index e262b527..953ce0f6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ openshift-client = "1.0.18" rich = "^12.5" ray = {version = "2.7.0", extras = ["data", "default"]} kubernetes = ">= 25.3.0, < 27" -codeflare-torchx = "0.6.0.dev1" +codeflare-torchx = "0.6.0.dev2" cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" From c6e2b79db3cf9bf6ea634bf65a4396617eba779b Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 22 Feb 2024 17:06:28 +0000 Subject: [PATCH 146/496] Changes in docs for release: vv0.14.1 --- .../utils/generate_yaml.html | 101 ++++++++++-------- .../utils/openshift_oauth.html | 2 +- 2 files changed, 60 insertions(+), 43 deletions(-) diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index c2643556..d533c8d1 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -48,6 +48,7 @@

    Module codeflare_sdk.utils.generate_yaml

    (in the cluster sub-module) for AppWrapper generation. """ +import typing import yaml import sys import os @@ -497,35 +498,38 @@

    Module codeflare_sdk.utils.generate_yaml

    ][0].get("command")[2] = command +def del_from_list_by_name(l: list, target: typing.List[str]) -> list: + return [x for x in l if x["name"] not in target] + + def disable_raycluster_tls(resources): generic_template_spec = resources["GenericItems"][0]["generictemplate"]["spec"] - if "volumes" in generic_template_spec["headGroupSpec"]["template"]["spec"]: - del generic_template_spec["headGroupSpec"]["template"]["spec"]["volumes"] + headGroupTemplateSpec = generic_template_spec["headGroupSpec"]["template"]["spec"] + headGroupTemplateSpec["volumes"] = del_from_list_by_name( + headGroupTemplateSpec.get("volumes", []), + ["ca-vol", "server-cert"], + ) - if ( - "volumeMounts" - in generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"][0] - ): - del generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"][0][ - "volumeMounts" - ] + c: dict + for c in generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"]: + c["volumeMounts"] = del_from_list_by_name( + c.get("volumeMounts", []), ["ca-vol", "server-cert"] + ) if "initContainers" in generic_template_spec["headGroupSpec"]["template"]["spec"]: del generic_template_spec["headGroupSpec"]["template"]["spec"]["initContainers"] - if "volumes" in generic_template_spec["workerGroupSpecs"][0]["template"]["spec"]: - del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"]["volumes"] - - if ( - "volumeMounts" - in generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0] - ): - del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["volumeMounts"] + for workerGroup in generic_template_spec.get("workerGroupSpecs", []): + workerGroupSpec = workerGroup["template"]["spec"] + workerGroupSpec["volumes"] = del_from_list_by_name( + workerGroupSpec.get("volumes", []), + ["ca-vol", "server-cert"], + ) + for c in workerGroup["template"]["spec"].get("containers", []): + c["volumeMounts"] = del_from_list_by_name( + c.get("volumeMounts", []), ["ca-vol", "server-cert"] + ) del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ "initContainers" @@ -765,6 +769,19 @@

    Module codeflare_sdk.utils.generate_yaml

    Functions

    +
    +def del_from_list_by_name(l: list, target: List[str]) ‑> list +
    +
    +
    +
    + +Expand source code + +
    def del_from_list_by_name(l: list, target: typing.List[str]) -> list:
    +    return [x for x in l if x["name"] not in target]
    +
    +
    def delete_route_or_ingress(resources)
    @@ -806,32 +823,31 @@

    Functions

    def disable_raycluster_tls(resources):
         generic_template_spec = resources["GenericItems"][0]["generictemplate"]["spec"]
     
    -    if "volumes" in generic_template_spec["headGroupSpec"]["template"]["spec"]:
    -        del generic_template_spec["headGroupSpec"]["template"]["spec"]["volumes"]
    +    headGroupTemplateSpec = generic_template_spec["headGroupSpec"]["template"]["spec"]
    +    headGroupTemplateSpec["volumes"] = del_from_list_by_name(
    +        headGroupTemplateSpec.get("volumes", []),
    +        ["ca-vol", "server-cert"],
    +    )
     
    -    if (
    -        "volumeMounts"
    -        in generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"][0]
    -    ):
    -        del generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"][0][
    -            "volumeMounts"
    -        ]
    +    c: dict
    +    for c in generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"]:
    +        c["volumeMounts"] = del_from_list_by_name(
    +            c.get("volumeMounts", []), ["ca-vol", "server-cert"]
    +        )
     
         if "initContainers" in generic_template_spec["headGroupSpec"]["template"]["spec"]:
             del generic_template_spec["headGroupSpec"]["template"]["spec"]["initContainers"]
     
    -    if "volumes" in generic_template_spec["workerGroupSpecs"][0]["template"]["spec"]:
    -        del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"]["volumes"]
    -
    -    if (
    -        "volumeMounts"
    -        in generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]
    -    ):
    -        del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["volumeMounts"]
    +    for workerGroup in generic_template_spec.get("workerGroupSpecs", []):
    +        workerGroupSpec = workerGroup["template"]["spec"]
    +        workerGroupSpec["volumes"] = del_from_list_by_name(
    +            workerGroupSpec.get("volumes", []),
    +            ["ca-vol", "server-cert"],
    +        )
    +        for c in workerGroup["template"]["spec"].get("containers", []):
    +            c["volumeMounts"] = del_from_list_by_name(
    +                c.get("volumeMounts", []), ["ca-vol", "server-cert"]
    +            )
     
         del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][
             "initContainers"
    @@ -1699,6 +1715,7 @@ 

    Index

  • Functions

      +
    • del_from_list_by_name
    • delete_route_or_ingress
    • disable_raycluster_tls
    • enable_local_interactive
    • diff --git a/docs/detailed-documentation/utils/openshift_oauth.html b/docs/detailed-documentation/utils/openshift_oauth.html index 8dc82d03..de848cf5 100644 --- a/docs/detailed-documentation/utils/openshift_oauth.html +++ b/docs/detailed-documentation/utils/openshift_oauth.html @@ -208,7 +208,7 @@

      Module codeflare_sdk.utils.openshift_oauth

      port: targetPort: {port_name} tls: - termination: passthrough + termination: reencrypt to: kind: Service name: {service_name} From 8298e9eec87e929307e14604fa3b935d032d1c67 Mon Sep 17 00:00:00 2001 From: Eoin Gallinagh Date: Mon, 12 Feb 2024 14:48:19 +0000 Subject: [PATCH 147/496] update: Demo notebooks to use simplified import statements for the codeflare-sdk --- demo-notebooks/additional-demos/hf_interactive.ipynb | 3 +-- demo-notebooks/additional-demos/local_interactive.ipynb | 5 ++--- demo-notebooks/guided-demos/0_basic_ray.ipynb | 3 +-- demo-notebooks/guided-demos/1_basic_instascale.ipynb | 3 +-- demo-notebooks/guided-demos/2_basic_jobs.ipynb | 5 ++--- demo-notebooks/guided-demos/3_basic_interactive.ipynb | 3 +-- demo-notebooks/guided-demos/4_gpt.ipynb | 5 ++--- .../guided-demos/notebook-ex-outputs/0_basic_ray.ipynb | 3 +-- .../notebook-ex-outputs/1_basic_instascale.ipynb | 3 +-- .../guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb | 5 ++--- .../notebook-ex-outputs/3_basic_interactive.ipynb | 3 +-- demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb | 5 ++--- demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb | 3 +-- .../guided-demos/preview_nbs/1_basic_instascale.ipynb | 3 +-- demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb | 5 ++--- .../guided-demos/preview_nbs/3_basic_interactive.ipynb | 3 +-- demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb | 5 ++--- docs/authentication.md | 4 ++-- docs/cluster-configuration.md | 2 +- 19 files changed, 27 insertions(+), 44 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 45e9653f..37216b5d 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -42,8 +42,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index b6773ee1..674a655e 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -10,8 +10,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { @@ -116,7 +115,7 @@ "metadata": {}, "outputs": [], "source": [ - "from codeflare_sdk.utils import generate_cert\n", + "from codeflare_sdk import generate_cert\n", "\n", "if local_interactive:\n", " generate_cert.generate_tls_cert(cluster_name, namespace)\n", diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index c905d175..205f0217 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -19,8 +19,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { diff --git 
a/demo-notebooks/guided-demos/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/1_basic_instascale.ipynb index f1795382..418737eb 100644 --- a/demo-notebooks/guided-demos/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/1_basic_instascale.ipynb @@ -16,8 +16,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { diff --git a/demo-notebooks/guided-demos/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/2_basic_jobs.ipynb index 4b3cee91..e57a6ed8 100644 --- a/demo-notebooks/guided-demos/2_basic_jobs.ipynb +++ b/demo-notebooks/guided-demos/2_basic_jobs.ipynb @@ -16,8 +16,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { @@ -108,7 +107,7 @@ "metadata": {}, "outputs": [], "source": [ - "from codeflare_sdk.job.jobs import DDPJobDefinition" + "from codeflare_sdk import DDPJobDefinition" ] }, { diff --git a/demo-notebooks/guided-demos/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/3_basic_interactive.ipynb index bfcb2df3..090a4a30 100644 --- a/demo-notebooks/guided-demos/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/3_basic_interactive.ipynb @@ -16,8 +16,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { diff --git a/demo-notebooks/guided-demos/4_gpt.ipynb b/demo-notebooks/guided-demos/4_gpt.ipynb index dba03bc4..a4e1f366 100644 --- a/demo-notebooks/guided-demos/4_gpt.ipynb +++ b/demo-notebooks/guided-demos/4_gpt.ipynb @@ -8,8 +8,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { @@ -98,7 +97,7 @@ "metadata": {}, "outputs": [], "source": [ - "from codeflare_sdk.job.jobs import DDPJobDefinition" + "from codeflare_sdk import DDPJobDefinition" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index d07fd430..d8f6c34c 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -19,8 +19,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb index 6cfe8143..4b28b205 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb +++ 
b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb @@ -16,8 +16,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb index 42600a3f..16656177 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb @@ -16,8 +16,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { @@ -183,7 +182,7 @@ "metadata": {}, "outputs": [], "source": [ - "from codeflare_sdk.job.jobs import DDPJobDefinition" + "from codeflare_sdk import DDPJobDefinition" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb index 974bd058..7ac00470 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb @@ -16,8 +16,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb index 4ed6cc54..3f8afa35 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb @@ -8,8 +8,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { @@ -174,7 +173,7 @@ "metadata": {}, "outputs": [], "source": [ - "from codeflare_sdk.job.jobs import DDPJobDefinition" + "from codeflare_sdk import DDPJobDefinition" ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index be2ca1e5..b0f12d4b 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -19,8 +19,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb index f1795382..418737eb 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb +++ 
b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb @@ -16,8 +16,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb index e3bbbce9..495a0855 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb @@ -16,8 +16,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { @@ -108,7 +107,7 @@ "metadata": {}, "outputs": [], "source": [ - "from codeflare_sdk.job.jobs import DDPJobDefinition" + "from codeflare_sdk import DDPJobDefinition" ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb index bfcb2df3..090a4a30 100644 --- a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb @@ -16,8 +16,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb b/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb index 77edf591..e0407e8c 100644 --- a/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb @@ -8,8 +8,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration\n", - "from codeflare_sdk.cluster.auth import TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { @@ -98,7 +97,7 @@ "metadata": {}, "outputs": [], "source": [ - "from codeflare_sdk.job.jobs import DDPJobDefinition" + "from codeflare_sdk import DDPJobDefinition" ] }, { diff --git a/docs/authentication.md b/docs/authentication.md index 20dd4452..8812a6fc 100644 --- a/docs/authentication.md +++ b/docs/authentication.md @@ -5,7 +5,7 @@ Authenticating with your cluster allows you to perform actions such as creating ## Method 1 Token Authentication This is how a typical user would authenticate to their cluster using `TokenAuthentication`. ``` -from codeflare_sdk.cluster.auth import TokenAuthentication +from codeflare_sdk import TokenAuthentication auth = TokenAuthentication( token = "XXXXX", @@ -27,7 +27,7 @@ If the user has not specifically authenticated through the SDK by other means su A user can specify a config file via a different authentication class `KubeConfigFileAuthentication` for authenticating with the SDK.
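The notebook and docs edits in this patch all move to the flattened imports re-exported from the package root. A minimal sketch of the new style, keeping the placeholder token and server values from the docs and only the fields shown in the surrounding diffs:

```python
# Flattened imports from the package root, as introduced by this patch.
from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication

auth = TokenAuthentication(
    token="XXXXX",   # placeholder, as in docs/authentication.md
    server="XXXXX",  # placeholder cluster API server URL
)
auth.login()

cluster = Cluster(ClusterConfiguration(
    name="ray-example",  # mandatory field, per docs/cluster-configuration.md
))
```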
      This is what loading a custom config file would typically look like. ``` -from codeflare_sdk.cluster.auth import KubeConfigFileAuthentication +from codeflare_sdk import KubeConfigFileAuthentication auth = KubeConfigFileAuthentication( kube_config_path="/path/to/config", diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index 2ad614a6..bb058fa4 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -4,7 +4,7 @@ To create Ray Clusters using the CodeFlare SDK a cluster configuration needs to This is what a typical cluster configuration would look like; Note: The values for CPU and Memory are at the minimum requirements for creating the Ray Cluster. ``` -from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration +from codeflare_sdk import Cluster, ClusterConfiguration cluster = Cluster(ClusterConfiguration( name='ray-example', # Mandatory Field From 90c45b88ba78cb64661f5e2efc2856205a964efb Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Tue, 20 Feb 2024 15:23:33 +0100 Subject: [PATCH 148/496] Update deprecated GitHub actions --- .github/workflows/coverage-badge.yaml | 8 ++++---- .github/workflows/e2e_tests.yaml | 10 +++++----- .github/workflows/nightly-image-build.yaml | 6 +++--- .github/workflows/pre-commit.yaml | 2 +- .github/workflows/release.yaml | 4 ++-- .github/workflows/unit-tests.yml | 4 ++-- 6 files changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/coverage-badge.yaml b/.github/workflows/coverage-badge.yaml index a58ab8db..5a88934c 100644 --- a/.github/workflows/coverage-badge.yaml +++ b/.github/workflows/coverage-badge.yaml @@ -12,9 +12,9 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up Python 3.9 - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: 3.9 - name: Install dependencies @@ -29,10 +29,10 @@ jobs: coverage run -m --source=src pytest -v tests/unit_test.py - name: Coverage Badge - uses: tj-actions/coverage-badge-py@v1.8 + uses: tj-actions/coverage-badge-py@v2 - name: Verify Changed files - uses: tj-actions/verify-changed-files@v12 + uses: tj-actions/verify-changed-files@v18 id: changed_files with: files: coverage.svg diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 7efb9937..1b0e4900 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -48,25 +48,25 @@ jobs: df -h - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: submodules: recursive - name: Checkout common repo code - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: 'project-codeflare/codeflare-common' ref: 'main' path: 'common' - name: Checkout CodeFlare operator repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: project-codeflare/codeflare-operator path: codeflare-operator - name: Set Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v5 with: go-version: v1.20 @@ -142,7 +142,7 @@ jobs: output-directory: ${CODEFLARE_TEST_OUTPUT_DIR} - name: Upload logs - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 if: always() && steps.deploy.outcome == 'success' with: name: logs diff --git a/.github/workflows/nightly-image-build.yaml b/.github/workflows/nightly-image-build.yaml index 43e53fc2..4532c4a7 100644 --- a/.github/workflows/nightly-image-build.yaml +++ b/.github/workflows/nightly-image-build.yaml @@ -18,9 +18,9 @@ jobs: runs-on: ubuntu-latest steps: 
- name: Checkout the repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - name: Install Poetry @@ -41,7 +41,7 @@ jobs: working-directory: custom-nb-image run: docker build -t quay.io/${{ env.QUAY_ORGANIZATION }}/notebook:${{ env.IMAGE_TAG }} . - name: Login to Quay.io - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: quay.io username: ${{ secrets.QUAY_ID }} diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index b2ed80ca..4aa51882 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -14,7 +14,7 @@ jobs: container: image: quay.io/project-codeflare/codeflare-sdk-precommit:v0.0.1 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Run pre-commit checks run: pre-commit run --all-files diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index a191baa9..1bf40644 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -37,9 +37,9 @@ jobs: PR_BRANCH_NAME: adjustments-release-${{ github.event.inputs.release-version }} steps: - name: Checkout the repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ github.event.inputs.python_version }} - name: Install Poetry diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 495bf487..5d12791b 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -13,10 +13,10 @@ jobs: container: image: quay.io/project-codeflare/codeflare-sdk-precommit:v0.0.3 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install poetry run: pip install poetry - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: '3.8' - name: Install dependencies From de6cdd57df3269105b2a199af31eaf3332a060bb Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Tue, 27 Feb 2024 16:09:33 +0530 Subject: [PATCH 149/496] add an automated CI workflow to sync ODH-notebooks with latest codeflare-sdk release --- .github/workflows/odh-notebooks-sync.yml | 128 +++++++++++++++++++++++ .github/workflows/release.yaml | 12 +++ 2 files changed, 140 insertions(+) create mode 100644 .github/workflows/odh-notebooks-sync.yml diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml new file mode 100644 index 00000000..7671b736 --- /dev/null +++ b/.github/workflows/odh-notebooks-sync.yml @@ -0,0 +1,128 @@ +# The aim of this GitHub workflow is to update the pipfile to sync with Codeflare-SDK release. 
+name: Sync ODH-notebooks with codeflare-sdk release +on: + workflow_dispatch: + inputs: + upstream-repository-organization: + required: true + description: "Owner of target upstream notebooks repository used to open a PR against" + default: "opendatahub-io" + + codeflare-repository-organization: + required: true + description: "Owner of origin notebooks repository used to open a PR" + default: "project-codeflare" + + codeflare_sdk_release_version: + required: true + description: "Provide version of the Codeflare-SDK release" + +env: + BRANCH_NAME: main + CODEFLARE_RELEASE_VERSION: ${{ github.event.inputs.codeflare_sdk_release_version }} + UPDATER_BRANCH: odh-sync-updater-${{ github.run_id }} + UPSTREAM_OWNER: ${{ github.event.inputs.upstream-repository-organization }} + REPO_OWNER: ${{ github.event.inputs.codeflare-repository-organization }} + REPO_NAME: notebooks + GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Clone repository and Sync + run: | + git clone https://x-access-token:${GITHUB_TOKEN}@github.com/$REPO_OWNER/$REPO_NAME.git $REPO_NAME + cd $REPO_NAME + git remote add upstream https://github.com/$UPSTREAM_OWNER/$REPO_NAME.git + git config --global user.email "138894154+codeflare-machine-account@users.noreply.github.com" + git config --global user.name "codeflare-machine-account" + git remote -v + git pull upstream main && git push origin main + + - name: Setup Python environment + uses: actions/setup-python@v4 + with: + python-version: | + 3.8 + 3.9 + + - name: Install pipenv and pip-versions + run: pip install pipenv pip-versions + + - name: Update Pipfiles in accordance with Codeflare-SDK latest release + run: | + package_name=codeflare-sdk + # Get the list of available versions for the package + if ! versions=$(pipenv run pip-versions list $package_name);then + echo "Failed to retrieve versions for $package_name" + exit 1 + fi + # Check if the desired version exists in the list + if echo "$versions" | grep -q "${CODEFLARE_RELEASE_VERSION}"; then + echo "Version ${CODEFLARE_RELEASE_VERSION} is available for $package_name" + # list all Pipfile paths having Codeflare-SDK listed + paths+=($(grep -rl "${package_name} = \"~=.*\"")) + # Extracting only directories from file paths, excluding a `.gitworkflow` directory + directories=() + exclude_directories=( + ".git/objects/pack" + ".github/workflows/", + ) + for path in "${paths[@]}"; do + current_dir=$(dirname "$path") + #Check if current_dir is not in exclude_directories list + if [[ ! "${exclude_directories[@]}" =~ "$current_dir" ]]; then + #Check if Pipfile exists in current_dir + if [ -f "$current_dir/Pipfile" ];then + directories+=("$current_dir") + fi + fi + done + # Remove duplicates + directories=($(echo "${directories[@]}" | tr ' ' '\n' | sort -u | tr '\n' ' ')) + # Print the directories for verification + echo "Directories (Start updating Pipfile in these below directories in accordance with Codeflare-SDK latest release):" + for dir in "${directories[@]}"; do + echo "- $dir" + done + # iterate over the directories and update Pipfile + counter=0 + total=${#directories[@]} + for dir in "${directories[@]}"; do + counter=$((counter+1)) + echo "--Processing directory $counter '$dir' of total $total" + cd "$dir" && pipenv install ${package_name}~="${CODEFLARE_RELEASE_VERSION}" && pipenv --rm && cd - + echo "$((total-counter)) directories remaining.." 
+ done + else + versions_list=$(echo "$versions" | tr '\n' ' ' | sed 's/, $//') + versions="${versions_list%,}" + echo "Version '${CODEFLARE_RELEASE_VERSION}' is not available for $package_name" + echo "Available versions for $package_name: $versions" + exit 1 + fi + + - name: Push changes + run: | + cd $REPO_NAME + git add . && git status && git checkout -b ${{ env.UPDATER_BRANCH }} && \ + git commit -am "Updated notebooks via ${{ env.UPDATER_BRANCH }} GitHub action" --signoff && + git remote set-url origin https://x-access-token:${GITHUB_TOKEN}@github.com/$REPO_OWNER/$REPO_NAME.git + git push origin ${{ env.UPDATER_BRANCH }} + + - name: Create Pull Request + run: | + gh pr create --repo $UPSTREAM_OWNER/$REPO_NAME \ + --title "$pr_title" \ + --body "$pr_body" \ + --head $REPO_OWNER:$UPDATER_BRANCH \ + --base $BRANCH_NAME + env: + pr_title: "[Digest Updater Action] Update notebook's pipfile to sync with Codeflare-SDK release" + pr_body: | + :rocket: This is an automated Pull Request. + + This PR updates the `Pipfile` to sync with latest Codeflare-SDK release. + + :exclamation: **IMPORTANT NOTE**: Remember to delete the ` ${{ env.UPDATER_BRANCH }}` branch after merging the changes diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 1bf40644..b3f374d0 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -100,3 +100,15 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} shell: bash + + - name: Sync ODH Notebooks + run: | + gh workflow run odh-notebooks-sync.yml \ + --repo ${{ github.event.inputs.codeflare-repository-organization }}/codeflare-sdk \ + --ref ${{ github.ref }} \ + --field upstream-repository-organization=opendatahub-io + --field codeflare-repository-organization=${{ github.event.inputs.codeflare-repository-organization }} \ + --field codeflare_sdk_release_version=${{ github.event.inputs.release-version }} + env: + GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} + shell: bash From 0beece180873973801b8f6a536659bf9d043044d Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Fri, 1 Mar 2024 15:25:46 +0530 Subject: [PATCH 150/496] Update PR title to make it more specific and meaningful --- .github/workflows/odh-notebooks-sync.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index 7671b736..0248ed92 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -119,10 +119,8 @@ jobs: --head $REPO_OWNER:$UPDATER_BRANCH \ --base $BRANCH_NAME env: - pr_title: "[Digest Updater Action] Update notebook's pipfile to sync with Codeflare-SDK release" + pr_title: "[Codeflare Action] Update notebook's pipfile to sync with Codeflare-SDK release ${{ env.CODEFLARE_RELEASE_VERSION }}" pr_body: | :rocket: This is an automated Pull Request. This PR updates the `Pipfile` to sync with latest Codeflare-SDK release. 
- - :exclamation: **IMPORTANT NOTE**: Remember to delete the ` ${{ env.UPDATER_BRANCH }}` branch after merging the changes From 47381fbedc929972e2eb0a6ec72b72d043a8c85b Mon Sep 17 00:00:00 2001 From: Jiri Petrlik Date: Fri, 1 Mar 2024 16:01:30 +0100 Subject: [PATCH 151/496] RHOAIENG-3771 - Reduce execution time of E2E tests --- tests/e2e/mnist.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/e2e/mnist.py b/tests/e2e/mnist.py index a9958965..2971d9c9 100644 --- a/tests/e2e/mnist.py +++ b/tests/e2e/mnist.py @@ -19,7 +19,7 @@ from pytorch_lightning.callbacks.progress import TQDMProgressBar from torch import nn from torch.nn import functional as F -from torch.utils.data import DataLoader, random_split +from torch.utils.data import DataLoader, random_split, RandomSampler from torchmetrics import Accuracy from torchvision import transforms from torchvision.datasets import MNIST @@ -127,7 +127,11 @@ def setup(self, stage=None): ) def train_dataloader(self): - return DataLoader(self.mnist_train, batch_size=BATCH_SIZE) + return DataLoader( + self.mnist_train, + batch_size=BATCH_SIZE, + sampler=RandomSampler(self.mnist_train, num_samples=1000), + ) def val_dataloader(self): return DataLoader(self.mnist_val, batch_size=BATCH_SIZE) @@ -147,10 +151,11 @@ def test_dataloader(self): trainer = Trainer( accelerator="auto", # devices=1 if torch.cuda.is_available() else None, # limiting got iPython runs - max_epochs=5, + max_epochs=3, callbacks=[TQDMProgressBar(refresh_rate=20)], num_nodes=int(os.environ.get("GROUP_WORLD_SIZE", 1)), devices=int(os.environ.get("LOCAL_WORLD_SIZE", 1)), + replace_sampler_ddp=False, strategy="ddp", ) From 54229367184e3b5da44d2e06e4999f032fc67f68 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 13 Mar 2024 12:48:23 +0000 Subject: [PATCH 152/496] Fix e2e test output logs on failure --- .github/workflows/e2e_tests.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 1b0e4900..dbf2fce2 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -121,7 +121,8 @@ jobs: echo "Running e2e tests..." 
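The mnist.py change in patch 151 above caps each training epoch at 1,000 randomly drawn samples so the e2e job finishes quickly. A self-contained sketch of the same `RandomSampler` pattern, substituting a random tensor dataset for MNIST:

```python
import torch
from torch.utils.data import DataLoader, RandomSampler, TensorDataset

# Stand-in for the MNIST training split used in the real test.
dataset = TensorDataset(
    torch.randn(60_000, 1, 28, 28),
    torch.randint(0, 10, (60_000,)),
)

# Mirrors the patch: each epoch draws only 1000 random samples instead of
# iterating over the full dataset, which keeps CI training time short.
loader = DataLoader(
    dataset,
    batch_size=256,
    sampler=RandomSampler(dataset, num_samples=1000),
)
```

The patch also sets `replace_sampler_ddp=False` on the Lightning `Trainer` so the custom sampler is not overridden by the DDP strategy.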
poetry run pytest -v -s ./tests/e2e -m kind > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 - kubectl config use-context kind-cluster + - name: Switch to kind-cluster context to print logs + run: kubectl config use-context kind-cluster - name: Print CodeFlare operator logs if: always() && steps.deploy.outcome == 'success' From 0c014f17623c35f7df69be4e7096968081898e41 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 13 Mar 2024 12:31:36 +0000 Subject: [PATCH 153/496] Fix warning from pytest and increase pretty_print coverage --- pyproject.toml | 1 - src/codeflare_sdk/utils/pretty_print.py | 16 ++++------------ 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 953ce0f6..41782c4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,4 +54,3 @@ markers = [ "kind", "openshift" ] -timeout = 900 diff --git a/src/codeflare_sdk/utils/pretty_print.py b/src/codeflare_sdk/utils/pretty_print.py index ca371182..0bdc185d 100644 --- a/src/codeflare_sdk/utils/pretty_print.py +++ b/src/codeflare_sdk/utils/pretty_print.py @@ -70,15 +70,11 @@ def print_cluster_status(cluster: RayCluster): ) name = cluster.name dashboard = cluster.dashboard - # owned = bool(cluster["userOwned"]) - owned = True #'table0' to display the cluster name, status, url, and dashboard link table0 = Table(box=None, show_header=False) - if owned: - table0.add_row("[white on green][bold]Name") - else: - table0.add_row("") + + table0.add_row("[white on green][bold]Name") table0.add_row("[bold underline]" + name, status) table0.add_row() # fixme harcded to default for now @@ -119,15 +115,11 @@ def print_clusters(clusters: List[RayCluster]): memory = str(cluster.worker_mem_min) + "~" + str(cluster.worker_mem_max) cpu = str(cluster.worker_cpu) gpu = str(cluster.worker_gpu) - # owned = bool(cluster["userOwned"]) - owned = True #'table0' to display the cluster name, status, url, and dashboard link table0 = Table(box=None, show_header=False) - if owned: - table0.add_row("[white on green][bold]Name") - else: - table0.add_row("") + + table0.add_row("[white on green][bold]Name") table0.add_row("[bold underline]" + name, status) table0.add_row() # fixme harcded to default for now From 171e91ad7148398a0edc34bdee54191d23c8d7c1 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 15 Mar 2024 15:44:28 +0000 Subject: [PATCH 154/496] Set Cluster Configuration to operate in memory by default --- src/codeflare_sdk/cluster/cluster.py | 202 +++++++++------- src/codeflare_sdk/cluster/config.py | 1 + src/codeflare_sdk/utils/generate_yaml.py | 38 +++- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 1 + tests/e2e/mnist_raycluster_sdk_test.py | 1 + tests/test-default-appwrapper.yaml | 228 +++++++++++++++++++ tests/unit_test.py | 51 ++++- tests/unit_test_support.py | 1 + 8 files changed, 432 insertions(+), 91 deletions(-) create mode 100644 tests/test-default-appwrapper.yaml diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index aff3ea6b..4341227a 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -73,9 +73,7 @@ def __init__(self, config: ClusterConfiguration): self.config = config self.app_wrapper_yaml = self.create_app_wrapper() self._job_submission_client = None - self.app_wrapper_name = self.app_wrapper_yaml.replace(".yaml", "").split("/")[ - -1 - ] + self.app_wrapper_name = self.config.name @property def _client_headers(self): @@ -192,6 +190,7 @@ def create_app_wrapper(self): dispatch_priority = 
self.config.dispatch_priority ingress_domain = self.config.ingress_domain ingress_options = self.config.ingress_options + write_to_file = self.config.write_to_file return generate_appwrapper( name=name, namespace=namespace, @@ -217,6 +216,7 @@ def create_app_wrapper(self): openshift_oauth=self.config.openshift_oauth, ingress_domain=ingress_domain, ingress_options=ingress_options, + write_to_file=write_to_file, ) # creates a new cluster with the provided or default spec @@ -235,15 +235,25 @@ def up(self): config_check() api_instance = client.CustomObjectsApi(api_config_handler()) if self.config.mcad: - with open(self.app_wrapper_yaml) as f: - aw = yaml.load(f, Loader=yaml.FullLoader) - api_instance.create_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - body=aw, - ) + if self.config.write_to_file: + with open(self.app_wrapper_yaml) as f: + aw = yaml.load(f, Loader=yaml.FullLoader) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) + else: + aw = yaml.safe_load(self.app_wrapper_yaml) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) else: self._component_resources_up(namespace, api_instance) except Exception as e: # pragma: no cover @@ -492,7 +502,9 @@ def torchx_config( to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}): + def from_k8_cluster_object( + rc, mcad=True, ingress_domain=None, ingress_options={}, write_to_file=False + ): machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -538,6 +550,7 @@ def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={ mcad=mcad, ingress_domain=ingress_domain, ingress_options=ingress_options, + write_to_file=write_to_file, ) return Cluster(cluster_config) @@ -551,79 +564,25 @@ def local_client_url(self): def _component_resources_up( self, namespace: str, api_instance: client.CustomObjectsApi ): - with open(self.app_wrapper_yaml) as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) - for resource in yamls: - if resource["kind"] == "RayCluster": - api_instance.create_namespaced_custom_object( - group="ray.io", - version="v1", - namespace=namespace, - plural="rayclusters", - body=resource, - ) - elif resource["kind"] == "Ingress": - api_instance.create_namespaced_custom_object( - group="networking.k8s.io", - version="v1", - namespace=namespace, - plural="ingresses", - body=resource, - ) - elif resource["kind"] == "Route": - api_instance.create_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=namespace, - plural="routes", - body=resource, - ) - elif resource["kind"] == "Secret": - secret_instance = client.CoreV1Api(api_config_handler()) - secret_instance.create_namespaced_secret( - namespace=namespace, - body=resource, - ) + if self.config.write_to_file: + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + _create_resources(yamls, namespace, api_instance) + else: + yamls = yaml.load_all(self.app_wrapper_yaml, Loader=yaml.FullLoader) + _create_resources(yamls, namespace, api_instance) def _component_resources_down( self, namespace: str, api_instance: client.CustomObjectsApi ): - with 
open(self.app_wrapper_yaml) as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) - for resource in yamls: - if resource["kind"] == "RayCluster": - api_instance.delete_namespaced_custom_object( - group="ray.io", - version="v1", - namespace=namespace, - plural="rayclusters", - name=self.app_wrapper_name, - ) - elif resource["kind"] == "Ingress": - name = resource["metadata"]["name"] - api_instance.delete_namespaced_custom_object( - group="networking.k8s.io", - version="v1", - namespace=namespace, - plural="ingresses", - name=name, - ) - elif resource["kind"] == "Route": - name = resource["metadata"]["name"] - api_instance.delete_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=namespace, - plural="routes", - name=name, - ) - elif resource["kind"] == "Secret": - name = resource["metadata"]["name"] - secret_instance = client.CoreV1Api(api_config_handler()) - secret_instance.delete_namespaced_secret( - namespace=namespace, - name=name, - ) + cluster_name = self.config.name + if self.config.write_to_file: + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + _delete_resources(yamls, namespace, api_instance, cluster_name) + else: + yamls = yaml.safe_load_all(self.app_wrapper_yaml) + _delete_resources(yamls, namespace, api_instance, cluster_name) def list_all_clusters(namespace: str, print_to_console: bool = True): @@ -675,7 +634,9 @@ def get_current_namespace(): # pragma: no cover return None -def get_cluster(cluster_name: str, namespace: str = "default"): +def get_cluster( + cluster_name: str, namespace: str = "default", write_to_file: bool = False +): try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -746,6 +707,7 @@ def get_cluster(cluster_name: str, namespace: str = "default"): mcad=mcad, ingress_domain=ingress_domain, ingress_options=ingress_options, + write_to_file=write_to_file, ) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" @@ -753,6 +715,80 @@ def get_cluster(cluster_name: str, namespace: str = "default"): # private methods +def _delete_resources( + yamls, namespace: str, api_instance: client.CustomObjectsApi, cluster_name: str +): + for resource in yamls: + if resource["kind"] == "RayCluster": + name = resource["metadata"]["name"] + api_instance.delete_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=namespace, + plural="rayclusters", + name=name, + ) + elif resource["kind"] == "Ingress": + name = resource["metadata"]["name"] + api_instance.delete_namespaced_custom_object( + group="networking.k8s.io", + version="v1", + namespace=namespace, + plural="ingresses", + name=name, + ) + elif resource["kind"] == "Route": + name = resource["metadata"]["name"] + api_instance.delete_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + name=name, + ) + elif resource["kind"] == "Secret": + name = resource["metadata"]["name"] + secret_instance = client.CoreV1Api(api_config_handler()) + secret_instance.delete_namespaced_secret( + namespace=namespace, + name=name, + ) + + +def _create_resources(yamls, namespace: str, api_instance: client.CustomObjectsApi): + for resource in yamls: + if resource["kind"] == "RayCluster": + api_instance.create_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=namespace, + plural="rayclusters", + body=resource, + ) + elif resource["kind"] == "Ingress": + api_instance.create_namespaced_custom_object( + 
group="networking.k8s.io", + version="v1", + namespace=namespace, + plural="ingresses", + body=resource, + ) + elif resource["kind"] == "Route": + api_instance.create_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=namespace, + plural="routes", + body=resource, + ) + elif resource["kind"] == "Secret": + secret_instance = client.CoreV1Api(api_config_handler()) + secret_instance.create_namespaced_secret( + namespace=namespace, + body=resource, + ) + + def _check_aw_exists(name: str, namespace: str) -> bool: try: config_check() diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 0311d0e3..86d4252e 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -55,3 +55,4 @@ class ClusterConfiguration: openshift_oauth: bool = False # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator ingress_options: dict = field(default_factory=dict) ingress_domain: str = None + write_to_file: bool = False diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 49380523..fe012c6f 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -640,6 +640,27 @@ def write_components(user_yaml: dict, output_file_name: str): print(f"Written to: {output_file_name}") +def load_components(user_yaml: dict, name: str): + component_list = [] + components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") + for component in components: + if "generictemplate" in component: + component_list.append(component["generictemplate"]) + + resources = "---\n" + "---\n".join( + [yaml.dump(component) for component in component_list] + ) + user_yaml = resources + print(f"Yaml resources loaded for {name}") + return user_yaml + + +def load_appwrapper(user_yaml: dict, name: str): + user_yaml = yaml.dump(user_yaml) + print(f"Yaml resources loaded for {name}") + return user_yaml + + def generate_appwrapper( name: str, namespace: str, @@ -665,6 +686,7 @@ def generate_appwrapper( openshift_oauth: bool, ingress_domain: str, ingress_options: dict, + write_to_file: bool, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -724,8 +746,16 @@ def generate_appwrapper( directory_path = os.path.expanduser("~/.codeflare/appwrapper/") outfile = os.path.join(directory_path, appwrapper_name + ".yaml") - if not mcad: - write_components(user_yaml, outfile) + + if write_to_file: + if mcad: + write_user_appwrapper(user_yaml, outfile) + else: + write_components(user_yaml, outfile) + return outfile else: - write_user_appwrapper(user_yaml, outfile) - return outfile + if mcad: + user_yaml = load_appwrapper(user_yaml, name) + else: + user_yaml = load_components(user_yaml, name) + return user_yaml diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 0cee617d..07991a30 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -45,6 +45,7 @@ def run_mnist_raycluster_sdk_oauth(self): instascale=False, image=ray_image, openshift_oauth=True, + write_to_file=True, ) ) diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py index 76a5e260..27c1451e 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.py +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -70,6 +70,7 @@ def run_mnist_raycluster_sdk(self): instascale=False, image=ray_image, 
ingress_options=ingress_options, + write_to_file=True, ) ) diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml new file mode 100644 index 00000000..1ed93670 --- /dev/null +++ b/tests/test-default-appwrapper.yaml @@ -0,0 +1,228 @@ +apiVersion: workload.codeflare.dev/v1beta1 +kind: AppWrapper +metadata: + name: unit-test-default-cluster + namespace: opendatahub +spec: + resources: + GenericItems: + - custompodresources: + - limits: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + replicas: 1 + requests: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + - limits: + cpu: 1 + memory: 2G + nvidia.com/gpu: 0 + replicas: 1 + requests: + cpu: 1 + memory: 2G + nvidia.com/gpu: 0 + generictemplate: + apiVersion: ray.io/v1 + kind: RayCluster + metadata: + labels: + controller-tools.k8s.io: '1.0' + workload.codeflare.dev/appwrapper: unit-test-default-cluster + name: unit-test-default-cluster + namespace: opendatahub + spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + serviceType: ClusterIP + template: + spec: + containers: + - env: + - name: MY_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: RAY_USE_TLS + value: '0' + - name: RAY_TLS_SERVER_CERT + value: /home/ray/workspace/tls/server.crt + - name: RAY_TLS_SERVER_KEY + value: /home/ray/workspace/tls/server.key + - name: RAY_TLS_CA_CERT + value: /home/ray/workspace/tls/ca.crt + image: quay.io/project-codeflare/ray:latest-py39-cu118 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + requests: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + rayVersion: 2.7.0 + workerGroupSpecs: + - groupName: small-group-unit-test-default-cluster + maxReplicas: 1 + minReplicas: 1 + rayStartParams: + block: 'true' + num-gpus: '0' + replicas: 1 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - env: + - name: MY_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + - name: RAY_USE_TLS + value: '0' + - name: RAY_TLS_SERVER_CERT + value: /home/ray/workspace/tls/server.crt + - name: RAY_TLS_SERVER_KEY + value: /home/ray/workspace/tls/server.key + - name: RAY_TLS_CA_CERT + value: /home/ray/workspace/tls/ca.crt + image: quay.io/project-codeflare/ray:latest-py39-cu118 + lifecycle: + 
preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 1 + memory: 2G + nvidia.com/gpu: 0 + requests: + cpu: 1 + memory: 2G + nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + replicas: 1 + - generictemplate: + apiVersion: networking.k8s.io/v1 + kind: Ingress + metadata: + labels: + ingress-options: 'false' + ingress-owner: unit-test-default-cluster + name: ray-dashboard-unit-test-default-cluster + namespace: opendatahub + spec: + ingressClassName: nginx + rules: + - host: ray-dashboard-unit-test-default-cluster-opendatahub.apps.cluster.awsroute.org + http: + paths: + - backend: + service: + name: unit-test-default-cluster-head-svc + port: + number: 8265 + path: / + pathType: Prefix + replicas: 1 + Items: [] diff --git a/tests/unit_test.py b/tests/unit_test.py index 3fde4960..9c31d305 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -308,6 +308,7 @@ def test_create_app_wrapper_raises_error_with_no_image(): def test_cluster_creation_no_mcad(mocker): + # With written resources mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", @@ -316,6 +317,7 @@ def test_cluster_creation_no_mcad(mocker): config = createClusterConfig() config.name = "unit-test-cluster-ray" config.mcad = False + config.write_to_file = True cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" assert cluster.app_wrapper_name == "unit-test-cluster-ray" @@ -324,6 +326,37 @@ def test_cluster_creation_no_mcad(mocker): f"{parent}/tests/test-case-no-mcad.yamls", shallow=True, ) + # With resources loaded in memory + config = ClusterConfiguration( + name="unit-test-cluster-ray", + namespace="ns", + num_workers=2, + min_cpus=3, + max_cpus=4, + min_memory=5, + max_memory=6, + num_gpus=7, + instascale=True, + machine_types=["cpu.small", "gpu.large"], + image_pull_secrets=["unit-test-pull-secret"], + ingress_domain="apps.cluster.awsroute.org", + image="quay.io/project-codeflare/ray:latest-py39-cu118", + write_to_file=False, + mcad=False, + ) + cluster = Cluster(config) + test_resources = [] + expected_resources = [] + test_aw = yaml.load_all(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) + for resource in test_aw: + test_resources.append(resource) + with open( + f"{parent}/tests/test-case-no-mcad.yamls", + ) as f: + default_aw = yaml.load_all(f, Loader=yaml.FullLoader) + for resource in default_aw: + expected_resources.append(resource) + assert test_resources == expected_resources def test_cluster_creation_priority(mocker): @@ -362,8 +395,13 @@ def test_default_cluster_creation(mocker): ingress_domain="apps.cluster.awsroute.org", ) cluster = Cluster(default_config) + test_aw = 
yaml.safe_load(cluster.app_wrapper_yaml) + with open( + f"{parent}/tests/test-default-appwrapper.yaml", + ) as f: + default_aw = yaml.load(f, Loader=yaml.FullLoader) + assert test_aw == default_aw - assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-default-cluster.yaml" assert cluster.app_wrapper_name == "unit-test-default-cluster" assert cluster.config.namespace == "opendatahub" @@ -621,7 +659,10 @@ def test_local_client_url(mocker): ) cluster_config = ClusterConfiguration( - name="unit-test-cluster-localinter", namespace="ns", local_interactive=True + name="unit-test-cluster-localinter", + namespace="ns", + local_interactive=True, + write_to_file=True, ) cluster = Cluster(cluster_config) assert ( @@ -802,6 +843,7 @@ def test_ray_details(mocker, capsys): namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", ingress_domain="apps.cluster.awsroute.org", + write_to_file=True, ) ) captured = capsys.readouterr() @@ -2055,6 +2097,7 @@ def test_cluster_status(mocker): namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", ingress_domain="apps.cluster.awsroute.org", + write_to_file=True, ) ) mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) @@ -2149,6 +2192,7 @@ def test_wait_ready(mocker, capsys): namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", ingress_domain="apps.cluster.awsroute.org", + write_to_file=True, ) ) try: @@ -2924,6 +2968,7 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): openshift_oauth=True, image="quay.io/project-codeflare/ray:latest-py39-cu118", ingress_domain="apps.cluster.awsroute.org", + write_to_file=True, ) ) user_yaml = write_user_appwrapper.call_args.args[0] @@ -3094,10 +3139,8 @@ def test_rjc_list_jobs(ray_job_client, mocker): def test_cleanup(): os.remove(f"{aw_dir}unit-test-cluster.yaml") os.remove(f"{aw_dir}prio-test-cluster.yaml") - os.remove(f"{aw_dir}unit-test-default-cluster.yaml") os.remove(f"{aw_dir}test.yaml") os.remove(f"{aw_dir}raytest2.yaml") - os.remove(f"{aw_dir}quicktest.yaml") os.remove("tls-cluster-namespace/ca.crt") os.remove("tls-cluster-namespace/tls.crt") os.remove("tls-cluster-namespace/tls.key") diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 85f1a76d..c4b7416d 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -48,6 +48,7 @@ def createClusterConfig(): image_pull_secrets=["unit-test-pull-secret"], ingress_domain="apps.cluster.awsroute.org", image="quay.io/project-codeflare/ray:latest-py39-cu118", + write_to_file=True, ) return config From 415b938b047d924ad8125c0138eccfd151f471d5 Mon Sep 17 00:00:00 2001 From: Srihari Date: Wed, 20 Mar 2024 18:12:14 +0530 Subject: [PATCH 155/496] Fix codeflare-sdk python e2e tests to pick timeout --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 41782c4f..c47e466a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,3 +54,4 @@ markers = [ "kind", "openshift" ] +addopts = "--timeout=900" From 47ea469a898c2f96c132d8bb8c6b992089baf526 Mon Sep 17 00:00:00 2001 From: Srihari Date: Thu, 21 Mar 2024 22:23:31 +0530 Subject: [PATCH 156/496] Add TokenAuthentication step for RayCluster oauth test --- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 9 ++++++++- tests/e2e/support.py | 12 ++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 07991a30..3e24d465 100644 --- 
a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -4,7 +4,7 @@ from torchx.specs.api import AppState, is_terminal -from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration +from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication from codeflare_sdk.job.jobs import DDPJobDefinition import pytest @@ -30,6 +30,13 @@ def test_mnist_ray_cluster_sdk_auth(self): def run_mnist_raycluster_sdk_oauth(self): ray_image = get_ray_image() + auth = TokenAuthentication( + token=run_oc_command(["whoami", "--show-token=true"]), + server=run_oc_command(["whoami", "--show-server=true"]), + skip_tls=True, + ) + auth.login() + cluster = Cluster( ClusterConfiguration( name="mnist", diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 1ab45c19..abf4ac66 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -1,6 +1,7 @@ import os import random import string +import subprocess from kubernetes import client, config import kubernetes.client @@ -33,3 +34,14 @@ def initialize_kubernetes_client(self): # Initialize Kubernetes client self.api_instance = client.CoreV1Api() self.custom_api = kubernetes.client.CustomObjectsApi(self.api_instance.api_client) + + +def run_oc_command(args): + try: + result = subprocess.run( + ["oc"] + args, capture_output=True, text=True, check=True + ) + return result.stdout.strip() + except subprocess.CalledProcessError as e: + print(f"Error executing 'oc {' '.join(args)}': {e}") + return None From c59dab5b6fc2fdbdc706d9fa83584b757641b26d Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 5 Mar 2024 10:28:34 +0000 Subject: [PATCH 157/496] fix(users): fix get_cluster logic changed the get cluster logic to reflect the new volume mounts added References RHOAIENG-4028 --- src/codeflare_sdk/__init__.py | 1 + src/codeflare_sdk/cluster/__init__.py | 2 +- src/codeflare_sdk/cluster/cluster.py | 10 +- tests/unit_test.py | 228 +++++++++++++++++++++++--- 4 files changed, 216 insertions(+), 25 deletions(-) diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index 80bd36c6..0ed41d15 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -11,6 +11,7 @@ CodeFlareClusterStatus, RayCluster, AppWrapper, + get_cluster, ) from .job import JobDefinition, Job, DDPJobDefinition, DDPJob, RayJobClient diff --git a/src/codeflare_sdk/cluster/__init__.py b/src/codeflare_sdk/cluster/__init__.py index 73950a5c..419561d7 100644 --- a/src/codeflare_sdk/cluster/__init__.py +++ b/src/codeflare_sdk/cluster/__init__.py @@ -13,6 +13,6 @@ AppWrapper, ) -from .cluster import Cluster, ClusterConfiguration +from .cluster import Cluster, ClusterConfiguration, get_cluster from .awload import AWManager diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 4341227a..6f48e9ab 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -510,10 +510,12 @@ def from_k8_cluster_object( if "orderedinstance" in rc["metadata"]["labels"] else [] ) - local_interactive = ( - "volumeMounts" - in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0] - ) + for volume in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["volumes"]: + if volume["name"] == "ca-vol": + local_interactive = True + break + else: + local_interactive = False if local_interactive: ingress_domain = get_ingress_domain_from_client( rc["metadata"]["name"], rc["metadata"]["namespace"] diff --git a/tests/unit_test.py 
b/tests/unit_test.py index 9c31d305..1fda0227 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -959,12 +959,13 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "apiVersion": "ray.io/v1", "kind": "RayCluster", "metadata": { - "creationTimestamp": "2023-02-22T16:26:07Z", + "creationTimestamp": "2024-03-05T09:55:37Z", "generation": 1, "labels": { - "workload.codeflare.dev/appwrapper": "quicktest", + "appwrapper.mcad.ibm.com": "quicktest", "controller-tools.k8s.io": "1.0", "resourceName": "quicktest", + "workload.codeflare.dev/appwrapper": "quicktest", "orderedinstance": "m4.xlarge_g4dn.xlarge", }, "managedFields": [ @@ -975,13 +976,14 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "f:metadata": { "f:labels": { ".": {}, - "f:workload.codeflare.dev/appwrapper": {}, + "f:appwrapper.mcad.ibm.com": {}, "f:controller-tools.k8s.io": {}, "f:resourceName": {}, + "f:workload.codeflare.dev/appwrapper": {}, }, "f:ownerReferences": { ".": {}, - 'k:{"uid":"6334fc1b-471e-4876-8e7b-0b2277679235"}': {}, + 'k:{"uid":"a29b1a7a-0992-4860-a8d5-a689a751a3e8"}': {}, }, }, "f:spec": { @@ -1017,41 +1019,53 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "f:serviceType": {}, "f:template": { ".": {}, - "f:spec": {".": {}, "f:containers": {}}, + "f:spec": { + ".": {}, + "f:affinity": { + ".": {}, + "f:nodeAffinity": { + ".": {}, + "f:requiredDuringSchedulingIgnoredDuringExecution": {}, + }, + }, + "f:imagePullSecrets": {}, + "f:volumes": {}, + }, }, }, "f:rayVersion": {}, "f:workerGroupSpecs": {}, }, }, - "manager": "mcad-controller", + "manager": "codeflare-operator", "operation": "Update", - "time": "2023-02-22T16:26:07Z", + "time": "2024-03-05T09:55:37Z", }, { - "apiVersion": "ray.io/v1", + "apiVersion": "ray.io/v1alpha1", "fieldsType": "FieldsV1", "fieldsV1": { "f:status": { ".": {}, - "f:availableWorkerReplicas": {}, "f:desiredWorkerReplicas": {}, "f:endpoints": { ".": {}, "f:client": {}, "f:dashboard": {}, "f:gcs": {}, + "f:metrics": {}, }, + "f:head": {".": {}, "f:serviceIP": {}}, "f:lastUpdateTime": {}, "f:maxWorkerReplicas": {}, "f:minWorkerReplicas": {}, - "f:state": {}, + "f:observedGeneration": {}, } }, "manager": "manager", "operation": "Update", "subresource": "status", - "time": "2023-02-22T16:26:16Z", + "time": "2024-03-05T09:55:37Z", }, ], "name": "quicktest", @@ -1063,11 +1077,11 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "controller": True, "kind": "AppWrapper", "name": "quicktest", - "uid": "6334fc1b-471e-4876-8e7b-0b2277679235", + "uid": "a29b1a7a-0992-4860-a8d5-a689a751a3e8", } ], - "resourceVersion": "9482407", - "uid": "44d45d1f-26c8-43e7-841f-831dbd8c1285", + "resourceVersion": "5305674", + "uid": "820d065d-bf0c-4675-b951-d32ea496020e", }, "spec": { "autoscalerOptions": { @@ -1088,9 +1102,50 @@ def get_ray_obj(group, version, namespace, plural, cls=None): }, "serviceType": "ClusterIP", "template": { + "metadata": {}, "spec": { + "affinity": { + "nodeAffinity": { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + { + "matchExpressions": [ + { + "key": "quicktest", + "operator": "In", + "values": ["quicktest"], + } + ] + } + ] + } + } + }, "containers": [ { + "env": [ + { + "name": "MY_POD_IP", + "valueFrom": { + "fieldRef": { + "fieldPath": "status.podIP" + } + }, + }, + {"name": "RAY_USE_TLS", "value": "0"}, + { + "name": "RAY_TLS_SERVER_CERT", + "value": "/home/ray/workspace/tls/server.crt", + }, + { + "name": "RAY_TLS_SERVER_KEY", + "value": "/home/ray/workspace/tls/server.key", + 
}, + { + "name": "RAY_TLS_CA_CERT", + "value": "/home/ray/workspace/tls/ca.crt", + }, + ], "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", "imagePullPolicy": "Always", "lifecycle": { @@ -1134,12 +1189,62 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "nvidia.com/gpu": 0, }, }, + "volumeMounts": [ + { + "mountPath": "/etc/pki/tls/certs/odh-trusted-ca-bundle.crt", + "name": "odh-trusted-ca-cert", + "subPath": "odh-trusted-ca-bundle.crt", + }, + { + "mountPath": "/etc/ssl/certs/odh-trusted-ca-bundle.crt", + "name": "odh-trusted-ca-cert", + "subPath": "odh-trusted-ca-bundle.crt", + }, + { + "mountPath": "/etc/pki/tls/certs/odh-ca-bundle.crt", + "name": "odh-ca-cert", + "subPath": "odh-ca-bundle.crt", + }, + { + "mountPath": "/etc/ssl/certs/odh-ca-bundle.crt", + "name": "odh-ca-cert", + "subPath": "odh-ca-bundle.crt", + }, + ], } - ] - } + ], + "volumes": [ + { + "configMap": { + "items": [ + { + "key": "ca-bundle.crt", + "path": "odh-trusted-ca-bundle.crt", + } + ], + "name": "odh-trusted-ca-bundle", + "optional": True, + }, + "name": "odh-trusted-ca-cert", + }, + { + "configMap": { + "items": [ + { + "key": "odh-ca-bundle.crt", + "path": "odh-ca-bundle.crt", + } + ], + "name": "odh-trusted-ca-bundle", + "optional": True, + }, + "name": "odh-ca-cert", + }, + ], + }, }, }, - "rayVersion": "1.12.0", + "rayVersion": "2.7.0", "workerGroupSpecs": [ { "groupName": "small-group-quicktest", @@ -1147,12 +1252,30 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "minReplicas": 1, "rayStartParams": {"block": "true", "num-gpus": "0"}, "replicas": 1, + "scaleStrategy": {}, "template": { "metadata": { "annotations": {"key": "value"}, "labels": {"key": "value"}, }, "spec": { + "affinity": { + "nodeAffinity": { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + { + "matchExpressions": [ + { + "key": "quicktest", + "operator": "In", + "values": ["quicktest"], + } + ] + } + ] + } + } + }, "containers": [ { "env": [ @@ -1163,7 +1286,20 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "fieldPath": "status.podIP" } }, - } + }, + {"name": "RAY_USE_TLS", "value": "0"}, + { + "name": "RAY_TLS_SERVER_CERT", + "value": "/home/ray/workspace/tls/server.crt", + }, + { + "name": "RAY_TLS_SERVER_KEY", + "value": "/home/ray/workspace/tls/server.key", + }, + { + "name": "RAY_TLS_CA_CERT", + "value": "/home/ray/workspace/tls/ca.crt", + }, ], "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", "lifecycle": { @@ -1190,24 +1326,76 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "nvidia.com/gpu": 0, }, }, + "volumeMounts": [ + { + "mountPath": "/etc/pki/tls/certs/odh-trusted-ca-bundle.crt", + "name": "odh-trusted-ca-cert", + "subPath": "odh-trusted-ca-bundle.crt", + }, + { + "mountPath": "/etc/ssl/certs/odh-trusted-ca-bundle.crt", + "name": "odh-trusted-ca-cert", + "subPath": "odh-trusted-ca-bundle.crt", + }, + { + "mountPath": "/etc/pki/tls/certs/odh-ca-bundle.crt", + "name": "odh-ca-cert", + "subPath": "odh-ca-bundle.crt", + }, + { + "mountPath": "/etc/ssl/certs/odh-ca-bundle.crt", + "name": "odh-ca-cert", + "subPath": "odh-ca-bundle.crt", + }, + ], } ], + "volumes": [ + { + "configMap": { + "items": [ + { + "key": "ca-bundle.crt", + "path": "odh-trusted-ca-bundle.crt", + } + ], + "name": "odh-trusted-ca-bundle", + "optional": True, + }, + "name": "odh-trusted-ca-cert", + }, + { + "configMap": { + "items": [ + { + "key": 
"odh-ca-bundle.crt", + "path": "odh-ca-bundle.crt", + } + ], + "name": "odh-trusted-ca-bundle", + "optional": True, + }, + "name": "odh-ca-cert", + }, + ], }, }, } ], }, "status": { - "availableWorkerReplicas": 2, "desiredWorkerReplicas": 1, "endpoints": { "client": "10001", "dashboard": "8265", "gcs": "6379", + "metrics": "8080", }, - "lastUpdateTime": "2023-02-22T16:26:16Z", + "head": {"serviceIP": "172.30.179.88"}, + "lastUpdateTime": "2024-03-05T09:55:37Z", "maxWorkerReplicas": 1, "minWorkerReplicas": 1, + "observedGeneration": 1, "state": "ready", }, } From 28ad59ce84a6b7cc49c7f098c6348cf57489e2fb Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 11 Mar 2024 15:10:32 +0000 Subject: [PATCH 158/496] Altered local_interactive logic & added tests --- src/codeflare_sdk/cluster/cluster.py | 143 ++++++++++++++++---------- tests/unit_test.py | 148 ++++++++++++++++++++++++--- 2 files changed, 222 insertions(+), 69 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 6f48e9ab..2c9e6cfd 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -468,7 +468,7 @@ def cluster_dashboard_uri(self) -> str: elif "route.openshift.io/termination" in annotations: protocol = "https" return f"{protocol}://{ingress.spec.rules[0].host}" - return "Dashboard ingress not available yet, have you run cluster.up()?" + return "Dashboard not available yet, have you run cluster.up()?" def list_jobs(self) -> List: """ @@ -502,21 +502,55 @@ def torchx_config( to_return["requirements"] = requirements return to_return - def from_k8_cluster_object( - rc, mcad=True, ingress_domain=None, ingress_options={}, write_to_file=False - ): + def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}, write_to_file=False): + config_check() + cluster_name = rc["metadata"]["name"] + if is_openshift_cluster(): + try: + api_instance = client.CustomObjectsApi(api_config_handler()) + routes = api_instance.list_namespaced_custom_object( + group="route.openshift.io", + version="v1", + namespace=rc["metadata"]["namespace"], + plural="routes", + ) + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) + for route in routes["items"]: + if ( + route["metadata"]["name"] == f"rayclient-{cluster_name}" + and route["spec"]["port"]["targetPort"] == "client" + ): + local_interactive = True + break + else: + local_interactive = False + else: + try: + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress( + rc["metadata"]["namespace"] + ) + except Exception as e: # pragma no cover + return _kube_api_error_handling(e) + for ingress in ingresses.items: + if ( + f"rayclient-{cluster_name}" == ingress.metadata.name + and ingress.spec.rules[0].http.paths[0].backend.service.port.number + == 10001 + ): + local_interactive = True + break + else: + local_interactive = False + machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] else [] ) - for volume in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["volumes"]: - if volume["name"] == "ca-vol": - local_interactive = True - break - else: - local_interactive = False - if local_interactive: + + if local_interactive and ingress_domain == None: ingress_domain = get_ingress_domain_from_client( rc["metadata"]["name"], rc["metadata"]["namespace"] ) @@ -654,56 +688,57 @@ def get_cluster( for rc in rcs["items"]: if rc["metadata"]["name"] == 
cluster_name: mcad = _check_aw_exists(cluster_name, namespace) - - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress(namespace) - ingress_host = None - ingress_options = {} - for ingress in ingresses.items: - # Search for ingress with AppWrapper name as the owner - if ( - "ingress-owner" in ingress.metadata.labels - and ingress.metadata.labels["ingress-owner"] == cluster_name - ): - ingress_host = ingress.spec.rules[0].host + ingress_host = None + ingress_options = {} + if is_openshift_cluster() == False: + try: + config_check() + api_instance = client.NetworkingV1Api(api_config_handler()) + ingresses = api_instance.list_namespaced_ingress(namespace) + for ingress in ingresses.items: + # Search for ingress with AppWrapper name as the owner if ( - "ingress-options" in ingress.metadata.labels - and ingress.metadata.labels["ingress-options"] == "true" + "ingress-owner" in ingress.metadata.labels + and ingress.metadata.labels["ingress-owner"] == cluster_name ): - ingress_name = ingress.metadata.name - port = ( - ingress.spec.rules[0] - .http.paths[0] - .backend.service.port.number - ) - annotations = ingress.metadata.annotations - path = ingress.spec.rules[0].http.paths[0].path - ingress_class_name = ingress.spec.ingress_class_name - path_type = ingress.spec.rules[0].http.paths[0].path_type - - ingress_options = { - "ingresses": [ - { - "ingressName": ingress_name, - "port": port, - "annotations": annotations, - "ingressClassName": ingress_class_name, - "pathType": path_type, - "path": path, - "host": ingress_host, - } - ] - } - except Exception as e: - return _kube_api_error_handling(e) + ingress_host = ingress.spec.rules[0].host + if ( + "ingress-options" in ingress.metadata.labels + and ingress.metadata.labels["ingress-options"] == "true" + ): + ingress_name = ingress.metadata.name + port = ( + ingress.spec.rules[0] + .http.paths[0] + .backend.service.port.number + ) + annotations = ingress.metadata.annotations + path = ingress.spec.rules[0].http.paths[0].path + ingress_class_name = ingress.spec.ingress_class_name + path_type = ( + ingress.spec.rules[0].http.paths[0].path_type + ) + + ingress_options = { + "ingresses": [ + { + "ingressName": ingress_name, + "port": port, + "annotations": annotations, + "ingressClassName": ingress_class_name, + "pathType": path_type, + "path": path, + "host": ingress_host, + } + ] + } + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) # We gather the ingress domain from the host if ingress_host is not None and ingress_options == {}: ingress_domain = ingress_host.split(".", 1)[1] else: ingress_domain = None - return Cluster.from_k8_cluster_object( rc, mcad=mcad, diff --git a/tests/unit_test.py b/tests/unit_test.py index 1fda0227..157e1e4e 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -618,7 +618,8 @@ def test_cluster_uris(mocker): mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", return_value=ingress_retrieval( - port=8265, annotations={"route.openshift.io/termination": "passthrough"} + cluster_name="unit-test-cluster", + annotations={"route.openshift.io/termination": "passthrough"}, ), ) assert ( @@ -627,7 +628,7 @@ def test_cluster_uris(mocker): ) mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval(port=8265), + return_value=ingress_retrieval(), ) assert cluster.cluster_uri() == "ray://unit-test-cluster-head-svc.ns.svc:10001" assert ( @@ -640,7 +641,7 
@@ def test_cluster_uris(mocker): ) assert ( cluster.cluster_dashboard_uri() - == "Dashboard ingress not available yet, have you run cluster.up()?" + == "Dashboard not available yet, have you run cluster.up()?" ) @@ -675,15 +676,15 @@ def ray_addr(self, *args): return self._address -def ingress_retrieval(port, annotations=None, cluster_name="unit-test-cluster"): +def mocked_ingress(port, cluster_name="unit-test-cluster", annotations: dict = None): labels = {"ingress-owner": cluster_name, "ingress-options": "false"} if port == 10001: - serviceName = "client" + name = f"rayclient-{cluster_name}" else: - serviceName = "dashboard" + name = f"ray-dashboard-{cluster_name}" mock_ingress = client.V1Ingress( metadata=client.V1ObjectMeta( - name=f"ray-{serviceName}-{cluster_name}", + name=name, annotations=annotations, labels=labels, owner_references=[ @@ -695,7 +696,7 @@ def ingress_retrieval(port, annotations=None, cluster_name="unit-test-cluster"): spec=client.V1IngressSpec( rules=[ client.V1IngressRule( - host=f"ray-{serviceName}-{cluster_name}-ns.apps.cluster.awsroute.org", + host=f"{name}-ns.apps.cluster.awsroute.org", http=client.V1HTTPIngressRuleValue( paths=[ client.V1HTTPIngressPath( @@ -714,7 +715,23 @@ def ingress_retrieval(port, annotations=None, cluster_name="unit-test-cluster"): ], ), ) - mock_ingress_list = client.V1IngressList(items=[mock_ingress]) + return mock_ingress + + +def ingress_retrieval( + cluster_name="unit-test-cluster", client_ing: bool = False, annotations: dict = None +): + dashboard_ingress = mocked_ingress(8265, cluster_name, annotations) + if client_ing: + client_ingress = mocked_ingress( + 10001, cluster_name=cluster_name, annotations=annotations + ) + mock_ingress_list = client.V1IngressList( + items=[client_ingress, dashboard_ingress] + ) + else: + mock_ingress_list = client.V1IngressList(items=[dashboard_ingress]) + return mock_ingress_list @@ -736,7 +753,7 @@ def test_ray_job_wrapping(mocker): ) mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval(8265), + return_value=ingress_retrieval(), ) assert cluster.list_jobs() == cluster.cluster_dashboard_uri() @@ -2057,6 +2074,108 @@ def get_aw_obj(group, version, namespace, plural): return api_obj1 +def route_list_retrieval(group, version, namespace, plural): + assert group == "route.openshift.io" + assert version == "v1" + assert namespace == "ns" + assert plural == "routes" + return { + "kind": "RouteList", + "apiVersion": "route.openshift.io/v1", + "metadata": {"resourceVersion": "6072398"}, + "items": [ + { + "metadata": { + "name": "ray-dashboard-quicktest", + "namespace": "ns", + }, + "spec": { + "host": "ray-dashboard-quicktest-opendatahub.apps.cluster.awsroute.org", + "to": { + "kind": "Service", + "name": "quicktest-head-svc", + "weight": 100, + }, + "port": {"targetPort": "dashboard"}, + "tls": {"termination": "edge"}, + }, + }, + { + "metadata": { + "name": "rayclient-quicktest", + "namespace": "ns", + }, + "spec": { + "host": "rayclient-quicktest-opendatahub.apps.cluster.awsroute.org", + "to": { + "kind": "Service", + "name": "quicktest-head-svc", + "weight": 100, + }, + "port": {"targetPort": "client"}, + "tls": {"termination": "passthrough"}, + }, + }, + ], + } + + +def test_get_cluster_openshift(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + # Mock the client.ApisApi function to return a mock object + mock_api = MagicMock() + mock_api.get_api_versions.return_value.groups = [ + 
MagicMock(versions=[MagicMock(group_version="route.openshift.io/v1")]) + ] + mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) + + assert is_openshift_cluster() == True + + def custom_side_effect(group, version, namespace, plural, **kwargs): + if plural == "routes": + return route_list_retrieval("route.openshift.io", "v1", "ns", "routes") + elif plural == "rayclusters": + return get_ray_obj("ray.io", "v1", "ns", "rayclusters") + elif plural == "appwrappers": + return get_aw_obj("workload.codeflare.dev", "v1beta1", "ns", "appwrappers") + + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", get_aw_obj + ) + + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=custom_side_effect, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + return_value=get_named_aw, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + side_effect=route_list_retrieval("route.openshift.io", "v1", "ns", "routes")[ + "items" + ], + ) + + cluster = get_cluster("quicktest") + cluster_config = cluster.config + assert cluster_config.name == "quicktest" and cluster_config.namespace == "ns" + assert ( + "m4.xlarge" in cluster_config.machine_types + and "g4dn.xlarge" in cluster_config.machine_types + ) + assert cluster_config.min_cpus == 1 and cluster_config.max_cpus == 1 + assert cluster_config.min_memory == 2 and cluster_config.max_memory == 2 + assert cluster_config.num_gpus == 0 + assert cluster_config.local_interactive == True + assert ( + cluster_config.image + == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" + ) + assert cluster_config.num_workers == 1 + + def test_get_cluster(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") @@ -2070,7 +2189,7 @@ def test_get_cluster(mocker): ) mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval(port=8265, cluster_name="quicktest"), + return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), ) cluster = get_cluster("quicktest") cluster_config = cluster.config @@ -2083,6 +2202,7 @@ def test_get_cluster(mocker): assert cluster_config.min_memory == 2 and cluster_config.max_memory == 2 assert cluster_config.num_gpus == 0 assert cluster_config.instascale + assert cluster_config.local_interactive assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -2095,9 +2215,7 @@ def test_get_ingress_domain_from_client(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "kubernetes.client.NetworkingV1Api.read_namespaced_ingress", - return_value=ingress_retrieval( - port=8265, cluster_name="unit-test-cluster" - ).items[0], + return_value=ingress_retrieval(cluster_name="unit-test-cluster").items[0], ) ingress_domain = get_ingress_domain_from_client("unit-test-cluster", "ns") @@ -2354,7 +2472,7 @@ def test_wait_ready(mocker, capsys): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval(8265), + return_value=ingress_retrieval(), ) mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) From 
d919c23636f289f2bc7f7cd7f7b4356d9476e8b1 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 19 Mar 2024 11:25:15 +0000 Subject: [PATCH 159/496] Added local_interactive annotation --- src/codeflare_sdk/cluster/cluster.py | 78 +++---------------- .../templates/base-template.yaml | 2 + src/codeflare_sdk/utils/generate_yaml.py | 6 ++ tests/test-case-bad.yaml | 2 + tests/test-case-no-mcad.yamls | 2 + tests/test-case-prio.yaml | 2 + tests/test-case.yaml | 2 + tests/unit_test.py | 33 +++----- 8 files changed, 36 insertions(+), 91 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 2c9e6cfd..d21fc036 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -504,45 +504,13 @@ def torchx_config( def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}, write_to_file=False): config_check() - cluster_name = rc["metadata"]["name"] - if is_openshift_cluster(): - try: - api_instance = client.CustomObjectsApi(api_config_handler()) - routes = api_instance.list_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=rc["metadata"]["namespace"], - plural="routes", - ) - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) - for route in routes["items"]: - if ( - route["metadata"]["name"] == f"rayclient-{cluster_name}" - and route["spec"]["port"]["targetPort"] == "client" - ): - local_interactive = True - break - else: - local_interactive = False + if ( + rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"] + == "true" + ): + local_interactive = True else: - try: - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress( - rc["metadata"]["namespace"] - ) - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) - for ingress in ingresses.items: - if ( - f"rayclient-{cluster_name}" == ingress.metadata.name - and ingress.spec.rules[0].http.paths[0].backend.service.port.number - == 10001 - ): - local_interactive = True - break - else: - local_interactive = False + local_interactive = False machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") @@ -551,9 +519,10 @@ def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={ ) if local_interactive and ingress_domain == None: - ingress_domain = get_ingress_domain_from_client( - rc["metadata"]["name"], rc["metadata"]["namespace"] - ) + ingress_domain = rc["metadata"]["annotations"][ + "sdk.codeflare.dev/ingress_domain" + ] + cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], @@ -1080,30 +1049,3 @@ def _copy_to_ray(cluster: Cluster) -> RayCluster: if ray.status == CodeFlareClusterStatus.READY: ray.status = RayClusterStatus.READY return ray - - -def get_ingress_domain_from_client(cluster_name: str, namespace: str = "default"): - if is_openshift_cluster(): - try: - config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - route = api_instance.get_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=namespace, - plural="routes", - name=f"rayclient-{cluster_name}", - ) - return route["spec"]["host"].split(".", 1)[1] - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) - else: - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingress = api_instance.read_namespaced_ingress( - 
f"rayclient-{cluster_name}", namespace - ) - return ingress.spec.rules[0].host.split(".", 1)[1] - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index d2c6074f..be3e6926 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -40,6 +40,8 @@ spec: apiVersion: ray.io/v1 kind: RayCluster metadata: + annotations: + sdk.codeflare.dev/local_interactive: "false" labels: workload.codeflare.dev/appwrapper: "aw-kuberay" controller-tools.k8s.io: "1.0" diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index fe012c6f..3d4dad0c 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -461,6 +461,12 @@ def enable_local_interactive(resources, cluster_name, namespace, ingress_domain) namespace, ingress_domain, ) + item["generictemplate"]["metadata"]["annotations"][ + "sdk.codeflare.dev/local_interactive" + ] = "true" + item["generictemplate"]["metadata"]["annotations"][ + "sdk.codeflare.dev/ingress_domain" + ] = ingress_domain item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index 6e969e01..56fc56fe 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -32,6 +32,8 @@ spec: apiVersion: ray.io/v1 kind: RayCluster metadata: + annotations: + sdk.codeflare.dev/local_interactive: 'false' labels: workload.codeflare.dev/appwrapper: unit-test-cluster controller-tools.k8s.io: '1.0' diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 299ff9a8..ff6098b8 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -2,6 +2,8 @@ apiVersion: ray.io/v1 kind: RayCluster metadata: + annotations: + sdk.codeflare.dev/local_interactive: 'false' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: unit-test-cluster-ray diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 1e72c442..4da4b538 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -32,6 +32,8 @@ spec: apiVersion: ray.io/v1 kind: RayCluster metadata: + annotations: + sdk.codeflare.dev/local_interactive: 'false' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: prio-test-cluster diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 76285209..aa90f4d2 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -31,6 +31,8 @@ spec: apiVersion: ray.io/v1 kind: RayCluster metadata: + annotations: + sdk.codeflare.dev/local_interactive: 'false' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: unit-test-cluster diff --git a/tests/unit_test.py b/tests/unit_test.py index 157e1e4e..35f4cf70 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -40,7 +40,6 @@ _app_wrapper_status, _ray_cluster_status, _get_ingress_domain, - get_ingress_domain_from_client, ) from codeflare_sdk.cluster.auth import ( TokenAuthentication, @@ -978,6 +977,10 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "metadata": { "creationTimestamp": "2024-03-05T09:55:37Z", "generation": 1, + "annotations": { + "sdk.codeflare.dev/local_interactive": "true", + "sdk.codeflare.dev/ingress_domain": "apps.cluster.awsroute.org", + }, "labels": { "appwrapper.mcad.ibm.com": "quicktest", "controller-tools.k8s.io": "1.0", @@ 
-1531,6 +1534,9 @@ def get_aw_obj(group, version, namespace, plural): "apiVersion": "ray.io/v1", "kind": "RayCluster", "metadata": { + "annotations": { + "sdk.codeflare.dev/local_interactive": "false" + }, "labels": { "workload.codeflare.dev/appwrapper": "quicktest1", "controller-tools.k8s.io": "1.0", @@ -1859,6 +1865,9 @@ def get_aw_obj(group, version, namespace, plural): "apiVersion": "ray.io/v1", "kind": "RayCluster", "metadata": { + "annotations": { + "sdk.codeflare.dev/local_interactive": "false" + }, "labels": { "workload.codeflare.dev/appwrapper": "quicktest2", "controller-tools.k8s.io": "1.0", @@ -2210,28 +2219,6 @@ def test_get_cluster(mocker): assert cluster_config.num_workers == 1 -def test_get_ingress_domain_from_client(mocker): - mocker.patch("kubernetes.config.load_kube_config") - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.NetworkingV1Api.read_namespaced_ingress", - return_value=ingress_retrieval(cluster_name="unit-test-cluster").items[0], - ) - - ingress_domain = get_ingress_domain_from_client("unit-test-cluster", "ns") - assert ingress_domain == "apps.cluster.awsroute.org" - - mocker.patch( - "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=True - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", - side_effect=route_retrieval, - ) - ingress_domain = get_ingress_domain_from_client("unit-test-cluster", "ns") - assert ingress_domain == "apps.cluster.awsroute.org" - - def route_retrieval(group, version, namespace, plural, name): assert group == "route.openshift.io" assert version == "v1" From 46e9f7de8796c507cfcc1d1e3ddf79b9064af68a Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 20 Mar 2024 16:35:11 +0000 Subject: [PATCH 160/496] Updated tests & get_cluster for in memory --- src/codeflare_sdk/cluster/cluster.py | 4 +++- tests/test-default-appwrapper.yaml | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index d21fc036..8d564d1a 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -502,7 +502,9 @@ def torchx_config( to_return["requirements"] = requirements return to_return - def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}, write_to_file=False): + def from_k8_cluster_object( + rc, mcad=True, ingress_domain=None, ingress_options={}, write_to_file=False + ): config_check() if ( rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"] diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index 1ed93670..c2979ffb 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -29,6 +29,8 @@ spec: apiVersion: ray.io/v1 kind: RayCluster metadata: + annotations: + sdk.codeflare.dev/local_interactive: 'false' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: unit-test-default-cluster From 032b74fdf94a92055089d93cbe0c49cd71c3b9dd Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 22 Mar 2024 15:42:29 +0000 Subject: [PATCH 161/496] Updated ingress_domain annotation --- src/codeflare_sdk/utils/generate_yaml.py | 10 ++++++++++ tests/test-case-no-mcad.yamls | 1 + tests/test-case-prio.yaml | 1 + tests/test-case.yaml | 1 + tests/test-default-appwrapper.yaml | 1 + tests/unit_test.py | 1 + 6 files changed, 15 insertions(+) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py 
From 032b74fdf94a92055089d93cbe0c49cd71c3b9dd Mon Sep 17 00:00:00 2001
From: Bobbins228
Date: Fri, 22 Mar 2024 15:42:29 +0000
Subject: [PATCH 161/496] Updated ingress_domain annotation

---
 src/codeflare_sdk/utils/generate_yaml.py | 10 ++++++++++
 tests/test-case-no-mcad.yamls            |  1 +
 tests/test-case-prio.yaml                |  1 +
 tests/test-case.yaml                     |  1 +
 tests/test-default-appwrapper.yaml       |  1 +
 tests/unit_test.py                       |  1 +
 6 files changed, 15 insertions(+)

diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py
index 3d4dad0c..eaa21413 100755
--- a/src/codeflare_sdk/utils/generate_yaml.py
+++ b/src/codeflare_sdk/utils/generate_yaml.py
@@ -473,6 +473,13 @@ def enable_local_interactive(resources, cluster_name, namespace, ingress_domain)
     ][0].get("command")[2] = command


+def apply_ingress_domain_annotation(resources, ingress_domain):
+    item = resources["resources"].get("GenericItems")[0]
+    item["generictemplate"]["metadata"]["annotations"][
+        "sdk.codeflare.dev/ingress_domain"
+    ] = ingress_domain
+
+
 def del_from_list_by_name(l: list, target: typing.List[str]) -> list:
     return [x for x in l if x["name"] not in target]

@@ -740,6 +747,9 @@ def generate_appwrapper(
             ingress_options,
             ingress_domain,
         )
+    if ingress_domain is not None:
+        apply_ingress_domain_annotation(resources, ingress_domain)
+
     if local_interactive:
         enable_local_interactive(resources, cluster_name, namespace, ingress_domain)
     else:
diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls
index ff6098b8..7af787c8 100644
--- a/tests/test-case-no-mcad.yamls
+++ b/tests/test-case-no-mcad.yamls
@@ -3,6 +3,7 @@ apiVersion: ray.io/v1
 kind: RayCluster
 metadata:
   annotations:
+    sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org
     sdk.codeflare.dev/local_interactive: 'false'
   labels:
     controller-tools.k8s.io: '1.0'
diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml
index 4da4b538..5bedb35f 100644
--- a/tests/test-case-prio.yaml
+++ b/tests/test-case-prio.yaml
@@ -33,6 +33,7 @@ spec:
       kind: RayCluster
       metadata:
         annotations:
+          sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org
           sdk.codeflare.dev/local_interactive: 'false'
         labels:
           controller-tools.k8s.io: '1.0'
diff --git a/tests/test-case.yaml b/tests/test-case.yaml
index aa90f4d2..d41f9be4 100644
--- a/tests/test-case.yaml
+++ b/tests/test-case.yaml
@@ -32,6 +32,7 @@ spec:
       kind: RayCluster
       metadata:
         annotations:
+          sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org
           sdk.codeflare.dev/local_interactive: 'false'
         labels:
           controller-tools.k8s.io: '1.0'
diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml
index c2979ffb..d2ac065e 100644
--- a/tests/test-default-appwrapper.yaml
+++ b/tests/test-default-appwrapper.yaml
@@ -30,6 +30,7 @@ spec:
       kind: RayCluster
      metadata:
         annotations:
+          sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org
           sdk.codeflare.dev/local_interactive: 'false'
         labels:
           controller-tools.k8s.io: '1.0'
diff --git a/tests/unit_test.py b/tests/unit_test.py
index 35f4cf70..87f284ee 100644
--- a/tests/unit_test.py
+++ b/tests/unit_test.py
@@ -3434,6 +3434,7 @@ def test_cleanup():
     os.remove(f"{aw_dir}prio-test-cluster.yaml")
     os.remove(f"{aw_dir}test.yaml")
     os.remove(f"{aw_dir}raytest2.yaml")
+    os.remove(f"{aw_dir}unit-test-cluster-ray.yaml")
     os.remove("tls-cluster-namespace/ca.crt")
     os.remove("tls-cluster-namespace/tls.crt")
     os.remove("tls-cluster-namespace/tls.key")
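Note: the apply_ingress_domain_annotation() helper added above only touches the first GenericItem in the AppWrapper resources dictionary. A minimal sketch of its effect on a stripped-down stand-in for that structure (the real dict is loaded from base-template.yaml, and the domain value here is illustrative):

    # Simplified stand-in for the loaded AppWrapper template.
    resources = {
        "resources": {
            "GenericItems": [
                {"generictemplate": {"metadata": {"annotations": {}}}}
            ]
        }
    }

    item = resources["resources"].get("GenericItems")[0]
    item["generictemplate"]["metadata"]["annotations"][
        "sdk.codeflare.dev/ingress_domain"
    ] = "apps.example.com"

    # The generated RayCluster metadata now carries the ingress_domain
    # annotation, which is what the updated test-case YAML files expect.
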
From ef8e94b5f0cdb58bb22442bbc580e2252b26eba9 Mon Sep 17 00:00:00 2001
From: Bobbins228
Date: Mon, 25 Mar 2024 16:58:32 +0000
Subject: [PATCH 162/496] Added openshift_oauth to get_cluster

---
 src/codeflare_sdk/cluster/cluster.py | 36 ++++++++++++++++++++--------
 1 file changed, 26 insertions(+), 10 deletions(-)

diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py
index 8d564d1a..c0a015e6 100644
--- a/src/codeflare_sdk/cluster/cluster.py
+++ b/src/codeflare_sdk/cluster/cluster.py
@@ -506,6 +506,7 @@ def from_k8_cluster_object(
         rc, mcad=True, ingress_domain=None, ingress_options={}, write_to_file=False
     ):
         config_check()
+        openshift_oauth = False
         if (
             rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"]
             == "true"
@@ -513,7 +514,15 @@
             local_interactive = True
         else:
             local_interactive = False
-
+        if "codeflare.dev/oauth" in rc["metadata"]["annotations"]:
+            if rc["metadata"]["annotations"]["codeflare.dev/oauth"] == "True":
+                openshift_oauth = True
+        else:
+            for container in rc["spec"]["headGroupSpec"]["template"]["spec"][
+                "containers"
+            ]:
+                if "oauth-proxy" in container["name"]:
+                    openshift_oauth = True
         machine_types = (
             rc["metadata"]["labels"]["orderedinstance"].split("_")
             if "orderedinstance" in rc["metadata"]["labels"]
@@ -530,12 +539,16 @@
             namespace=rc["metadata"]["namespace"],
             machine_types=machine_types,
             num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"],
-            min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
-                "containers"
-            ][0]["resources"]["requests"]["cpu"],
-            max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
-                "containers"
-            ][0]["resources"]["limits"]["cpu"],
+            min_cpus=int(
+                rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][
+                    "resources"
+                ]["requests"]["cpu"]
+            ),
+            max_cpus=int(
+                rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][
+                    "resources"
+                ]["limits"]["cpu"]
+            ),
             min_memory=int(
                 rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][
                     "resources"
                 ]["requests"]["memory"][:-1]
             ),
@@ -546,9 +559,11 @@
                 "resources"
             ]["limits"]["memory"][:-1]
         ),
-            num_gpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
-                "containers"
-            ][0]["resources"]["limits"]["nvidia.com/gpu"],
+            num_gpus=int(
+                rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][
+                    "resources"
+                ]["limits"]["nvidia.com/gpu"]
+            ),
             instascale=True if machine_types else False,
             image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][
                 0
@@ -558,6 +573,7 @@
             ingress_domain=ingress_domain,
             ingress_options=ingress_options,
             write_to_file=write_to_file,
+            openshift_oauth=openshift_oauth,
         )
         return Cluster(cluster_config)

From 62519b2d0323bfdb9f63b02208b5cdf73d1256f4 Mon Sep 17 00:00:00 2001
From: Bobbins228
Date: Mon, 25 Mar 2024 17:29:43 +0000
Subject: [PATCH 163/496] Review changes local_interactive

---
 src/codeflare_sdk/cluster/cluster.py           | 4 ++--
 src/codeflare_sdk/templates/base-template.yaml | 2 +-
 src/codeflare_sdk/utils/generate_yaml.py       | 2 +-
 tests/test-case-bad.yaml                       | 2 +-
 tests/test-case-no-mcad.yamls                  | 2 +-
 tests/test-case-prio.yaml                      | 2 +-
 tests/test-case.yaml                           | 2 +-
 tests/test-default-appwrapper.yaml             | 2 +-
 tests/unit_test.py                             | 8 ++++----
 9 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py
index c0a015e6..8aebf42b 100644
--- a/src/codeflare_sdk/cluster/cluster.py
+++ b/src/codeflare_sdk/cluster/cluster.py
@@ -509,7 +509,7 @@ def from_k8_cluster_object(
         openshift_oauth = False
         if (
             rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"]
-            == "true"
+            == "True"
         ):
             local_interactive = True
         else:
@@ -677,7 +677,7 @@ def get_cluster(
             mcad = _check_aw_exists(cluster_name, namespace)
             ingress_host = None
             ingress_options = {}
-            if is_openshift_cluster() == False:
+            if not is_openshift_cluster():
                 try:
                     config_check()
                     api_instance = client.NetworkingV1Api(api_config_handler())
diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml
index
be3e6926..fb6ef427 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -41,7 +41,7 @@ spec: kind: RayCluster metadata: annotations: - sdk.codeflare.dev/local_interactive: "false" + sdk.codeflare.dev/local_interactive: "False" labels: workload.codeflare.dev/appwrapper: "aw-kuberay" controller-tools.k8s.io: "1.0" diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index eaa21413..da65defd 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -463,7 +463,7 @@ def enable_local_interactive(resources, cluster_name, namespace, ingress_domain) ) item["generictemplate"]["metadata"]["annotations"][ "sdk.codeflare.dev/local_interactive" - ] = "true" + ] = "True" item["generictemplate"]["metadata"]["annotations"][ "sdk.codeflare.dev/ingress_domain" ] = ingress_domain diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index 56fc56fe..aeccf519 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -33,7 +33,7 @@ spec: kind: RayCluster metadata: annotations: - sdk.codeflare.dev/local_interactive: 'false' + sdk.codeflare.dev/local_interactive: 'False' labels: workload.codeflare.dev/appwrapper: unit-test-cluster controller-tools.k8s.io: '1.0' diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 7af787c8..b15833fe 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -4,7 +4,7 @@ kind: RayCluster metadata: annotations: sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org - sdk.codeflare.dev/local_interactive: 'false' + sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: unit-test-cluster-ray diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 5bedb35f..c81d4396 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -34,7 +34,7 @@ spec: metadata: annotations: sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org - sdk.codeflare.dev/local_interactive: 'false' + sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: prio-test-cluster diff --git a/tests/test-case.yaml b/tests/test-case.yaml index d41f9be4..d7c31a11 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -33,7 +33,7 @@ spec: metadata: annotations: sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org - sdk.codeflare.dev/local_interactive: 'false' + sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: unit-test-cluster diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index d2ac065e..c9da340c 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -31,7 +31,7 @@ spec: metadata: annotations: sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org - sdk.codeflare.dev/local_interactive: 'false' + sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: unit-test-default-cluster diff --git a/tests/unit_test.py b/tests/unit_test.py index 87f284ee..3edadc63 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -978,7 +978,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "creationTimestamp": "2024-03-05T09:55:37Z", "generation": 1, "annotations": { - "sdk.codeflare.dev/local_interactive": 
"true", + "sdk.codeflare.dev/local_interactive": "True", "sdk.codeflare.dev/ingress_domain": "apps.cluster.awsroute.org", }, "labels": { @@ -1535,7 +1535,7 @@ def get_aw_obj(group, version, namespace, plural): "kind": "RayCluster", "metadata": { "annotations": { - "sdk.codeflare.dev/local_interactive": "false" + "sdk.codeflare.dev/local_interactive": "False" }, "labels": { "workload.codeflare.dev/appwrapper": "quicktest1", @@ -1866,7 +1866,7 @@ def get_aw_obj(group, version, namespace, plural): "kind": "RayCluster", "metadata": { "annotations": { - "sdk.codeflare.dev/local_interactive": "false" + "sdk.codeflare.dev/local_interactive": "False" }, "labels": { "workload.codeflare.dev/appwrapper": "quicktest2", @@ -2138,7 +2138,7 @@ def test_get_cluster_openshift(mocker): ] mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) - assert is_openshift_cluster() == True + assert is_openshift_cluster() def custom_side_effect(group, version, namespace, plural, **kwargs): if plural == "routes": From f1a86223862614aae64f0cd8b57759e5d79bf19e Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 27 Mar 2024 19:54:31 +0000 Subject: [PATCH 164/496] Review changes: conditionals --- src/codeflare_sdk/cluster/cluster.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 8aebf42b..af8b6f6d 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -515,14 +515,14 @@ def from_k8_cluster_object( else: local_interactive = False if "codeflare.dev/oauth" in rc["metadata"]["annotations"]: - if rc["metadata"]["annotations"]["codeflare.dev/oauth"] == "True": - openshift_oauth = True + openshift_oauth = ( + rc["metadata"]["annotations"]["codeflare.dev/oauth"] == "True" + ) else: for container in rc["spec"]["headGroupSpec"]["template"]["spec"][ "containers" ]: - if "oauth-proxy" in container["name"]: - openshift_oauth = True + openshift_oauth = "oauth-proxy" in container["name"] machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] From 9d7e19311ffd1f6749c52305b65e321f5279e5c4 Mon Sep 17 00:00:00 2001 From: Kevin Date: Wed, 6 Mar 2024 17:11:54 -0500 Subject: [PATCH 165/496] refactor while loops for wait ready Signed-off-by: Kevin --- src/codeflare_sdk/cluster/cluster.py | 41 +++++++++++++--------------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index af8b6f6d..115ac25e 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -379,36 +379,33 @@ def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True Checks every five seconds. """ print("Waiting for requested resources to be set up...") - ready = False - dashboard_ready = False - status = None time = 0 - while not ready: + while True: + if timeout and time >= timeout: + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for cluster to be ready" + ) status, ready = self.status(print_to_console=False) if status == CodeFlareClusterStatus.UNKNOWN: print( "WARNING: Current cluster status is unknown, have you run cluster.up yet?" 
) - if not ready: - if timeout and time >= timeout: - raise TimeoutError( - f"wait() timed out after waiting {timeout}s for cluster to be ready" - ) - sleep(5) - time += 5 + if ready: + break + sleep(5) + time += 5 print("Requested cluster is up and running!") - while dashboard_check and not dashboard_ready: - dashboard_ready = self.is_dashboard_ready() - if not dashboard_ready: - if timeout and time >= timeout: - raise TimeoutError( - f"wait() timed out after waiting {timeout}s for dashboard to be ready" - ) - sleep(5) - time += 5 - if dashboard_ready: - print("Dashboard is ready!") + while dashboard_check: + if timeout and time >= timeout: + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for dashboard to be ready" + ) + if self.is_dashboard_ready(): + print("Dashboard is ready!") + break + sleep(5) + time += 5 def details(self, print_to_console: bool = True) -> RayCluster: cluster = _copy_to_ray(self) From f94b92793163a234499fc43d45d65451f7b5b490 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 28 Mar 2024 14:03:26 +0000 Subject: [PATCH 166/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index c1490035..a8c7e72a 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 90% - 90% + 92% + 92% From 14974342aa89d234c86fd07b150e5fde2e7aa911 Mon Sep 17 00:00:00 2001 From: "openshift-merge-bot[bot]" Date: Thu, 28 Mar 2024 14:03:27 +0000 Subject: [PATCH 167/496] [create-pull-request] automated change --- poetry.lock | 111 +++------------------------------------------------- 1 file changed, 6 insertions(+), 105 deletions(-) diff --git a/poetry.lock b/poetry.lock index 45756ec3..cf848f0b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
[[package]] name = "aiohttp" version = "3.9.1" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -101,7 +100,6 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aiohttp-cors" version = "0.7.0" description = "CORS support for aiohttp" -category = "main" optional = false python-versions = "*" files = [ @@ -116,7 +114,6 @@ aiohttp = ">=1.1" name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -131,7 +128,6 @@ frozenlist = ">=1.1.0" name = "ansicon" version = "1.89.0" description = "Python wrapper for loading Jason Hood's ANSICON" -category = "main" optional = false python-versions = "*" files = [ @@ -143,7 +139,6 @@ files = [ name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -155,7 +150,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -174,7 +168,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "bcrypt" version = "4.0.1" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -209,7 +202,6 @@ typecheck = ["mypy"] name = "blessed" version = "1.20.0" description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." -category = "main" optional = false python-versions = ">=2.7" files = [ @@ -226,7 +218,6 @@ wcwidth = ">=0.1.4" name = "cachetools" version = "5.3.1" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -238,7 +229,6 @@ files = [ name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -250,7 +240,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -315,7 +304,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -415,7 +403,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -430,7 +417,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "codeflare-torchx" version = "0.6.0.dev2" description = "TorchX SDK and Components" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -459,7 +445,6 @@ ray = ["ray (>=1.12.1)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -471,7 +456,6 @@ files = [ name = "colorful" version = "0.5.5" description = "Terminal string styling done right, in Python." -category = "main" optional = false python-versions = "*" files = [ @@ -486,7 +470,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "commonmark" version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" -category = "main" optional = false python-versions = "*" files = [ @@ -501,7 +484,6 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -574,7 +556,6 @@ toml = ["tomli"] name = "cryptography" version = "40.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -616,7 +597,6 @@ tox = ["tox"] name = "distlib" version = "0.3.7" description = "Distribution utilities" -category = "main" optional = false python-versions = "*" files = [ @@ -628,7 +608,6 @@ files = [ name = "docker" version = "6.1.3" description = "A Python library for the Docker Engine API." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -650,7 +629,6 @@ ssh = ["paramiko (>=2.4.3)"] name = "docstring-parser" version = "0.8.1" description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -661,7 +639,6 @@ files = [ name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -676,7 +653,6 @@ test = ["pytest (>=6)"] name = "executing" version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" -category = "main" optional = false python-versions = "*" files = [ @@ -691,7 +667,6 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] name = "filelock" version = "3.12.4" description = "A platform independent file lock." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -708,7 +683,6 @@ typing = ["typing-extensions (>=4.7.1)"] name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -779,7 +753,6 @@ files = [ name = "fsspec" version = "2023.9.2" description = "File-system specification" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -815,7 +788,6 @@ tqdm = ["tqdm"] name = "google-api-core" version = "2.15.0" description = "Google API client core library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -838,7 +810,6 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-auth" version = "2.23.3" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -862,7 +833,6 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "googleapis-common-protos" version = "1.62.0" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -880,7 +850,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "gpustat" version = "1.1.1" description = "An utility to monitor NVIDIA GPU status and usage" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -900,7 +869,6 @@ test = ["mockito (>=1.2.1)", "pytest (>=5.4.1)", "pytest-runner"] name = "grpcio" version = "1.60.0" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -967,7 +935,6 @@ protobuf = ["grpcio-tools (>=1.60.0)"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -979,7 +946,6 @@ files = [ name = "importlib-metadata" version = "6.8.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -999,7 +965,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-resources" version = "6.1.0" description = "Read resources from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1018,7 +983,6 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1030,7 +994,6 @@ files = [ name = "jinxed" version = "1.2.1" description = "Jinxed Terminal Library" -category = "main" optional = false python-versions = "*" files = [ @@ -1045,7 +1008,6 @@ ansicon = {version = "*", markers = "platform_system == \"Windows\""} name = "jsonschema" version = "4.19.1" description = "An implementation of JSON Schema validation for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1069,7 +1031,6 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-specifications" version = "2023.7.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1085,7 +1046,6 @@ referencing = ">=0.28.0" name = "kubernetes" version = "26.1.0" description = "Kubernetes python client" -category = "main" optional = false 
python-versions = ">=3.6" files = [ @@ -1103,7 +1063,7 @@ requests-oauthlib = "*" setuptools = ">=21.0.0" six = ">=1.9.0" urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.0 || >=0.43.0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] @@ -1112,7 +1072,6 @@ adal = ["adal (>=1.0.2)"] name = "mako" version = "1.2.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1132,7 +1091,6 @@ testing = ["pytest"] name = "markdown" version = "3.5" description = "Python implementation of John Gruber's Markdown." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1151,7 +1109,6 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1221,7 +1178,6 @@ files = [ name = "msgpack" version = "1.0.7" description = "MessagePack serializer" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1287,7 +1243,6 @@ files = [ name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1371,7 +1326,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1383,7 +1337,6 @@ files = [ name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1421,7 +1374,6 @@ files = [ name = "nvidia-ml-py" version = "12.535.133" description = "Python Bindings for the NVIDIA Management Library" -category = "main" optional = false python-versions = "*" files = [ @@ -1433,7 +1385,6 @@ files = [ name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1450,7 +1401,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "opencensus" version = "0.11.3" description = "A stats collection and distributed tracing framework" -category = "main" optional = false python-versions = "*" files = [ @@ -1466,7 +1416,6 @@ opencensus-context = ">=0.1.3" name = "opencensus-context" version = "0.1.3" description = "OpenCensus Runtime Context" -category = "main" optional = false python-versions = "*" files = [ @@ -1478,7 +1427,6 @@ files = [ name = "openshift-client" version = "1.0.18" description = "OpenShift python client" -category = "main" optional = false python-versions = "*" files = [ @@ -1495,7 +1443,6 @@ six = "*" name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1507,7 +1454,6 @@ files = [ name = "pandas" version = "2.0.3" description = "Powerful data structures for data analysis, time series, and statistics" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1541,8 +1487,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.21.0", markers = "python_version >= 
\"3.10\""}, {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -1575,7 +1521,6 @@ xml = ["lxml (>=4.6.3)"] name = "paramiko" version = "3.3.1" description = "SSH2 protocol library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1597,7 +1542,6 @@ invoke = ["invoke (>=2.0)"] name = "pdoc3" version = "0.10.0" description = "Auto-generate API documentation for Python projects." -category = "dev" optional = false python-versions = ">= 3.6" files = [ @@ -1613,7 +1557,6 @@ markdown = ">=3.0" name = "pkgutil-resolve-name" version = "1.3.10" description = "Resolve a name to an object." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1625,7 +1568,6 @@ files = [ name = "platformdirs" version = "3.11.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1641,7 +1583,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1657,7 +1598,6 @@ testing = ["pytest", "pytest-benchmark"] name = "prometheus-client" version = "0.19.0" description = "Python client for the Prometheus monitoring system." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1672,7 +1612,6 @@ twisted = ["twisted"] name = "protobuf" version = "4.24.4" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1695,7 +1634,6 @@ files = [ name = "psutil" version = "5.9.6" description = "Cross-platform lib for process and system monitoring in Python." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -1724,7 +1662,6 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] name = "py-spy" version = "0.3.14" description = "Sampling profiler for Python programs" -category = "main" optional = false python-versions = "*" files = [ @@ -1741,7 +1678,6 @@ files = [ name = "pyarrow" version = "14.0.1" description = "Python library for Apache Arrow" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1790,7 +1726,6 @@ numpy = ">=1.16.6" name = "pyasn1" version = "0.5.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1802,7 +1737,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -1817,7 +1751,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1829,7 +1762,6 @@ files = [ name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1882,7 +1814,6 @@ email = ["email-validator (>=1.0.3)"] name = "pygments" version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1897,7 +1828,6 @@ plugins = ["importlib-metadata"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1924,7 +1854,6 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyre-extensions" version = "0.0.30" description = "Type system extensions for use with the pyre type checker" -category = "main" optional = false python-versions = "*" files = [ @@ -1940,7 +1869,6 @@ typing-inspect = "*" name = "pytest" version = "7.4.0" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1963,7 +1891,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-mock" version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1981,7 +1908,6 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "pytest-timeout" version = "2.2.0" description = "pytest plugin to abort hanging tests" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1996,7 +1922,6 @@ pytest = ">=5.0.0" name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -2011,7 +1936,6 @@ six = ">=1.5" name = "pytz" version = "2023.3.post1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -2023,7 +1947,6 @@ files = [ name = "pywin32" version = "306" 
description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" files = [ @@ -2047,7 +1970,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2108,7 +2030,6 @@ files = [ name = "ray" version = "2.7.0" description = "Ray provides a simple, universal API for building distributed applications." -category = "main" optional = false python-versions = "*" files = [ @@ -2149,15 +2070,15 @@ frozenlist = "*" fsspec = {version = "*", optional = true, markers = "extra == \"data\""} gpustat = {version = ">=1.0.0", optional = true, markers = "extra == \"default\""} grpcio = [ - {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\""}, - {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\""}, + {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\" and extra == \"default\""}, + {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""}, ] jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" numpy = [ {version = ">=1.16", markers = "python_version < \"3.9\""}, - {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, {version = ">=1.20", optional = true, markers = "extra == \"data\""}, + {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, ] opencensus = {version = "*", optional = true, markers = "extra == \"default\""} packaging = "*" @@ -2190,7 +2111,6 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. name = "referencing" version = "0.30.2" description = "JSON Referencing + Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2206,7 +2126,6 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2228,7 +2147,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2247,7 +2165,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rich" version = "12.6.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "main" optional = false python-versions = ">=3.6.3,<4.0.0" files = [ @@ -2267,7 +2184,6 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] name = "rpds-py" version = "0.10.4" description = "Python bindings to Rust's persistent data structures (rpds)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2376,7 +2292,6 @@ files = [ name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -2391,7 +2306,6 @@ pyasn1 = ">=0.1.3" name = "setuptools" version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2408,7 +2322,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2420,7 +2333,6 @@ files = [ name = "smart-open" version = "6.4.0" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -2442,7 +2354,6 @@ webhdfs = ["requests"] name = "tabulate" version = "0.9.0" description = "Pretty-print tabular data" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2457,7 +2368,6 @@ widechars = ["wcwidth"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2469,7 +2379,6 @@ files = [ name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2481,7 +2390,6 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." -category = "main" optional = false python-versions = "*" files = [ @@ -2497,7 +2405,6 @@ typing-extensions = ">=3.7.4" name = "tzdata" version = "2023.3" description = "Provider of IANA time zone data" -category = "main" optional = false python-versions = ">=2" files = [ @@ -2509,7 +2416,6 @@ files = [ name = "urllib3" version = "1.26.17" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -2526,7 +2432,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "virtualenv" version = "20.21.0" description = "Virtual Python Environment builder" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2547,7 +2452,6 @@ test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess name = "wcwidth" version = "0.2.12" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -2559,7 +2463,6 @@ files = [ name = "websocket-client" version = "1.6.4" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2576,7 +2479,6 @@ test = ["websockets"] name = "yarl" version = "1.9.4" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2680,7 +2582,6 @@ multidict = ">=4.0" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ From 403cca63c759eb43cc0279cae04f91b83fc1fb55 Mon Sep 17 00:00:00 2001 From: Christian Zaccaria <73656840+ChristianZaccaria@users.noreply.github.com> Date: Wed, 3 Apr 2024 14:56:33 +0100 Subject: [PATCH 168/496] Remove creation of OAuth resources/logic and remove openshift_oauth option (#480) * Remove creation of OAuth resources/logic and add annotation * Remove openshift_oauth configuration * Add verify_tls to ClusterConfiguration --- src/codeflare_sdk/cluster/cluster.py | 48 ++--- src/codeflare_sdk/cluster/config.py | 8 +- src/codeflare_sdk/utils/generate_yaml.py | 23 ++- src/codeflare_sdk/utils/openshift_oauth.py | 199 ------------------- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 1 - tests/unit_test.py | 107 +--------- tests/unit_test_support.py | 1 - 7 files changed, 46 insertions(+), 341 deletions(-) delete mode 100644 src/codeflare_sdk/utils/openshift_oauth.py diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 115ac25e..127c6fad 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -33,10 +33,7 @@ ) from ..utils.kube_api_helpers import _kube_api_error_handling from ..utils.generate_yaml import is_openshift_cluster -from ..utils.openshift_oauth import ( - create_openshift_oauth_objects, - delete_openshift_oauth_objects, -) + from .config import ClusterConfiguration from .model import ( AppWrapper, @@ -86,14 +83,16 @@ def _client_headers(self): @property def _client_verify_tls(self): - return not self.config.openshift_oauth + if not is_openshift_cluster or not self.config.verify_tls: + return False + return True @property def job_client(self): k8client = api_config_handler() or client.ApiClient() if self._job_submission_client: return self._job_submission_client - if self.config.openshift_oauth: + if is_openshift_cluster(): print(k8client.configuration.get_api_key_with_prefix("authorization")) self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), @@ -191,6 +190,7 @@ def create_app_wrapper(self): ingress_domain = self.config.ingress_domain ingress_options = self.config.ingress_options write_to_file = self.config.write_to_file + verify_tls = self.config.verify_tls return generate_appwrapper( name=name, 
namespace=namespace, @@ -213,10 +213,10 @@ def create_app_wrapper(self): image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, - openshift_oauth=self.config.openshift_oauth, ingress_domain=ingress_domain, ingress_options=ingress_options, write_to_file=write_to_file, + verify_tls=verify_tls, ) # creates a new cluster with the provided or default spec @@ -226,10 +226,6 @@ def up(self): the MCAD queue. """ namespace = self.config.namespace - if self.config.openshift_oauth: - create_openshift_oauth_objects( - cluster_name=self.config.name, namespace=namespace - ) try: config_check() @@ -281,11 +277,6 @@ def down(self): except Exception as e: # pragma: no cover return _kube_api_error_handling(e) - if self.config.openshift_oauth: - delete_openshift_oauth_objects( - cluster_name=self.config.name, namespace=namespace - ) - def status( self, print_to_console: bool = True ) -> Tuple[CodeFlareClusterStatus, bool]: @@ -500,10 +491,14 @@ def torchx_config( return to_return def from_k8_cluster_object( - rc, mcad=True, ingress_domain=None, ingress_options={}, write_to_file=False + rc, + mcad=True, + ingress_domain=None, + ingress_options={}, + write_to_file=False, + verify_tls=True, ): config_check() - openshift_oauth = False if ( rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"] == "True" @@ -511,15 +506,6 @@ def from_k8_cluster_object( local_interactive = True else: local_interactive = False - if "codeflare.dev/oauth" in rc["metadata"]["annotations"]: - openshift_oauth = ( - rc["metadata"]["annotations"]["codeflare.dev/oauth"] == "True" - ) - else: - for container in rc["spec"]["headGroupSpec"]["template"]["spec"][ - "containers" - ]: - openshift_oauth = "oauth-proxy" in container["name"] machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -570,7 +556,7 @@ def from_k8_cluster_object( ingress_domain=ingress_domain, ingress_options=ingress_options, write_to_file=write_to_file, - openshift_oauth=openshift_oauth, + verify_tls=verify_tls, ) return Cluster(cluster_config) @@ -655,7 +641,10 @@ def get_current_namespace(): # pragma: no cover def get_cluster( - cluster_name: str, namespace: str = "default", write_to_file: bool = False + cluster_name: str, + namespace: str = "default", + write_to_file: bool = False, + verify_tls: bool = True, ): try: config_check() @@ -729,6 +718,7 @@ def get_cluster( ingress_domain=ingress_domain, ingress_options=ingress_options, write_to_file=write_to_file, + verify_tls=verify_tls, ) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 86d4252e..195349ce 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -52,7 +52,13 @@ class ClusterConfiguration: local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None - openshift_oauth: bool = False # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator ingress_options: dict = field(default_factory=dict) ingress_domain: str = None write_to_file: bool = False + verify_tls: bool = True + + def __post_init__(self): + if not self.verify_tls: + print( + "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" + ) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 
da65defd..00790ac2 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -227,6 +227,20 @@ def update_names(yaml, item, appwrapper_name, cluster_name, namespace): lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name lower_meta["name"] = cluster_name lower_meta["namespace"] = namespace + lower_spec = item.get("generictemplate", {}).get("spec") + if is_openshift_cluster(): + cookie_secret_env_var = { + "name": "COOKIE_SECRET", + "valueFrom": { + "secretKeyRef": { + "key": "cookie_secret", + "name": f"{cluster_name}-oauth-config", + } + }, + } + lower_spec["headGroupSpec"]["template"]["spec"]["containers"][0]["env"].append( + cookie_secret_env_var + ) def update_labels(yaml, instascale, instance_types): @@ -585,9 +599,6 @@ def enable_openshift_oauth(user_yaml, cluster_name, namespace): ) # allows for setting value of Cluster object when initializing object from an existing AppWrapper on cluster user_yaml["metadata"]["annotations"] = user_yaml["metadata"].get("annotations", {}) - user_yaml["metadata"]["annotations"][ - "codeflare-sdk-use-oauth" - ] = "true" # if the user gets an ray_headgroup_pod = user_yaml["spec"]["resources"]["GenericItems"][0][ "generictemplate" ]["spec"]["headGroupSpec"]["template"]["spec"] @@ -620,7 +631,7 @@ def _create_oauth_sidecar_object( "--upstream=http://localhost:8265", f"--tls-cert={tls_mount_location}/tls.crt", f"--tls-key={tls_mount_location}/tls.key", - f"--cookie-secret={b64encode(urandom(64)).decode('utf-8')}", # create random string for encrypting cookie + "--cookie-secret=$(COOKIE_SECRET)", f'--openshift-delegate-urls={{"/":{{"resource":"pods","namespace":"{namespace}","verb":"get"}}}}', ], image="registry.redhat.io/openshift4/ose-oauth-proxy@sha256:1ea6a01bf3e63cdcf125c6064cbd4a4a270deaf0f157b3eabb78f60556840366", @@ -696,10 +707,10 @@ def generate_appwrapper( image_pull_secrets: list, dispatch_priority: str, priority_val: int, - openshift_oauth: bool, ingress_domain: str, ingress_options: dict, write_to_file: bool, + verify_tls: bool, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -757,7 +768,7 @@ def generate_appwrapper( delete_route_or_ingress(resources["resources"]) - if openshift_oauth: + if is_openshift_cluster(): enable_openshift_oauth(user_yaml, cluster_name, namespace) directory_path = os.path.expanduser("~/.codeflare/appwrapper/") diff --git a/src/codeflare_sdk/utils/openshift_oauth.py b/src/codeflare_sdk/utils/openshift_oauth.py deleted file mode 100644 index facd8411..00000000 --- a/src/codeflare_sdk/utils/openshift_oauth.py +++ /dev/null @@ -1,199 +0,0 @@ -from urllib3.util import parse_url -import yaml - -from ..cluster.auth import config_check, api_config_handler - -from kubernetes import client -from kubernetes import dynamic - - -def _route_api_getter(): - return dynamic.DynamicClient( - api_config_handler() or client.ApiClient() - ).resources.get(api_version="route.openshift.io/v1", kind="Route") - - -def create_openshift_oauth_objects(cluster_name, namespace): - config_check() - oauth_port = 8443 - oauth_sa_name = f"{cluster_name}-oauth-proxy" - tls_secret_name = _gen_tls_secret_name(cluster_name) - service_name = f"{cluster_name}-oauth" - port_name = "oauth-proxy" - - _create_or_replace_oauth_sa(namespace, oauth_sa_name, cluster_name) - _create_or_replace_oauth_service_obj( - cluster_name, namespace, oauth_port, tls_secret_name, service_name, port_name - ) - _create_or_replace_oauth_route_object( - cluster_name, - 
namespace, - service_name, - port_name, - ) - _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name) - - -def _create_or_replace_oauth_sa(namespace, oauth_sa_name, cluster_name): - oauth_sa = client.V1ServiceAccount( - api_version="v1", - kind="ServiceAccount", - metadata=client.V1ObjectMeta( - name=oauth_sa_name, - namespace=namespace, - annotations={ - "serviceaccounts.openshift.io/oauth-redirectreference.first": '{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"' - + "ray-dashboard-" - + cluster_name - + '"}}' - }, - ), - ) - try: - client.CoreV1Api(api_config_handler()).create_namespaced_service_account( - namespace=namespace, body=oauth_sa - ) - except client.ApiException as e: - if e.reason == "Conflict": - client.CoreV1Api(api_config_handler()).replace_namespaced_service_account( - namespace=namespace, - body=oauth_sa, - name=oauth_sa_name, - ) - else: - raise e - - -def _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name): - oauth_crb = client.V1ClusterRoleBinding( - api_version="rbac.authorization.k8s.io/v1", - kind="ClusterRoleBinding", - metadata=client.V1ObjectMeta(name=f"{cluster_name}-rb"), - role_ref=client.V1RoleRef( - api_group="rbac.authorization.k8s.io", - kind="ClusterRole", - name="system:auth-delegator", - ), - subjects=[ - client.V1Subject( - kind="ServiceAccount", name=oauth_sa_name, namespace=namespace - ) - ], - ) - try: - client.RbacAuthorizationV1Api(api_config_handler()).create_cluster_role_binding( - body=oauth_crb - ) - except client.ApiException as e: - if e.reason == "Conflict": - client.RbacAuthorizationV1Api( - api_config_handler() - ).replace_cluster_role_binding(body=oauth_crb, name=f"{cluster_name}-rb") - else: - raise e - - -def _gen_tls_secret_name(cluster_name): - return f"{cluster_name}-proxy-tls-secret" - - -def delete_openshift_oauth_objects(cluster_name, namespace): - # NOTE: it might be worth adding error handling here, but shouldn't be necessary because cluster.down(...) 
checks - # for an existing cluster before calling this => the objects should never be deleted twice - oauth_sa_name = f"{cluster_name}-oauth-proxy" - service_name = f"{cluster_name}-oauth" - v1_routes = _route_api_getter() - client.CoreV1Api(api_config_handler()).delete_namespaced_service_account( - name=oauth_sa_name, namespace=namespace - ) - client.CoreV1Api(api_config_handler()).delete_namespaced_service( - name=service_name, namespace=namespace - ) - v1_routes.delete(name=f"ray-dashboard-{cluster_name}", namespace=namespace) - client.RbacAuthorizationV1Api(api_config_handler()).delete_cluster_role_binding( - name=f"{cluster_name}-rb" - ) - - -def _create_or_replace_oauth_service_obj( - cluster_name: str, - namespace: str, - oauth_port: int, - tls_secret_name: str, - service_name: str, - port_name: str, -) -> client.V1Service: - oauth_service = client.V1Service( - api_version="v1", - kind="Service", - metadata=client.V1ObjectMeta( - annotations={ - "service.beta.openshift.io/serving-cert-secret-name": tls_secret_name - }, - name=service_name, - namespace=namespace, - ), - spec=client.V1ServiceSpec( - ports=[ - client.V1ServicePort( - name=port_name, - protocol="TCP", - port=443, - target_port=oauth_port, - ) - ], - selector={ - "app.kubernetes.io/created-by": "kuberay-operator", - "app.kubernetes.io/name": "kuberay", - "ray.io/cluster": cluster_name, - "ray.io/identifier": f"{cluster_name}-head", - "ray.io/node-type": "head", - }, - ), - ) - try: - client.CoreV1Api(api_config_handler()).create_namespaced_service( - namespace=namespace, body=oauth_service - ) - except client.ApiException as e: - if e.reason == "Conflict": - client.CoreV1Api(api_config_handler()).replace_namespaced_service( - namespace=namespace, body=oauth_service, name=service_name - ) - else: - raise e - - -def _create_or_replace_oauth_route_object( - cluster_name: str, - namespace: str, - service_name: str, - port_name: str, -): - route = f""" - apiVersion: route.openshift.io/v1 - kind: Route - metadata: - name: ray-dashboard-{cluster_name} - namespace: {namespace} - spec: - port: - targetPort: {port_name} - tls: - termination: reencrypt - to: - kind: Service - name: {service_name} - """ - route_data = yaml.safe_load(route) - v1_routes = _route_api_getter() - try: - existing_route = v1_routes.get( - name=f"ray-dashboard-{cluster_name}", namespace=namespace - ) - route_data["metadata"]["resourceVersion"] = existing_route["metadata"][ - "resourceVersion" - ] - v1_routes.replace(body=route_data) - except dynamic.client.ApiException: - v1_routes.create(body=route_data) diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 3e24d465..708a389d 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -51,7 +51,6 @@ def run_mnist_raycluster_sdk_oauth(self): num_gpus=0, instascale=False, image=ray_image, - openshift_oauth=True, write_to_file=True, ) ) diff --git a/tests/unit_test.py b/tests/unit_test.py index 3edadc63..9a28d1c1 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -39,7 +39,6 @@ get_cluster, _app_wrapper_status, _ray_cluster_status, - _get_ingress_domain, ) from codeflare_sdk.cluster.auth import ( TokenAuthentication, @@ -47,10 +46,6 @@ KubeConfigFileAuthentication, config_check, ) -from codeflare_sdk.utils.openshift_oauth import ( - create_openshift_oauth_objects, - delete_openshift_oauth_objects, -) from codeflare_sdk.utils.pretty_print import ( print_no_resources_found, 
print_app_wrappers_status, @@ -91,7 +86,6 @@ read_template, enable_local_interactive, ) -import codeflare_sdk.utils.openshift_oauth as sdk_oauth import openshift from openshift.selector import Selector @@ -113,7 +107,6 @@ def mock_routes_api(mocker): mocker.patch.object( - sdk_oauth, "_route_api_getter", return_value=MagicMock( resources=MagicMock( @@ -588,24 +581,6 @@ def test_rc_status(mocker): assert rc == None -def test_delete_openshift_oauth_objects(mocker): - mocker.patch.object(client.CoreV1Api, "delete_namespaced_service_account") - mocker.patch.object(client.CoreV1Api, "delete_namespaced_service") - mocker.patch.object(client.NetworkingV1Api, "delete_namespaced_ingress") - mocker.patch.object(client.RbacAuthorizationV1Api, "delete_cluster_role_binding") - mock_routes_api(mocker) - delete_openshift_oauth_objects("test-cluster", "test-namespace") - client.CoreV1Api.delete_namespaced_service_account.assert_called_with( - name="test-cluster-oauth-proxy", namespace="test-namespace" - ) - client.CoreV1Api.delete_namespaced_service.assert_called_with( - name="test-cluster-oauth", namespace="test-namespace" - ) - client.RbacAuthorizationV1Api.delete_cluster_role_binding.assert_called_with( - name="test-cluster-rb" - ) - - def test_cluster_uris(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") @@ -3167,90 +3142,15 @@ def test_enable_local_interactive(mocker): } -def test_create_openshift_oauth(mocker: MockerFixture): - create_namespaced_service_account = MagicMock() - create_cluster_role_binding = MagicMock() - create_namespaced_service = MagicMock() - mocker.patch.object( - client.CoreV1Api, - "create_namespaced_service_account", - create_namespaced_service_account, - ) - mocker.patch.object( - client.RbacAuthorizationV1Api, - "create_cluster_role_binding", - create_cluster_role_binding, - ) - mocker.patch.object( - client.CoreV1Api, "create_namespaced_service", create_namespaced_service - ) - mock_routes_api(mocker) - create_openshift_oauth_objects("foo", "bar") - create_ns_sa_args = create_namespaced_service_account.call_args - create_crb_args = create_cluster_role_binding.call_args - create_ns_serv_args = create_namespaced_service.call_args - assert ( - create_ns_sa_args.kwargs["namespace"] == create_ns_serv_args.kwargs["namespace"] - ) - assert isinstance(create_ns_sa_args.kwargs["body"], client.V1ServiceAccount) - assert isinstance(create_crb_args.kwargs["body"], client.V1ClusterRoleBinding) - assert isinstance(create_ns_serv_args.kwargs["body"], client.V1Service) - - -def test_replace_openshift_oauth(mocker: MockerFixture): - # not_found_exception = client.ApiException(reason="Conflict") - create_namespaced_service_account = MagicMock( - side_effect=client.ApiException(reason="Conflict") - ) - create_cluster_role_binding = MagicMock( - side_effect=client.ApiException(reason="Conflict") - ) - create_namespaced_service = MagicMock( - side_effect=client.ApiException(reason="Conflict") - ) - mocker.patch.object( - client.CoreV1Api, - "create_namespaced_service_account", - create_namespaced_service_account, - ) - mocker.patch.object( - client.RbacAuthorizationV1Api, - "create_cluster_role_binding", - create_cluster_role_binding, - ) - mocker.patch.object( - client.CoreV1Api, "create_namespaced_service", create_namespaced_service - ) - mocker.patch.object(dynamic.ResourceList, "get", return_value=True) - replace_namespaced_service_account = MagicMock() - replace_cluster_role_binding = MagicMock() - 
replace_namespaced_service = MagicMock() - mocker.patch.object( - client.CoreV1Api, - "replace_namespaced_service_account", - replace_namespaced_service_account, - ) - mocker.patch.object( - client.RbacAuthorizationV1Api, - "replace_cluster_role_binding", - replace_cluster_role_binding, - ) - mocker.patch.object( - client.CoreV1Api, "replace_namespaced_service", replace_namespaced_service - ) - mock_routes_api(mocker) - create_openshift_oauth_objects("foo", "bar") - replace_namespaced_service_account.assert_called_once() - replace_cluster_role_binding.assert_called_once() - replace_namespaced_service.assert_called_once() - - def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "codeflare_sdk.cluster.cluster.get_current_namespace", return_value="opendatahub", ) + mocker.patch( + "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=True + ) write_user_appwrapper = MagicMock() mocker.patch( "codeflare_sdk.utils.generate_yaml.write_user_appwrapper", write_user_appwrapper @@ -3258,7 +3158,6 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): Cluster( ClusterConfiguration( "test_cluster", - openshift_oauth=True, image="quay.io/project-codeflare/ray:latest-py39-cu118", ingress_domain="apps.cluster.awsroute.org", write_to_file=True, diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index c4b7416d..36c25c69 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -60,7 +60,6 @@ def createClusterWithConfig(mocker): return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, ) cluster = Cluster(createClusterConfig()) - cluster.config.image = "quay.io/project-codeflare/ray:latest-py39-cu118" return cluster From 9f629f8b057666efee086c921def40a27d05abbe Mon Sep 17 00:00:00 2001 From: Christian Zaccaria <73656840+ChristianZaccaria@users.noreply.github.com> Date: Fri, 5 Apr 2024 16:36:26 +0100 Subject: [PATCH 169/496] Remove ingress/routes logic from SDK (#495) * WIP - Remove ingress/routes logic from SDK * Remove ingress_options and update tests --- .github/workflows/e2e_tests.yaml | 1 + src/codeflare_sdk/cluster/cluster.py | 100 ------- src/codeflare_sdk/cluster/config.py | 2 - .../templates/base-template.yaml | 85 ------ src/codeflare_sdk/utils/generate_yaml.py | 244 ++-------------- tests/e2e/mnist_raycluster_sdk_test.py | 19 -- tests/e2e/start_ray_cluster.py | 16 -- tests/test-case-no-mcad.yamls | 23 -- tests/test-case-prio.yaml | 24 -- tests/test-case.yaml | 24 -- tests/test-default-appwrapper.yaml | 24 -- tests/unit_test.py | 264 ++++++++---------- tests/unit_test_support.py | 1 - 13 files changed, 139 insertions(+), 688 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index dbf2fce2..4698af25 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -84,6 +84,7 @@ jobs: cd codeflare-operator echo Deploying CodeFlare operator IMG="${REGISTRY_ADDRESS}"/codeflare-operator + sed -i 's/RayDashboardOAuthEnabled: pointer.Bool(true)/RayDashboardOAuthEnabled: pointer.Bool(false)/' main.go make image-push -e IMG="${IMG}" make deploy -e IMG="${IMG}" -e ENV="e2e" kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators codeflare-operator-manager diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 127c6fad..707ea61d 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ 
b/src/codeflare_sdk/cluster/cluster.py @@ -187,8 +187,6 @@ def create_app_wrapper(self): local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority - ingress_domain = self.config.ingress_domain - ingress_options = self.config.ingress_options write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls return generate_appwrapper( @@ -213,8 +211,6 @@ def create_app_wrapper(self): image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, - ingress_domain=ingress_domain, - ingress_options=ingress_options, write_to_file=write_to_file, verify_tls=verify_tls, ) @@ -493,8 +489,6 @@ def torchx_config( def from_k8_cluster_object( rc, mcad=True, - ingress_domain=None, - ingress_options={}, write_to_file=False, verify_tls=True, ): @@ -512,11 +506,6 @@ def from_k8_cluster_object( else [] ) - if local_interactive and ingress_domain == None: - ingress_domain = rc["metadata"]["annotations"][ - "sdk.codeflare.dev/ingress_domain" - ] - cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], @@ -553,8 +542,6 @@ def from_k8_cluster_object( ]["image"], local_interactive=local_interactive, mcad=mcad, - ingress_domain=ingress_domain, - ingress_options=ingress_options, write_to_file=write_to_file, verify_tls=verify_tls, ) @@ -661,62 +648,9 @@ def get_cluster( for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: mcad = _check_aw_exists(cluster_name, namespace) - ingress_host = None - ingress_options = {} - if not is_openshift_cluster(): - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress(namespace) - for ingress in ingresses.items: - # Search for ingress with AppWrapper name as the owner - if ( - "ingress-owner" in ingress.metadata.labels - and ingress.metadata.labels["ingress-owner"] == cluster_name - ): - ingress_host = ingress.spec.rules[0].host - if ( - "ingress-options" in ingress.metadata.labels - and ingress.metadata.labels["ingress-options"] == "true" - ): - ingress_name = ingress.metadata.name - port = ( - ingress.spec.rules[0] - .http.paths[0] - .backend.service.port.number - ) - annotations = ingress.metadata.annotations - path = ingress.spec.rules[0].http.paths[0].path - ingress_class_name = ingress.spec.ingress_class_name - path_type = ( - ingress.spec.rules[0].http.paths[0].path_type - ) - - ingress_options = { - "ingresses": [ - { - "ingressName": ingress_name, - "port": port, - "annotations": annotations, - "ingressClassName": ingress_class_name, - "pathType": path_type, - "path": path, - "host": ingress_host, - } - ] - } - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - # We gather the ingress domain from the host - if ingress_host is not None and ingress_options == {}: - ingress_domain = ingress_host.split(".", 1)[1] - else: - ingress_domain = None return Cluster.from_k8_cluster_object( rc, mcad=mcad, - ingress_domain=ingress_domain, - ingress_options=ingress_options, write_to_file=write_to_file, verify_tls=verify_tls, ) @@ -739,24 +673,6 @@ def _delete_resources( plural="rayclusters", name=name, ) - elif resource["kind"] == "Ingress": - name = resource["metadata"]["name"] - api_instance.delete_namespaced_custom_object( - group="networking.k8s.io", - version="v1", - namespace=namespace, - plural="ingresses", - name=name, - ) - elif resource["kind"] == "Route": - 
name = resource["metadata"]["name"] - api_instance.delete_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=namespace, - plural="routes", - name=name, - ) elif resource["kind"] == "Secret": name = resource["metadata"]["name"] secret_instance = client.CoreV1Api(api_config_handler()) @@ -776,22 +692,6 @@ def _create_resources(yamls, namespace: str, api_instance: client.CustomObjectsA plural="rayclusters", body=resource, ) - elif resource["kind"] == "Ingress": - api_instance.create_namespaced_custom_object( - group="networking.k8s.io", - version="v1", - namespace=namespace, - plural="ingresses", - body=resource, - ) - elif resource["kind"] == "Route": - api_instance.create_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=namespace, - plural="routes", - body=resource, - ) elif resource["kind"] == "Secret": secret_instance = client.CoreV1Api(api_config_handler()) secret_instance.create_namespaced_secret( diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 195349ce..7156495f 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -52,8 +52,6 @@ class ClusterConfiguration: local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None - ingress_options: dict = field(default_factory=dict) - ingress_domain: str = None write_to_file: bool = False verify_tls: bool = True diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index fb6ef427..0f0f8b32 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -338,91 +338,6 @@ spec: - key: odh-ca-bundle.crt path: odh-ca-bundle.crt optional: true - - replicas: 1 - generictemplate: - apiVersion: networking.k8s.io/v1 - kind: Ingress - metadata: - name: ray-dashboard-deployment-ingress - namespace: default - annotations: - annotations-example:annotations-example - labels: - ingress-options: "false" - ingress-owner: appwrapper-name - spec: - ingressClassName: nginx - rules: - - http: - paths: - - backend: - service: - name: raytest-head-svc - port: - number: 8265 - pathType: Prefix - path: / - host: ray-dashboard-raytest. - - replicas: 1 - generictemplate: - kind: Route - apiVersion: route.openshift.io/v1 - metadata: - name: ray-dashboard-deployment-route - namespace: default - labels: - # allows me to return name of service that Ray operator creates - odh-ray-cluster-service: deployment-name-head-svc - spec: - to: - kind: Service - name: deployment-name-head-svc - port: - targetPort: dashboard - tls: - termination: edge - - replicas: 1 - generictemplate: - apiVersion: networking.k8s.io/v1 - kind: Ingress - metadata: - name: rayclient-deployment-ingress - namespace: default - annotations: - annotations-example:annotations-example - labels: - odh-ray-cluster-service: deployment-name-head-svc - spec: - ingressClassName: nginx - rules: - - http: - paths: - - backend: - service: - name: deployment-name-head-svc - port: - number: 10001 - path: '' - pathType: ImplementationSpecific - host: rayclient-raytest. 
- - replicas: 1 - generictemplate: - apiVersion: route.openshift.io/v1 - kind: Route - metadata: - name: rayclient-deployment-route - namespace: default - labels: - # allows me to return name of service that Ray operator creates - odh-ray-cluster-service: deployment-name-head-svc - spec: - port: - targetPort: client - tls: - termination: passthrough - to: - kind: Service - name: deployment-name-head-svc - replicas: 1 generictemplate: apiVersion: v1 diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 00790ac2..7f14b5ba 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -49,10 +49,6 @@ def gen_names(name): return name, name -def gen_dashboard_ingress_name(cluster_name): - return f"ray-dashboard-{cluster_name}" - - # Check if the routes api exists def is_openshift_cluster(): try: @@ -67,156 +63,17 @@ def is_openshift_cluster(): return _kube_api_error_handling(e) -def update_dashboard_route(route_item, cluster_name, namespace): - metadata = route_item.get("generictemplate", {}).get("metadata") - metadata["name"] = gen_dashboard_ingress_name(cluster_name) - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - spec = route_item.get("generictemplate", {}).get("spec") - spec["to"]["name"] = f"{cluster_name}-head-svc" - - -# ToDo: refactor the update_x_route() functions -def update_rayclient_route(route_item, cluster_name, namespace): - metadata = route_item.get("generictemplate", {}).get("metadata") - metadata["name"] = f"rayclient-{cluster_name}" - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - spec = route_item.get("generictemplate", {}).get("spec") - spec["to"]["name"] = f"{cluster_name}-head-svc" - - -def update_dashboard_exposure( - ingress_item, route_item, cluster_name, namespace, ingress_options, ingress_domain -): - if is_openshift_cluster(): - update_dashboard_route(route_item, cluster_name, namespace) - else: - update_dashboard_ingress( - ingress_item, cluster_name, namespace, ingress_options, ingress_domain - ) - - -def update_rayclient_exposure( - client_route_item, client_ingress_item, cluster_name, namespace, ingress_domain -): - if is_openshift_cluster(): - update_rayclient_route(client_route_item, cluster_name, namespace) - else: - update_rayclient_ingress( - client_ingress_item, cluster_name, namespace, ingress_domain - ) - - -def update_dashboard_ingress( - ingress_item, cluster_name, namespace, ingress_options, ingress_domain -): # pragma: no cover - metadata = ingress_item.get("generictemplate", {}).get("metadata") - spec = ingress_item.get("generictemplate", {}).get("spec") - if ingress_options != {}: - for index, ingress_option in enumerate(ingress_options["ingresses"]): - if "ingressName" not in ingress_option.keys(): - raise ValueError( - f"Error: 'ingressName' is missing or empty for ingress item at index {index}" - ) - if "port" not in ingress_option.keys(): - raise ValueError( - f"Error: 'port' is missing or empty for ingress item at index {index}" - ) - elif not isinstance(ingress_option["port"], int): - raise ValueError( - f"Error: 'port' is not of type int for ingress item at index {index}" - ) - if ingress_option is not None: - metadata["name"] = ingress_option["ingressName"] - metadata["namespace"] = namespace - metadata["labels"]["ingress-owner"] = cluster_name - metadata["labels"]["ingress-options"] = "true" - if ( - "annotations" not in 
ingress_option.keys() - or ingress_option["annotations"] is None - ): - del metadata["annotations"] - else: - metadata["annotations"] = ingress_option["annotations"] - if ( - "path" not in ingress_option.keys() - or ingress_option["path"] is None - ): - del spec["rules"][0]["http"]["paths"][0]["path"] - else: - spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[ - "path" - ] - if ( - "pathType" not in ingress_option.keys() - or ingress_option["pathType"] is None - ): - spec["rules"][0]["http"]["paths"][0][ - "pathType" - ] = "ImplementationSpecific" - if ( - "host" not in ingress_option.keys() - or ingress_option["host"] is None - ): - del spec["rules"][0]["host"] - else: - spec["rules"][0]["host"] = ingress_option["host"] - if ( - "ingressClassName" not in ingress_option.keys() - or ingress_option["ingressClassName"] is None - ): - del spec["ingressClassName"] - else: - spec["ingressClassName"] = ingress_option["ingressClassName"] - - spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ - "name" - ] = f"{cluster_name}-head-svc" - else: - spec["ingressClassName"] = "nginx" - metadata["name"] = gen_dashboard_ingress_name(cluster_name) - metadata["labels"]["ingress-owner"] = cluster_name - metadata["namespace"] = namespace - spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ - "name" - ] = f"{cluster_name}-head-svc" - if ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. Please specify an ingress domain" - ) - else: - domain = ingress_domain - del metadata["annotations"] - spec["rules"][0]["host"] = f"ray-dashboard-{cluster_name}-{namespace}.{domain}" - - -def update_rayclient_ingress( - ingress_item, cluster_name, namespace, ingress_domain -): # pragma: no cover - metadata = ingress_item.get("generictemplate", {}).get("metadata") - spec = ingress_item.get("generictemplate", {}).get("spec") - metadata["name"] = f"rayclient-{cluster_name}" - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - - spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ - "name" - ] = f"{cluster_name}-head-svc" - - if ingress_domain is not None: - ingressClassName = "nginx" - annotations = { - "nginx.ingress.kubernetes.io/rewrite-target": "/", - "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "nginx.ingress.kubernetes.io/ssl-passthrough": "true", - } - else: - raise ValueError("ingress_domain is invalid. 
Please specify a domain") - - metadata["annotations"] = annotations - spec["ingressClassName"] = ingressClassName - spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{ingress_domain}" +def is_kind_cluster(): + try: + config_check() + v1 = client.CoreV1Api() + label_selector = "kubernetes.io/hostname=kind-control-plane" + nodes = v1.list_node(label_selector=label_selector) + # If we find one or more nodes with the label, assume it's a KinD cluster + return len(nodes.items) > 0 + except Exception as e: + print(f"Error checking if cluster is KinD: {e}") + return False def update_names(yaml, item, appwrapper_name, cluster_name, namespace): @@ -433,10 +290,10 @@ def update_ca_secret(ca_secret_item, cluster_name, namespace): data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) -def enable_local_interactive(resources, cluster_name, namespace, ingress_domain): - rayclient_ingress_item = resources["resources"].get("GenericItems")[3] - rayclient_route_item = resources["resources"].get("GenericItems")[4] - ca_secret_item = resources["resources"].get("GenericItems")[5] +def enable_local_interactive(resources, cluster_name, namespace): # pragma: no cover + from ..cluster.cluster import _get_ingress_domain + + ca_secret_item = resources["resources"].get("GenericItems")[1] item = resources["resources"].get("GenericItems")[0] update_ca_secret(ca_secret_item, cluster_name, namespace) # update_ca_secret_volumes @@ -460,40 +317,18 @@ def enable_local_interactive(resources, cluster_name, namespace, ingress_domain) command = command.replace("deployment-name", cluster_name) - if ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. For creating the client route/ingress please specify an ingress domain" - ) - else: - domain = ingress_domain + domain = "" ## FIX - We can't retrieve ingress domain - move init container to CFO command = command.replace("server-name", domain) - update_rayclient_exposure( - rayclient_route_item, - rayclient_ingress_item, - cluster_name, - namespace, - ingress_domain, - ) item["generictemplate"]["metadata"]["annotations"][ "sdk.codeflare.dev/local_interactive" ] = "True" - item["generictemplate"]["metadata"]["annotations"][ - "sdk.codeflare.dev/ingress_domain" - ] = ingress_domain item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" ][0].get("command")[2] = command -def apply_ingress_domain_annotation(resources, ingress_domain): - item = resources["resources"].get("GenericItems")[0] - item["generictemplate"]["metadata"]["annotations"][ - "sdk.codeflare.dev/ingress_domain" - ] = ingress_domain - - def del_from_list_by_name(l: list, target: typing.List[str]) -> list: return [x for x in l if x["name"] not in target] @@ -544,26 +379,6 @@ def disable_raycluster_tls(resources): resources["GenericItems"] = updated_items -def delete_route_or_ingress(resources): - if is_openshift_cluster(): - client_to_remove_name = "rayclient-deployment-ingress" - dashboard_to_remove_name = "ray-dashboard-deployment-ingress" - else: - client_to_remove_name = "rayclient-deployment-route" - dashboard_to_remove_name = "ray-dashboard-deployment-route" - - updated_items = [] - for i in resources["GenericItems"][:]: - if dashboard_to_remove_name in i["generictemplate"]["metadata"]["name"]: - continue - elif client_to_remove_name in i["generictemplate"]["metadata"]["name"]: - continue - - updated_items.append(i) - - resources["GenericItems"] = updated_items - - def write_user_appwrapper(user_yaml, output_file_name): # Create the 
directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -602,7 +417,6 @@ def enable_openshift_oauth(user_yaml, cluster_name, namespace): ray_headgroup_pod = user_yaml["spec"]["resources"]["GenericItems"][0][ "generictemplate" ]["spec"]["headGroupSpec"]["template"]["spec"] - user_yaml["spec"]["resources"]["GenericItems"].pop(1) ray_headgroup_pod["serviceAccount"] = oauth_sa_name ray_headgroup_pod["volumes"] = ray_headgroup_pod.get("volumes", []) @@ -707,8 +521,6 @@ def generate_appwrapper( image_pull_secrets: list, dispatch_priority: str, priority_val: int, - ingress_domain: str, - ingress_options: dict, write_to_file: bool, verify_tls: bool, ): @@ -716,9 +528,13 @@ def generate_appwrapper( appwrapper_name, cluster_name = gen_names(name) resources = user_yaml.get("spec", "resources") item = resources["resources"].get("GenericItems")[0] - ingress_item = resources["resources"].get("GenericItems")[1] - route_item = resources["resources"].get("GenericItems")[2] - update_names(user_yaml, item, appwrapper_name, cluster_name, namespace) + update_names( + user_yaml, + item, + appwrapper_name, + cluster_name, + namespace, + ) update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) update_custompodresources( @@ -750,24 +566,12 @@ def generate_appwrapper( head_memory, head_gpus, ) - update_dashboard_exposure( - ingress_item, - route_item, - cluster_name, - namespace, - ingress_options, - ingress_domain, - ) - if ingress_domain is not None: - apply_ingress_domain_annotation(resources, ingress_domain) if local_interactive: - enable_local_interactive(resources, cluster_name, namespace, ingress_domain) + enable_local_interactive(resources, cluster_name, namespace) else: disable_raycluster_tls(resources["resources"]) - delete_route_or_ingress(resources["resources"]) - if is_openshift_cluster(): enable_openshift_oauth(user_yaml, cluster_name, namespace) diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py index 27c1451e..b98b860b 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.py +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -36,24 +36,6 @@ def test_mnist_ray_cluster_sdk(self): def run_mnist_raycluster_sdk(self): ray_image = get_ray_image() - host = os.getenv("CLUSTER_HOSTNAME") - - ingress_options = {} - if host is not None: - ingress_options = { - "ingresses": [ - { - "ingressName": "ray-dashboard", - "port": 8265, - "pathType": "Prefix", - "path": "/", - "host": host, - "annotations": { - "nginx.ingress.kubernetes.io/proxy-body-size": "100M", - }, - }, - ] - } cluster = Cluster( ClusterConfiguration( @@ -69,7 +51,6 @@ def run_mnist_raycluster_sdk(self): num_gpus=0, instascale=False, image=ray_image, - ingress_options=ingress_options, write_to_file=True, ) ) diff --git a/tests/e2e/start_ray_cluster.py b/tests/e2e/start_ray_cluster.py index 774be8f0..f4cf7e73 100644 --- a/tests/e2e/start_ray_cluster.py +++ b/tests/e2e/start_ray_cluster.py @@ -7,21 +7,6 @@ namespace = sys.argv[1] ray_image = os.getenv("RAY_IMAGE") -host = os.getenv("CLUSTER_HOSTNAME") - -ingress_options = {} -if host is not None: - ingress_options = { - "ingresses": [ - { - "ingressName": "ray-dashboard", - "port": 8265, - "pathType": "Prefix", - "path": "/", - "host": host, - }, - ] - } cluster = Cluster( ClusterConfiguration( @@ -37,7 +22,6 @@ num_gpus=0, instascale=False, image=ray_image, - ingress_options=ingress_options, ) ) diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 
b15833fe..99745760 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -3,7 +3,6 @@ apiVersion: ray.io/v1 kind: RayCluster metadata: annotations: - sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' @@ -197,25 +196,3 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert ---- -apiVersion: networking.k8s.io/v1 -kind: Ingress -metadata: - labels: - ingress-options: 'false' - ingress-owner: unit-test-cluster-ray - name: ray-dashboard-unit-test-cluster-ray - namespace: ns -spec: - ingressClassName: nginx - rules: - - host: ray-dashboard-unit-test-cluster-ray-ns.apps.cluster.awsroute.org - http: - paths: - - backend: - service: - name: unit-test-cluster-ray-head-svc - port: - number: 8265 - path: / - pathType: Prefix diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index c81d4396..fd83fc3a 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -33,7 +33,6 @@ spec: kind: RayCluster metadata: annotations: - sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' @@ -230,27 +229,4 @@ spec: optional: true name: odh-ca-cert replicas: 1 - - generictemplate: - apiVersion: networking.k8s.io/v1 - kind: Ingress - metadata: - labels: - ingress-options: 'false' - ingress-owner: prio-test-cluster - name: ray-dashboard-prio-test-cluster - namespace: ns - spec: - ingressClassName: nginx - rules: - - host: ray-dashboard-prio-test-cluster-ns.apps.cluster.awsroute.org - http: - paths: - - backend: - service: - name: prio-test-cluster-head-svc - port: - number: 8265 - path: / - pathType: Prefix - replicas: 1 Items: [] diff --git a/tests/test-case.yaml b/tests/test-case.yaml index d7c31a11..14d8e1a4 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -32,7 +32,6 @@ spec: kind: RayCluster metadata: annotations: - sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' @@ -227,27 +226,4 @@ spec: optional: true name: odh-ca-cert replicas: 1 - - generictemplate: - apiVersion: networking.k8s.io/v1 - kind: Ingress - metadata: - labels: - ingress-options: 'false' - ingress-owner: unit-test-cluster - name: ray-dashboard-unit-test-cluster - namespace: ns - spec: - ingressClassName: nginx - rules: - - host: ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org - http: - paths: - - backend: - service: - name: unit-test-cluster-head-svc - port: - number: 8265 - path: / - pathType: Prefix - replicas: 1 Items: [] diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index c9da340c..321a5d51 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -30,7 +30,6 @@ spec: kind: RayCluster metadata: annotations: - sdk.codeflare.dev/ingress_domain: apps.cluster.awsroute.org sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' @@ -205,27 +204,4 @@ spec: optional: true name: odh-ca-cert replicas: 1 - - generictemplate: - apiVersion: networking.k8s.io/v1 - kind: Ingress - metadata: - labels: - ingress-options: 'false' - ingress-owner: unit-test-default-cluster - name: ray-dashboard-unit-test-default-cluster - namespace: opendatahub - spec: - ingressClassName: nginx - rules: - - host: ray-dashboard-unit-test-default-cluster-opendatahub.apps.cluster.awsroute.org - http: - paths: - 
- backend: - service: - name: unit-test-default-cluster-head-svc - port: - number: 8265 - path: / - pathType: Prefix - replicas: 1 Items: [] diff --git a/tests/unit_test.py b/tests/unit_test.py index 9a28d1c1..6831ea65 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -331,7 +331,6 @@ def test_cluster_creation_no_mcad(mocker): instascale=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], - ingress_domain="apps.cluster.awsroute.org", image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=False, mcad=False, @@ -384,7 +383,6 @@ def test_default_cluster_creation(mocker): default_config = ClusterConfiguration( name="unit-test-default-cluster", image="quay.io/project-codeflare/ray:latest-py39-cu118", - ingress_domain="apps.cluster.awsroute.org", ) cluster = Cluster(default_config) test_aw = yaml.safe_load(cluster.app_wrapper_yaml) @@ -651,7 +649,7 @@ def ray_addr(self, *args): def mocked_ingress(port, cluster_name="unit-test-cluster", annotations: dict = None): - labels = {"ingress-owner": cluster_name, "ingress-options": "false"} + labels = {"ingress-owner": cluster_name} if port == 10001: name = f"rayclient-{cluster_name}" else: @@ -833,7 +831,6 @@ def test_ray_details(mocker, capsys): name="raytest2", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", - ingress_domain="apps.cluster.awsroute.org", write_to_file=True, ) ) @@ -954,7 +951,6 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "generation": 1, "annotations": { "sdk.codeflare.dev/local_interactive": "True", - "sdk.codeflare.dev/ingress_domain": "apps.cluster.awsroute.org", }, "labels": { "appwrapper.mcad.ibm.com": "quicktest", @@ -1667,7 +1663,6 @@ def get_aw_obj(group, version, namespace, plural): "metadata": { "labels": { "ingress-owner": "appwrapper-name", - "ingress-options": "false", }, "name": "ray-dashboard-quicktest", "namespace": "default", @@ -2364,7 +2359,6 @@ def test_cluster_status(mocker): name="test", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", - ingress_domain="apps.cluster.awsroute.org", write_to_file=True, ) ) @@ -2459,7 +2453,6 @@ def test_wait_ready(mocker, capsys): name="test", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", - ingress_domain="apps.cluster.awsroute.org", write_to_file=True, ) ) @@ -2999,147 +2992,119 @@ def test_export_env(): ) -def test_enable_local_interactive(mocker): - template = f"{parent}/src/codeflare_sdk/templates/base-template.yaml" - user_yaml = read_template(template) - aw_spec = user_yaml.get("spec", None) - cluster_name = "test-enable-local" - namespace = "default" - ingress_domain = "mytest.domain" - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=False - ) - volume_mounts = [ - {"name": "ca-vol", "mountPath": "/home/ray/workspace/ca", "readOnly": True}, - { - "name": "server-cert", - "mountPath": "/home/ray/workspace/tls", - "readOnly": False, - }, - ] - volumes = [ - { - "name": "ca-vol", - "secret": {"secretName": "ca-secret-test-enable-local"}, - "optional": False, - }, - {"name": "server-cert", "emptyDir": {}}, - { - "name": "odh-trusted-ca-cert", - "configMap": { - "name": "odh-trusted-ca-bundle", - "items": [ - {"key": "ca-bundle.crt", "path": "odh-trusted-ca-bundle.crt"} - ], - "optional": True, - }, - }, - { - "name": "odh-ca-cert", - "configMap": { - "name": "odh-trusted-ca-bundle", - "items": [{"key": 
"odh-ca-bundle.crt", "path": "odh-ca-bundle.crt"}], - "optional": True, - }, - }, - ] - tls_env = [ - {"name": "RAY_USE_TLS", "value": "1"}, - {"name": "RAY_TLS_SERVER_CERT", "value": "/home/ray/workspace/tls/server.crt"}, - {"name": "RAY_TLS_SERVER_KEY", "value": "/home/ray/workspace/tls/server.key"}, - {"name": "RAY_TLS_CA_CERT", "value": "/home/ray/workspace/tls/ca.crt"}, - ] - assert aw_spec != None - enable_local_interactive(aw_spec, cluster_name, namespace, ingress_domain) - head_group_spec = aw_spec["resources"]["GenericItems"][0]["generictemplate"][ - "spec" - ]["headGroupSpec"] - worker_group_spec = aw_spec["resources"]["GenericItems"][0]["generictemplate"][ - "spec" - ]["workerGroupSpecs"] - ca_secret = aw_spec["resources"]["GenericItems"][5]["generictemplate"] - # At a minimal, make sure the following items are presented in the appwrapper spec.resources. - # 1. headgroup has the initContainers command to generated TLS cert from the mounted CA cert. - # Note: In this particular command, the DNS.5 in [alt_name] must match the exposed local_client_url: rayclient-{cluster_name}.{namespace}.{ingress_domain} - assert ( - head_group_spec["template"]["spec"]["initContainers"][0]["command"][2] - == f"cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf \"authorityKeyIdentifier=keyid,issuer\\nbasicConstraints=CA:FALSE\\nsubjectAltName = @alt_names\\n[alt_names]\\nDNS.1 = 127.0.0.1\\nDNS.2 = localhost\\nDNS.3 = ${{FQ_RAY_IP}}\\nDNS.4 = $(awk 'END{{print $1}}' /etc/hosts)\\nDNS.5 = rayclient-{cluster_name}-$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).{ingress_domain}\">./domain.ext && cp /home/ray/workspace/ca/* . && openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext" - ) - assert ( - head_group_spec["template"]["spec"]["initContainers"][0]["volumeMounts"] - == volume_mounts - ) - assert head_group_spec["template"]["spec"]["volumes"] == volumes - - # 2. workerGroupSpec has the initContainers command to generated TLS cert from the mounted CA cert. - assert ( - worker_group_spec[0]["template"]["spec"]["initContainers"][0]["command"][2] - == "cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf \"authorityKeyIdentifier=keyid,issuer\\nbasicConstraints=CA:FALSE\\nsubjectAltName = @alt_names\\n[alt_names]\\nDNS.1 = 127.0.0.1\\nDNS.2 = localhost\\nDNS.3 = ${FQ_RAY_IP}\\nDNS.4 = $(awk 'END{print $1}' /etc/hosts)\">./domain.ext && cp /home/ray/workspace/ca/* . && openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext" - ) - assert ( - worker_group_spec[0]["template"]["spec"]["initContainers"][0]["volumeMounts"] - == volume_mounts - ) - assert worker_group_spec[0]["template"]["spec"]["volumes"] == volumes - - # 3. 
Required Envs to enable TLS encryption between head and workers - for i in range(len(tls_env)): - assert ( - head_group_spec["template"]["spec"]["containers"][0]["env"][i + 1]["name"] - == tls_env[i]["name"] - ) - assert ( - head_group_spec["template"]["spec"]["containers"][0]["env"][i + 1]["value"] - == tls_env[i]["value"] - ) - assert ( - worker_group_spec[0]["template"]["spec"]["containers"][0]["env"][i + 1][ - "name" - ] - == tls_env[i]["name"] - ) - assert ( - worker_group_spec[0]["template"]["spec"]["containers"][0]["env"][i + 1][ - "value" - ] - == tls_env[i]["value"] - ) - - # 4. Secret with ca.crt and ca.key - assert ca_secret["kind"] == "Secret" - assert ca_secret["data"]["ca.crt"] != None - assert ca_secret["data"]["ca.key"] != None - assert ca_secret["metadata"]["name"] == f"ca-secret-{cluster_name}" - assert ca_secret["metadata"]["namespace"] == namespace - - # 5. Rayclient ingress - Kind - rayclient_ingress = aw_spec["resources"]["GenericItems"][3]["generictemplate"] - paths = [ - { - "backend": { - "service": { - "name": f"{cluster_name}-head-svc", - "port": {"number": 10001}, - } - }, - "path": "", - "pathType": "ImplementationSpecific", - } - ] - - assert rayclient_ingress["kind"] == "Ingress" - assert rayclient_ingress["metadata"]["namespace"] == namespace - assert rayclient_ingress["metadata"]["annotations"] == { - "nginx.ingress.kubernetes.io/rewrite-target": "/", - "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "nginx.ingress.kubernetes.io/ssl-passthrough": "true", - } - assert rayclient_ingress["metadata"]["name"] == f"rayclient-{cluster_name}" - assert rayclient_ingress["spec"]["rules"][0] == { - "host": f"rayclient-{cluster_name}-{namespace}.{ingress_domain}", - "http": {"paths": paths}, - } +# def test_enable_local_interactive(mocker): +# template = f"{parent}/src/codeflare_sdk/templates/base-template.yaml" +# user_yaml = read_template(template) +# aw_spec = user_yaml.get("spec", None) +# cluster_name = "test-enable-local" +# namespace = "default" +# ingress_domain = "mytest.domain" +# mocker.patch("kubernetes.client.ApisApi.get_api_versions") +# mocker.patch( +# "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=False +# ) +# volume_mounts = [ +# {"name": "ca-vol", "mountPath": "/home/ray/workspace/ca", "readOnly": True}, +# { +# "name": "server-cert", +# "mountPath": "/home/ray/workspace/tls", +# "readOnly": False, +# }, +# ] +# volumes = [ +# { +# "name": "ca-vol", +# "secret": {"secretName": "ca-secret-test-enable-local"}, +# "optional": False, +# }, +# {"name": "server-cert", "emptyDir": {}}, +# { +# "name": "odh-trusted-ca-cert", +# "configMap": { +# "name": "odh-trusted-ca-bundle", +# "items": [ +# {"key": "ca-bundle.crt", "path": "odh-trusted-ca-bundle.crt"} +# ], +# "optional": True, +# }, +# }, +# { +# "name": "odh-ca-cert", +# "configMap": { +# "name": "odh-trusted-ca-bundle", +# "items": [{"key": "odh-ca-bundle.crt", "path": "odh-ca-bundle.crt"}], +# "optional": True, +# }, +# }, +# ] +# tls_env = [ +# {"name": "RAY_USE_TLS", "value": "1"}, +# {"name": "RAY_TLS_SERVER_CERT", "value": "/home/ray/workspace/tls/server.crt"}, +# {"name": "RAY_TLS_SERVER_KEY", "value": "/home/ray/workspace/tls/server.key"}, +# {"name": "RAY_TLS_CA_CERT", "value": "/home/ray/workspace/tls/ca.crt"}, +# ] +# assert aw_spec != None +# enable_local_interactive(aw_spec, cluster_name, namespace, ingress_domain) +# head_group_spec = aw_spec["resources"]["GenericItems"][0]["generictemplate"][ +# "spec" +# ]["headGroupSpec"] +# worker_group_spec = 
aw_spec["resources"]["GenericItems"][0]["generictemplate"][ +# "spec" +# ]["workerGroupSpecs"] +# ca_secret = aw_spec["resources"]["GenericItems"][1]["generictemplate"] +# # At a minimal, make sure the following items are presented in the appwrapper spec.resources. +# # 1. headgroup has the initContainers command to generated TLS cert from the mounted CA cert. +# # Note: In this particular command, the DNS.5 in [alt_name] must match the exposed local_client_url: rayclient-{cluster_name}.{namespace}.{ingress_domain} +# assert ( +# head_group_spec["template"]["spec"]["initContainers"][0]["command"][2] +# == f"cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf \"authorityKeyIdentifier=keyid,issuer\\nbasicConstraints=CA:FALSE\\nsubjectAltName = @alt_names\\n[alt_names]\\nDNS.1 = 127.0.0.1\\nDNS.2 = localhost\\nDNS.3 = ${{FQ_RAY_IP}}\\nDNS.4 = $(awk 'END{{print $1}}' /etc/hosts)\\nDNS.5 = rayclient-{cluster_name}-$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).{ingress_domain}\">./domain.ext && cp /home/ray/workspace/ca/* . && openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext" +# ) +# assert ( +# head_group_spec["template"]["spec"]["initContainers"][0]["volumeMounts"] +# == volume_mounts +# ) +# assert head_group_spec["template"]["spec"]["volumes"] == volumes + +# # 2. workerGroupSpec has the initContainers command to generated TLS cert from the mounted CA cert. +# assert ( +# worker_group_spec[0]["template"]["spec"]["initContainers"][0]["command"][2] +# == "cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf \"authorityKeyIdentifier=keyid,issuer\\nbasicConstraints=CA:FALSE\\nsubjectAltName = @alt_names\\n[alt_names]\\nDNS.1 = 127.0.0.1\\nDNS.2 = localhost\\nDNS.3 = ${FQ_RAY_IP}\\nDNS.4 = $(awk 'END{print $1}' /etc/hosts)\">./domain.ext && cp /home/ray/workspace/ca/* . && openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext" +# ) +# assert ( +# worker_group_spec[0]["template"]["spec"]["initContainers"][0]["volumeMounts"] +# == volume_mounts +# ) +# assert worker_group_spec[0]["template"]["spec"]["volumes"] == volumes + +# # 3. Required Envs to enable TLS encryption between head and workers +# for i in range(len(tls_env)): +# assert ( +# head_group_spec["template"]["spec"]["containers"][0]["env"][i + 1]["name"] +# == tls_env[i]["name"] +# ) +# assert ( +# head_group_spec["template"]["spec"]["containers"][0]["env"][i + 1]["value"] +# == tls_env[i]["value"] +# ) +# assert ( +# worker_group_spec[0]["template"]["spec"]["containers"][0]["env"][i + 1][ +# "name" +# ] +# == tls_env[i]["name"] +# ) +# assert ( +# worker_group_spec[0]["template"]["spec"]["containers"][0]["env"][i + 1][ +# "value" +# ] +# == tls_env[i]["value"] +# ) + +# # 4. 
Secret with ca.crt and ca.key +# assert ca_secret["kind"] == "Secret" +# assert ca_secret["data"]["ca.crt"] != None +# assert ca_secret["data"]["ca.key"] != None +# assert ca_secret["metadata"]["name"] == f"ca-secret-{cluster_name}" +# assert ca_secret["metadata"]["namespace"] == namespace def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): @@ -3159,7 +3124,6 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): ClusterConfiguration( "test_cluster", image="quay.io/project-codeflare/ray:latest-py39-cu118", - ingress_domain="apps.cluster.awsroute.org", write_to_file=True, ) ) diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 36c25c69..31328338 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -46,7 +46,6 @@ def createClusterConfig(): instascale=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], - ingress_domain="apps.cluster.awsroute.org", image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=True, ) From 3d7d5c53ff7d31a65d2970903fff698de9f9e644 Mon Sep 17 00:00:00 2001 From: Eoin Gallinagh Date: Thu, 4 Apr 2024 16:17:33 +0100 Subject: [PATCH 170/496] remove: GPT guided demo --- demo-notebooks/guided-demos/4_gpt.ipynb | 237 ------ demo-notebooks/guided-demos/gpt_og.py | 728 ------------------ .../notebook-ex-outputs/4_gpt.ipynb | 395 ---------- .../notebook-ex-outputs/gpt_og.py | 728 ------------------ .../notebook-ex-outputs/gptfttest.yaml | 185 ----- .../notebook-ex-outputs/requirements_gpt.txt | 8 - .../guided-demos/preview_nbs/4_gpt.ipynb | 237 ------ .../guided-demos/preview_nbs/gpt_og.py | 728 ------------------ .../preview_nbs/requirements_gpt.txt | 8 - .../guided-demos/requirements_gpt.txt | 8 - 10 files changed, 3262 deletions(-) delete mode 100644 demo-notebooks/guided-demos/4_gpt.ipynb delete mode 100644 demo-notebooks/guided-demos/gpt_og.py delete mode 100644 demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb delete mode 100644 demo-notebooks/guided-demos/notebook-ex-outputs/gpt_og.py delete mode 100644 demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml delete mode 100644 demo-notebooks/guided-demos/notebook-ex-outputs/requirements_gpt.txt delete mode 100644 demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb delete mode 100644 demo-notebooks/guided-demos/preview_nbs/gpt_og.py delete mode 100644 demo-notebooks/guided-demos/preview_nbs/requirements_gpt.txt delete mode 100644 demo-notebooks/guided-demos/requirements_gpt.txt diff --git a/demo-notebooks/guided-demos/4_gpt.ipynb b/demo-notebooks/guided-demos/4_gpt.ipynb deleted file mode 100644 index a4e1f366..00000000 --- a/demo-notebooks/guided-demos/4_gpt.ipynb +++ /dev/null @@ -1,237 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "b6c05b69-4ce8-45ef-82d3-bacb2491bee8", - "metadata": {}, - "outputs": [], - "source": [ - "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32f99bbd-9903-4d38-a4f2-223dec684ae2", - "metadata": {}, - "outputs": [], - "source": [ - "# Create authentication object for user permissions\n", - "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", - "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "auth = TokenAuthentication(\n", - " token = \"XXXXX\",\n", - " server = \"XXXXX\",\n", - " skip_tls=False\n", 
- ")\n", - "auth.login()" - ] - }, - { - "cell_type": "markdown", - "id": "8f4b200f", - "metadata": {}, - "source": [ - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3f32119a-c4ee-4163-b103-d9ca3bddbdb5", - "metadata": {}, - "outputs": [], - "source": [ - "cluster = Cluster(ClusterConfiguration(\n", - " name='gptfttest',\n", - " namespace='default',\n", - " num_workers=2,\n", - " min_cpus=2,\n", - " max_cpus=2,\n", - " min_memory=8,\n", - " max_memory=8,\n", - " num_gpus=1,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=True, #<---instascale enabled\n", - " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"],\n", - "))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "107c8277-3b3b-4238-a786-a391a662fd7c", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.up()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "730f66ce-adaa-4709-b9cf-22417847e059", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.wait_ready()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "48fac218-2f22-428b-9228-137a4bb0e666", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.details()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9ed5bd75-4230-4c7c-a9e2-0f247890e62a", - "metadata": {}, - "outputs": [], - "source": [ - "from codeflare_sdk import DDPJobDefinition" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "611d203a-35aa-4357-a748-1d01b022fcdb", - "metadata": {}, - "outputs": [], - "source": [ - "arg_list = [\n", - " \"--model_name_or_path\", \"gpt2\",\n", - " \"--dataset_name\", \"wikitext\",\n", - " \"--dataset_config_name\", \"wikitext-2-raw-v1\",\n", - " \"--per_device_train_batch_size\", \"2\",\n", - " \"--per_device_eval_batch_size\", \"2\",\n", - " \"--do_train\",\n", - " \"--do_eval\",\n", - " \"--output_dir\", \"/tmp/test-clm\",\n", - " \"--overwrite_output_dir\"\n", - "]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8ac7c34f-e227-44c2-a4b1-a57c853ac3a7", - "metadata": {}, - "outputs": [], - "source": [ - "jobdef = DDPJobDefinition(\n", - " name=\"gpttest\",\n", - " script=\"gpt_og.py\",\n", - " script_args=arg_list,\n", - " scheduler_args={\"requirements\": \"requirements_gpt.txt\"}\n", - ")\n", - "job = jobdef.submit(cluster)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1680d287-de46-45f8-b95a-02ba3c83912c", - "metadata": {}, - "outputs": [], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "markdown", - "id": "80bc1961", - "metadata": {}, - "source": [ - "Retrieve raw log output at anytime with:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d25d6198-9941-47e8-857f-9811830cc854", - "metadata": {}, - "outputs": [], - "source": [ - "job.logs()" - ] - }, - { - "cell_type": "markdown", - "id": "876b96b5", - "metadata": {}, - "source": [ - "View live updates for status, logs, and other information with:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "58f8a2e8", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.cluster_dashboard_uri()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d7c13eab", - "metadata": {}, - "outputs": [], - "source": [ - "job.status()" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "id": "beb1a6b9-d9b3-49b7-b036-09f1d3569b59", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.down()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8398d977-db24-46d0-a7d2-b4e9197808d7", - "metadata": {}, - "outputs": [], - "source": [ - "auth.logout()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/demo-notebooks/guided-demos/gpt_og.py b/demo-notebooks/guided-demos/gpt_og.py deleted file mode 100644 index d69e41fc..00000000 --- a/demo-notebooks/guided-demos/gpt_og.py +++ /dev/null @@ -1,728 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# Copyright 2020 The HuggingFace Inc. team. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Fine-tuning the library models for causal language modeling (GPT, GPT-2, CTRL, ...) on a text file or a dataset. - -Here is the full list of checkpoints on the hub that can be fine-tuned by this script: -https://huggingface.co/models?filter=text-generation -""" -# You can also adapt this script on your own causal language modeling task. Pointers for this are left as comments. - -import subprocess - -subprocess.run(["pip", "uninstall", "protobuf"]) -subprocess.run( - [ - "pip", - "install", - "--upgrade", - "--target=/home/ray/workspace", - "-r", - "requirements.txt", - ] -) - -import logging -import math -import os -import sys -from dataclasses import dataclass, field -from itertools import chain -from typing import Optional - -import datasets -import evaluate -import torch -from datasets import load_dataset - -import transformers -from transformers import ( - CONFIG_MAPPING, - MODEL_FOR_CAUSAL_LM_MAPPING, - AutoConfig, - AutoModelForCausalLM, - AutoTokenizer, - HfArgumentParser, - Trainer, - TrainingArguments, - default_data_collator, - is_torch_tpu_available, - set_seed, -) -from transformers.testing_utils import CaptureLogger -from transformers.trainer_utils import get_last_checkpoint -from transformers.utils import check_min_version, send_example_telemetry -from transformers.utils.versions import require_version - - -# Will error if the minimal version of Transformers is not installed. Remove at your own risks. 
-# check_min_version("4.29.0.dev0") - -require_version( - "datasets>=1.8.0", - "To fix: pip install -r examples/pytorch/language-modeling/requirements.txt", -) - -logger = logging.getLogger(__name__) - - -MODEL_CONFIG_CLASSES = list(MODEL_FOR_CAUSAL_LM_MAPPING.keys()) -MODEL_TYPES = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES) - - -@dataclass -class ModelArguments: - """ - Arguments pertaining to which model/config/tokenizer we are going to fine-tune, or train from scratch. - """ - - model_name_or_path: Optional[str] = field( - default=None, - metadata={ - "help": ( - "The model checkpoint for weights initialization.Don't set if you want to train a model from scratch." - ) - }, - ) - model_type: Optional[str] = field( - default=None, - metadata={ - "help": "If training from scratch, pass a model type from the list: " - + ", ".join(MODEL_TYPES) - }, - ) - config_overrides: Optional[str] = field( - default=None, - metadata={ - "help": ( - "Override some existing default config settings when a model is trained from scratch. Example: " - "n_embd=10,resid_pdrop=0.2,scale_attn_weights=false,summary_type=cls_index" - ) - }, - ) - config_name: Optional[str] = field( - default=None, - metadata={ - "help": "Pretrained config name or path if not the same as model_name" - }, - ) - tokenizer_name: Optional[str] = field( - default=None, - metadata={ - "help": "Pretrained tokenizer name or path if not the same as model_name" - }, - ) - cache_dir: Optional[str] = field( - default=None, - metadata={ - "help": "Where do you want to store the pretrained models downloaded from huggingface.co" - }, - ) - use_fast_tokenizer: bool = field( - default=True, - metadata={ - "help": "Whether to use one of the fast tokenizer (backed by the tokenizers library) or not." - }, - ) - model_revision: str = field( - default="main", - metadata={ - "help": "The specific model version to use (can be a branch name, tag name or commit id)." - }, - ) - use_auth_token: bool = field( - default=False, - metadata={ - "help": ( - "Will use the token generated when running `huggingface-cli login` (necessary to use this script " - "with private models)." - ) - }, - ) - torch_dtype: Optional[str] = field( - default=None, - metadata={ - "help": ( - "Override the default `torch.dtype` and load the model under this dtype. If `auto` is passed, the " - "dtype will be automatically derived from the model's weights." - ), - "choices": ["auto", "bfloat16", "float16", "float32"], - }, - ) - low_cpu_mem_usage: bool = field( - default=False, - metadata={ - "help": ( - "It is an option to create the model as an empty shell, then only materialize its parameters when the pretrained weights are loaded." - "set True will benefit LLM loading time and RAM consumption." - ) - }, - ) - - def __post_init__(self): - if self.config_overrides is not None and ( - self.config_name is not None or self.model_name_or_path is not None - ): - raise ValueError( - "--config_overrides can't be used in combination with --config_name or --model_name_or_path" - ) - - -@dataclass -class DataTrainingArguments: - """ - Arguments pertaining to what data we are going to input our model for training and eval. - """ - - dataset_name: Optional[str] = field( - default=None, - metadata={"help": "The name of the dataset to use (via the datasets library)."}, - ) - dataset_config_name: Optional[str] = field( - default=None, - metadata={ - "help": "The configuration name of the dataset to use (via the datasets library)." 
- }, - ) - train_file: Optional[str] = field( - default=None, metadata={"help": "The input training data file (a text file)."} - ) - validation_file: Optional[str] = field( - default=None, - metadata={ - "help": "An optional input evaluation data file to evaluate the perplexity on (a text file)." - }, - ) - max_train_samples: Optional[int] = field( - default=None, - metadata={ - "help": ( - "For debugging purposes or quicker training, truncate the number of training examples to this " - "value if set." - ) - }, - ) - max_eval_samples: Optional[int] = field( - default=None, - metadata={ - "help": ( - "For debugging purposes or quicker training, truncate the number of evaluation examples to this " - "value if set." - ) - }, - ) - streaming: bool = field(default=False, metadata={"help": "Enable streaming mode"}) - block_size: Optional[int] = field( - default=None, - metadata={ - "help": ( - "Optional input sequence length after tokenization. " - "The training dataset will be truncated in block of this size for training. " - "Default to the model max input length for single sentence inputs (take into account special tokens)." - ) - }, - ) - overwrite_cache: bool = field( - default=False, - metadata={"help": "Overwrite the cached training and evaluation sets"}, - ) - validation_split_percentage: Optional[int] = field( - default=5, - metadata={ - "help": "The percentage of the train set used as validation set in case there's no validation split" - }, - ) - preprocessing_num_workers: Optional[int] = field( - default=None, - metadata={"help": "The number of processes to use for the preprocessing."}, - ) - keep_linebreaks: bool = field( - default=True, - metadata={"help": "Whether to keep line breaks when using TXT files or not."}, - ) - - def __post_init__(self): - if self.streaming: - require_version( - "datasets>=2.0.0", "The streaming feature requires `datasets>=2.0.0`" - ) - - if ( - self.dataset_name is None - and self.train_file is None - and self.validation_file is None - ): - raise ValueError( - "Need either a dataset name or a training/validation file." - ) - else: - if self.train_file is not None: - extension = self.train_file.split(".")[-1] - assert extension in [ - "csv", - "json", - "txt", - ], "`train_file` should be a csv, a json or a txt file." - if self.validation_file is not None: - extension = self.validation_file.split(".")[-1] - assert extension in [ - "csv", - "json", - "txt", - ], "`validation_file` should be a csv, a json or a txt file." - - -def main(): - # See all possible arguments in src/transformers/training_args.py - # or by passing the --help flag to this script. - # We now keep distinct sets of args, for a cleaner separation of concerns. - - parser = HfArgumentParser( - (ModelArguments, DataTrainingArguments, TrainingArguments) - ) - if len(sys.argv) == 2 and sys.argv[1].endswith(".json"): - # If we pass only one argument to the script and it's the path to a json file, - # let's parse it to get our arguments. - model_args, data_args, training_args = parser.parse_json_file( - json_file=os.path.abspath(sys.argv[1]) - ) - else: - model_args, data_args, training_args = parser.parse_args_into_dataclasses() - - # Sending telemetry. Tracking the example usage helps us better allocate resources to maintain them. The - # information sent is the one passed as arguments along with your Python/PyTorch versions. 
- send_example_telemetry("run_clm", model_args, data_args) - - # Setup logging - logging.basicConfig( - format="%(asctime)s - %(levelname)s - %(name)s - %(message)s", - datefmt="%m/%d/%Y %H:%M:%S", - handlers=[logging.StreamHandler(sys.stdout)], - ) - - if training_args.should_log: - # The default of training_args.log_level is passive, so we set log level at info here to have that default. - transformers.utils.logging.set_verbosity_info() - - log_level = training_args.get_process_log_level() - logger.setLevel(log_level) - datasets.utils.logging.set_verbosity(log_level) - transformers.utils.logging.set_verbosity(log_level) - transformers.utils.logging.enable_default_handler() - transformers.utils.logging.enable_explicit_format() - - # Log on each process the small summary: - logger.warning( - f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" - + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" - ) - logger.info(f"Training/evaluation parameters {training_args}") - - # Detecting last checkpoint. - last_checkpoint = None - if ( - os.path.isdir(training_args.output_dir) - and training_args.do_train - and not training_args.overwrite_output_dir - ): - last_checkpoint = get_last_checkpoint(training_args.output_dir) - if last_checkpoint is None and len(os.listdir(training_args.output_dir)) > 0: - raise ValueError( - f"Output directory ({training_args.output_dir}) already exists and is not empty. " - "Use --overwrite_output_dir to overcome." - ) - elif ( - last_checkpoint is not None and training_args.resume_from_checkpoint is None - ): - logger.info( - f"Checkpoint detected, resuming training at {last_checkpoint}. To avoid this behavior, change " - "the `--output_dir` or add `--overwrite_output_dir` to train from scratch." - ) - - # Set seed before initializing model. - set_seed(training_args.seed) - - # Get the datasets: you can either provide your own CSV/JSON/TXT training and evaluation files (see below) - # or just provide the name of one of the public datasets available on the hub at https://huggingface.co/datasets/ - # (the dataset will be downloaded automatically from the datasets Hub). - # - # For CSV/JSON files, this script will use the column called 'text' or the first column if no column called - # 'text' is found. You can easily tweak this behavior (see below). - # - # In distributed training, the load_dataset function guarantee that only one local process can concurrently - # download the dataset. - if data_args.dataset_name is not None: - # Downloading and loading a dataset from the hub. 
- raw_datasets = load_dataset( - data_args.dataset_name, - data_args.dataset_config_name, - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - streaming=data_args.streaming, - ) - if "validation" not in raw_datasets.keys(): - raw_datasets["validation"] = load_dataset( - data_args.dataset_name, - data_args.dataset_config_name, - split=f"train[:{data_args.validation_split_percentage}%]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - streaming=data_args.streaming, - ) - raw_datasets["train"] = load_dataset( - data_args.dataset_name, - data_args.dataset_config_name, - split=f"train[{data_args.validation_split_percentage}%:]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - streaming=data_args.streaming, - ) - else: - data_files = {} - dataset_args = {} - if data_args.train_file is not None: - data_files["train"] = data_args.train_file - if data_args.validation_file is not None: - data_files["validation"] = data_args.validation_file - extension = ( - data_args.train_file.split(".")[-1] - if data_args.train_file is not None - else data_args.validation_file.split(".")[-1] - ) - if extension == "txt": - extension = "text" - dataset_args["keep_linebreaks"] = data_args.keep_linebreaks - raw_datasets = load_dataset( - extension, - data_files=data_files, - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - **dataset_args, - ) - # If no validation data is there, validation_split_percentage will be used to divide the dataset. - if "validation" not in raw_datasets.keys(): - raw_datasets["validation"] = load_dataset( - extension, - data_files=data_files, - split=f"train[:{data_args.validation_split_percentage}%]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - **dataset_args, - ) - raw_datasets["train"] = load_dataset( - extension, - data_files=data_files, - split=f"train[{data_args.validation_split_percentage}%:]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - **dataset_args, - ) - - # See more about loading any type of standard or custom dataset (from files, python dict, pandas DataFrame, etc) at - # https://huggingface.co/docs/datasets/loading_datasets.html. - - # Load pretrained model and tokenizer - # - # Distributed training: - # The .from_pretrained methods guarantee that only one local process can concurrently - # download model & vocab. 
- - config_kwargs = { - "cache_dir": model_args.cache_dir, - "revision": model_args.model_revision, - "use_auth_token": True if model_args.use_auth_token else None, - } - if model_args.config_name: - config = AutoConfig.from_pretrained(model_args.config_name, **config_kwargs) - elif model_args.model_name_or_path: - config = AutoConfig.from_pretrained( - model_args.model_name_or_path, **config_kwargs - ) - else: - config = CONFIG_MAPPING[model_args.model_type]() - logger.warning("You are instantiating a new config instance from scratch.") - if model_args.config_overrides is not None: - logger.info(f"Overriding config: {model_args.config_overrides}") - config.update_from_string(model_args.config_overrides) - logger.info(f"New config: {config}") - - tokenizer_kwargs = { - "cache_dir": model_args.cache_dir, - "use_fast": model_args.use_fast_tokenizer, - "revision": model_args.model_revision, - "use_auth_token": True if model_args.use_auth_token else None, - } - if model_args.tokenizer_name: - tokenizer = AutoTokenizer.from_pretrained( - model_args.tokenizer_name, **tokenizer_kwargs - ) - elif model_args.model_name_or_path: - tokenizer = AutoTokenizer.from_pretrained( - model_args.model_name_or_path, **tokenizer_kwargs - ) - else: - raise ValueError( - "You are instantiating a new tokenizer from scratch. This is not supported by this script." - "You can do it from another script, save it, and load it from here, using --tokenizer_name." - ) - - if model_args.model_name_or_path: - torch_dtype = ( - model_args.torch_dtype - if model_args.torch_dtype in ["auto", None] - else getattr(torch, model_args.torch_dtype) - ) - model = AutoModelForCausalLM.from_pretrained( - model_args.model_name_or_path, - from_tf=bool(".ckpt" in model_args.model_name_or_path), - config=config, - cache_dir=model_args.cache_dir, - revision=model_args.model_revision, - use_auth_token=True if model_args.use_auth_token else None, - torch_dtype=torch_dtype, - low_cpu_mem_usage=model_args.low_cpu_mem_usage, - ) - else: - model = AutoModelForCausalLM.from_config(config) - n_params = sum({p.data_ptr(): p.numel() for p in model.parameters()}.values()) - logger.info( - f"Training new model from scratch - Total size={n_params/2**20:.2f}M params" - ) - - # We resize the embeddings only when necessary to avoid index errors. If you are creating a model from scratch - # on a small vocab and want a smaller embedding size, remove this test. - embedding_size = model.get_input_embeddings().weight.shape[0] - if len(tokenizer) > embedding_size: - model.resize_token_embeddings(len(tokenizer)) - - # Preprocessing the datasets. - # First we tokenize all the texts. - if training_args.do_train: - column_names = list(raw_datasets["train"].features) - else: - column_names = list(raw_datasets["validation"].features) - text_column_name = "text" if "text" in column_names else column_names[0] - - # since this will be pickled to avoid _LazyModule error in Hasher force logger loading before tokenize_function - tok_logger = transformers.utils.logging.get_logger( - "transformers.tokenization_utils_base" - ) - - def tokenize_function(examples): - with CaptureLogger(tok_logger) as cl: - output = tokenizer(examples[text_column_name]) - # clm input could be much much longer than block_size - if "Token indices sequence length is longer than the" in cl.out: - tok_logger.warning( - "^^^^^^^^^^^^^^^^ Please ignore the warning above - this long input will be chunked into smaller bits" - " before being passed to the model." 
- ) - return output - - with training_args.main_process_first(desc="dataset map tokenization"): - if not data_args.streaming: - tokenized_datasets = raw_datasets.map( - tokenize_function, - batched=True, - num_proc=data_args.preprocessing_num_workers, - remove_columns=column_names, - load_from_cache_file=not data_args.overwrite_cache, - desc="Running tokenizer on dataset", - ) - else: - tokenized_datasets = raw_datasets.map( - tokenize_function, - batched=True, - remove_columns=column_names, - ) - - if data_args.block_size is None: - block_size = tokenizer.model_max_length - if block_size > 1024: - logger.warning( - "The chosen tokenizer supports a `model_max_length` that is longer than the default `block_size` value" - " of 1024. If you would like to use a longer `block_size` up to `tokenizer.model_max_length` you can" - " override this default with `--block_size xxx`." - ) - block_size = 1024 - else: - if data_args.block_size > tokenizer.model_max_length: - logger.warning( - f"The block_size passed ({data_args.block_size}) is larger than the maximum length for the model" - f"({tokenizer.model_max_length}). Using block_size={tokenizer.model_max_length}." - ) - block_size = min(data_args.block_size, tokenizer.model_max_length) - - # Main data processing function that will concatenate all texts from our dataset and generate chunks of block_size. - def group_texts(examples): - # Concatenate all texts. - concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()} - total_length = len(concatenated_examples[list(examples.keys())[0]]) - # We drop the small remainder, we could add padding if the model supported it instead of this drop, you can - # customize this part to your needs. - if total_length >= block_size: - total_length = (total_length // block_size) * block_size - # Split by chunks of max_len. - result = { - k: [t[i : i + block_size] for i in range(0, total_length, block_size)] - for k, t in concatenated_examples.items() - } - result["labels"] = result["input_ids"].copy() - return result - - # Note that with `batched=True`, this map processes 1,000 texts together, so group_texts throws away a remainder - # for each of those groups of 1,000 texts. You can adjust that batch_size here but a higher value might be slower - # to preprocess. - # - # To speed up this part, we use multiprocessing. 
See the documentation of the map method for more information: - # https://huggingface.co/docs/datasets/package_reference/main_classes.html#datasets.Dataset.map - - with training_args.main_process_first(desc="grouping texts together"): - if not data_args.streaming: - lm_datasets = tokenized_datasets.map( - group_texts, - batched=True, - num_proc=data_args.preprocessing_num_workers, - load_from_cache_file=not data_args.overwrite_cache, - desc=f"Grouping texts in chunks of {block_size}", - ) - else: - lm_datasets = tokenized_datasets.map( - group_texts, - batched=True, - ) - - if training_args.do_train: - if "train" not in tokenized_datasets: - raise ValueError("--do_train requires a train dataset") - train_dataset = lm_datasets["train"] - if data_args.max_train_samples is not None: - max_train_samples = min(len(train_dataset), data_args.max_train_samples) - train_dataset = train_dataset.select(range(max_train_samples)) - - if training_args.do_eval: - if "validation" not in tokenized_datasets: - raise ValueError("--do_eval requires a validation dataset") - eval_dataset = lm_datasets["validation"] - if data_args.max_eval_samples is not None: - max_eval_samples = min(len(eval_dataset), data_args.max_eval_samples) - eval_dataset = eval_dataset.select(range(max_eval_samples)) - - def preprocess_logits_for_metrics(logits, labels): - if isinstance(logits, tuple): - # Depending on the model and config, logits may contain extra tensors, - # like past_key_values, but logits always come first - logits = logits[0] - return logits.argmax(dim=-1) - - metric = evaluate.load("accuracy") - - def compute_metrics(eval_preds): - preds, labels = eval_preds - # preds have the same shape as the labels, after the argmax(-1) has been calculated - # by preprocess_logits_for_metrics but we need to shift the labels - labels = labels[:, 1:].reshape(-1) - preds = preds[:, :-1].reshape(-1) - return metric.compute(predictions=preds, references=labels) - - # Initialize our Trainer - trainer = Trainer( - model=model, - args=training_args, - train_dataset=train_dataset if training_args.do_train else None, - eval_dataset=eval_dataset if training_args.do_eval else None, - tokenizer=tokenizer, - # Data collator will default to DataCollatorWithPadding, so we change it. 
- data_collator=default_data_collator, - compute_metrics=compute_metrics - if training_args.do_eval and not is_torch_tpu_available() - else None, - preprocess_logits_for_metrics=preprocess_logits_for_metrics - if training_args.do_eval and not is_torch_tpu_available() - else None, - ) - - # Training - if training_args.do_train: - checkpoint = None - if training_args.resume_from_checkpoint is not None: - checkpoint = training_args.resume_from_checkpoint - elif last_checkpoint is not None: - checkpoint = last_checkpoint - train_result = trainer.train(resume_from_checkpoint=checkpoint) - trainer.save_model() # Saves the tokenizer too for easy upload - - metrics = train_result.metrics - - max_train_samples = ( - data_args.max_train_samples - if data_args.max_train_samples is not None - else len(train_dataset) - ) - metrics["train_samples"] = min(max_train_samples, len(train_dataset)) - - trainer.log_metrics("train", metrics) - trainer.save_metrics("train", metrics) - trainer.save_state() - - # Evaluation - if training_args.do_eval: - logger.info("*** Evaluate ***") - - metrics = trainer.evaluate() - - max_eval_samples = ( - data_args.max_eval_samples - if data_args.max_eval_samples is not None - else len(eval_dataset) - ) - metrics["eval_samples"] = min(max_eval_samples, len(eval_dataset)) - try: - perplexity = math.exp(metrics["eval_loss"]) - except OverflowError: - perplexity = float("inf") - metrics["perplexity"] = perplexity - - trainer.log_metrics("eval", metrics) - trainer.save_metrics("eval", metrics) - - kwargs = { - "finetuned_from": model_args.model_name_or_path, - "tasks": "text-generation", - } - if data_args.dataset_name is not None: - kwargs["dataset_tags"] = data_args.dataset_name - if data_args.dataset_config_name is not None: - kwargs["dataset_args"] = data_args.dataset_config_name - kwargs[ - "dataset" - ] = f"{data_args.dataset_name} {data_args.dataset_config_name}" - else: - kwargs["dataset"] = data_args.dataset_name - - if training_args.push_to_hub: - trainer.push_to_hub(**kwargs) - else: - trainer.create_model_card(**kwargs) - - -def _mp_fn(index): - # For xla_spawn (TPUs) - main() - - -if __name__ == "__main__": - main() diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb deleted file mode 100644 index 3f8afa35..00000000 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/4_gpt.ipynb +++ /dev/null @@ -1,395 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 1, - "id": "b6c05b69-4ce8-45ef-82d3-bacb2491bee8", - "metadata": {}, - "outputs": [], - "source": [ - "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32f99bbd-9903-4d38-a4f2-223dec684ae2", - "metadata": {}, - "outputs": [], - "source": [ - "# Create authentication object for user permissions\n", - "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", - "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "auth = TokenAuthentication(\n", - " token = \"XXXXX\",\n", - " server = \"XXXXX\",\n", - " skip_tls=False\n", - ")\n", - "auth.login()" - ] - }, - { - "cell_type": "markdown", - "id": "b43e8e21", - "metadata": {}, - "source": [ - "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. 
\n", - "The example here is a community image." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "3f32119a-c4ee-4163-b103-d9ca3bddbdb5", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Written to: gptfttest.yaml\n" - ] - } - ], - "source": [ - "cluster = Cluster(ClusterConfiguration(\n", - " name='gptfttest',\n", - " namespace='default',\n", - " num_workers=2,\n", - " min_cpus=2,\n", - " max_cpus=2,\n", - " min_memory=8,\n", - " max_memory=8,\n", - " num_gpus=1,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=True, #<---instascale enabled\n", - " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"],\n", - "))" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "107c8277-3b3b-4238-a786-a391a662fd7c", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.up()" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "730f66ce-adaa-4709-b9cf-22417847e059", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Waiting for requested resources to be set up...\n", - "Requested cluster up and running!\n" - ] - } - ], - "source": [ - "cluster.wait_ready()" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "48fac218-2f22-428b-9228-137a4bb0e666", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
                         🚀 CodeFlare Cluster Details 🚀                  \n",
      -       "                                                                    \n",
      -       " ╭────────────────────────────────────────────────────────────────╮ \n",
      -       " │   Name                                                         │ \n",
      -       " │   gptfttest                                        Active ✅   │ \n",
      -       " │                                                                │ \n",
      -       " │   URI: ray://gptfttest-head-svc.default.svc:10001              │ \n",
      -       " │                                                                │ \n",
      -       " │   Dashboard🔗                                                  │ \n",
      -       " │                                                                │ \n",
      -       " │                       Cluster Resources                        │ \n",
      -       " │   ╭── Workers ──╮  ╭───────── Worker specs(each) ─────────╮    │ \n",
      -       " │   │  # Workers  │  │  Memory      CPU         GPU         │    │ \n",
      -       " │   │             │  │                                      │    │ \n",
      -       " │   │  2          │  │  8~8         2           1           │    │ \n",
      -       " │   │             │  │                                      │    │ \n",
      -       " │   ╰─────────────╯  ╰──────────────────────────────────────╯    │ \n",
      -       " ╰────────────────────────────────────────────────────────────────╯ \n",
      -       "
      \n" - ], - "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", - " ╭────────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mName\u001b[0m │ \n", - " │ \u001b[1;4mgptfttest\u001b[0m Active ✅ │ \n", - " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://gptfttest-head-svc.default.svc:10001 │ \n", - " │ │ \n", - " │ \u001b]8;id=476902;http://ray-dashboard-gptfttest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", - " │ │ \n", - " │ \u001b[3m Cluster Resources \u001b[0m │ \n", - " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m8~8 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", - " ╰────────────────────────────────────────────────────────────────╯ \n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "RayCluster(name='gptfttest', status=, workers=2, worker_mem_min=8, worker_mem_max=8, worker_cpu=2, worker_gpu=1, namespace='default', dashboard='http://ray-dashboard-gptfttest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org')" - ] - }, - "execution_count": 5, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "cluster.details()" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "9ed5bd75-4230-4c7c-a9e2-0f247890e62a", - "metadata": {}, - "outputs": [], - "source": [ - "from codeflare_sdk import DDPJobDefinition" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "611d203a-35aa-4357-a748-1d01b022fcdb", - "metadata": {}, - "outputs": [], - "source": [ - "arg_list = [\n", - " \"--model_name_or_path\", \"gpt2\",\n", - " \"--dataset_name\", \"wikitext\",\n", - " \"--dataset_config_name\", \"wikitext-2-raw-v1\",\n", - " \"--per_device_train_batch_size\", \"2\",\n", - " \"--per_device_eval_batch_size\", \"2\",\n", - " \"--do_train\",\n", - " \"--do_eval\",\n", - " \"--output_dir\", \"/tmp/test-clm\",\n", - " \"--overwrite_output_dir\"\n", - "]" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "8ac7c34f-e227-44c2-a4b1-a57c853ac3a7", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "The Ray scheduler does not support port mapping.\n" - ] - } - ], - "source": [ - "jobdef = DDPJobDefinition(\n", - " 
name=\"gpttest\",\n", - " script=\"gpt_og.py\",\n", - " script_args=arg_list,\n", - " scheduler_args={\"requirements\": \"requirements_gpt.txt\"}\n", - ")\n", - "job = jobdef.submit(cluster)" - ] - }, - { - "cell_type": "code", - "execution_count": 16, - "id": "1680d287-de46-45f8-b95a-02ba3c83912c", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "AppStatus:\n", - " msg: !!python/object/apply:ray.dashboard.modules.job.common.JobStatus\n", - " - RUNNING\n", - " num_restarts: -1\n", - " roles:\n", - " - replicas:\n", - " - hostname: \n", - " id: 0\n", - " role: ray\n", - " state: !!python/object/apply:torchx.specs.api.AppState\n", - " - 3\n", - " structured_error_msg: \n", - " role: ray\n", - " state: RUNNING (3)\n", - " structured_error_msg: \n", - " ui_url: null" - ] - }, - "execution_count": 16, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "markdown", - "id": "8222e884-7091-4b74-bdcf-565f25abed11", - "metadata": {}, - "source": [ - "Retrieve raw log output at anytime with:" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "id": "d25d6198-9941-47e8-857f-9811830cc854", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'[RayActor(name=\\'gpt_og\\', command=[\\'bash\\', \\'-c\\', \"torchrun --rdzv_backend static --rdzv_endpoint $TORCHX_RANK0_HOST:49782 --rdzv_id \\'gpttest-tg69zmd1xf19l\\' --nnodes 2 --nproc_per_node 1 --node_rank \\'0\\' --tee 3 --role \\'\\' gpt_og.py --model_name_or_path gpt2 --dataset_name wikitext --dataset_config_name wikitext-2-raw-v1 --per_device_train_batch_size 2 --per_device_eval_batch_size 2 --do_train --do_eval --output_dir /tmp/test-clm --overwrite_output_dir\"], env={\\'TORCHX_TRACKING_EXPERIMENT_NAME\\': \\'default-experiment\\', \\'LOGLEVEL\\': \\'WARNING\\', \\'TORCHX_JOB_ID\\': \\'ray://torchx/gpttest-tg69zmd1xf19l\\'}, num_cpus=2, num_gpus=1, min_replicas=2), RayActor(name=\\'gpt_og\\', command=[\\'bash\\', \\'-c\\', \"torchrun --rdzv_backend static --rdzv_endpoint $TORCHX_RANK0_HOST:49782 --rdzv_id \\'gpttest-tg69zmd1xf19l\\' --nnodes 2 --nproc_per_node 1 --node_rank \\'1\\' --tee 3 --role \\'\\' gpt_og.py --model_name_or_path gpt2 --dataset_name wikitext --dataset_config_name wikitext-2-raw-v1 --per_device_train_batch_size 2 --per_device_eval_batch_size 2 --do_train --do_eval --output_dir /tmp/test-clm --overwrite_output_dir\"], env={\\'TORCHX_TRACKING_EXPERIMENT_NAME\\': \\'default-experiment\\', \\'LOGLEVEL\\': \\'WARNING\\', \\'TORCHX_JOB_ID\\': \\'ray://torchx/gpttest-tg69zmd1xf19l\\'}, num_cpus=2, num_gpus=1, min_replicas=2)]\\n2023-08-09 15:39:16,608\\tINFO worker.py:1334 -- Using address 10.129.6.9:6379 set in the environment variable RAY_ADDRESS\\n2023-08-09 15:39:16,608\\tINFO worker.py:1452 -- Connecting to existing Ray cluster at address: 10.129.6.9:6379...\\n2023-08-09 15:39:16,667\\tINFO worker.py:1627 -- Connected to Ray cluster. 
View the dashboard at \\x1b[1m\\x1b[32mhttp://10.129.6.9:8265 \\x1b[39m\\x1b[22m\\nWaiting for minimum placement group to start.\\nSuccessfully created placement groups\\nrdzv_endpoint set to 10.128.6.19 for actor b557571d0e8b5ce27f34a7e802000000\\nrdzv_endpoint set to 10.128.6.19 for actor 8826a9b5f730773c7c99241102000000\\nSuccessfully placed command actors\\nEntering main loop, start executing the script on worker nodes\\nrunning ray.wait on [ObjectRef(e082c90ab8422b00b557571d0e8b5ce27f34a7e80200000001000000), ObjectRef(ce868e48e2fa9a948826a9b5f730773c7c9924110200000001000000)]\\nrunning ray.wait on [ObjectRef(ce868e48e2fa9a948826a9b5f730773c7c9924110200000001000000), ObjectRef(f81ec6ff838b16dbb557571d0e8b5ce27f34a7e80200000001000000)]\\nrunning ray.wait on [ObjectRef(f81ec6ff838b16dbb557571d0e8b5ce27f34a7e80200000001000000), ObjectRef(32b0eec39cfa87ac8826a9b5f730773c7c9924110200000001000000)]\\n'" - ] - }, - "execution_count": 21, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.logs()" - ] - }, - { - "cell_type": "markdown", - "id": "92b0a8ec-c2ee-43ce-abd7-368c769abd57", - "metadata": {}, - "source": [ - "View live updates for status, logs, and other information with:" - ] - }, - { - "cell_type": "code", - "execution_count": 22, - "id": "36bfa974-d923-44d2-b079-4c42b66152f7", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'http://ray-dashboard-gptfttest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org'" - ] - }, - "execution_count": 22, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "cluster.cluster_dashboard_uri()" - ] - }, - { - "cell_type": "code", - "execution_count": 24, - "id": "d7b27d0a-b0e3-421c-a3f5-d9db97890f7c", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "AppStatus:\n", - " msg: !!python/object/apply:ray.dashboard.modules.job.common.JobStatus\n", - " - SUCCEEDED\n", - " num_restarts: -1\n", - " roles:\n", - " - replicas:\n", - " - hostname: \n", - " id: 0\n", - " role: ray\n", - " state: !!python/object/apply:torchx.specs.api.AppState\n", - " - 4\n", - " structured_error_msg: \n", - " role: ray\n", - " state: SUCCEEDED (4)\n", - " structured_error_msg: \n", - " ui_url: null" - ] - }, - "execution_count": 24, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "code", - "execution_count": 25, - "id": "beb1a6b9-d9b3-49b7-b036-09f1d3569b59", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.down()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8398d977-db24-46d0-a7d2-b4e9197808d7", - "metadata": {}, - "outputs": [], - "source": [ - "auth.logout()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.17" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/gpt_og.py b/demo-notebooks/guided-demos/notebook-ex-outputs/gpt_og.py deleted file mode 100644 index d69e41fc..00000000 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/gpt_og.py +++ /dev/null @@ -1,728 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# Copyright 2020 The HuggingFace Inc. team. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Fine-tuning the library models for causal language modeling (GPT, GPT-2, CTRL, ...) on a text file or a dataset. - -Here is the full list of checkpoints on the hub that can be fine-tuned by this script: -https://huggingface.co/models?filter=text-generation -""" -# You can also adapt this script on your own causal language modeling task. Pointers for this are left as comments. - -import subprocess - -subprocess.run(["pip", "uninstall", "protobuf"]) -subprocess.run( - [ - "pip", - "install", - "--upgrade", - "--target=/home/ray/workspace", - "-r", - "requirements.txt", - ] -) - -import logging -import math -import os -import sys -from dataclasses import dataclass, field -from itertools import chain -from typing import Optional - -import datasets -import evaluate -import torch -from datasets import load_dataset - -import transformers -from transformers import ( - CONFIG_MAPPING, - MODEL_FOR_CAUSAL_LM_MAPPING, - AutoConfig, - AutoModelForCausalLM, - AutoTokenizer, - HfArgumentParser, - Trainer, - TrainingArguments, - default_data_collator, - is_torch_tpu_available, - set_seed, -) -from transformers.testing_utils import CaptureLogger -from transformers.trainer_utils import get_last_checkpoint -from transformers.utils import check_min_version, send_example_telemetry -from transformers.utils.versions import require_version - - -# Will error if the minimal version of Transformers is not installed. Remove at your own risks. -# check_min_version("4.29.0.dev0") - -require_version( - "datasets>=1.8.0", - "To fix: pip install -r examples/pytorch/language-modeling/requirements.txt", -) - -logger = logging.getLogger(__name__) - - -MODEL_CONFIG_CLASSES = list(MODEL_FOR_CAUSAL_LM_MAPPING.keys()) -MODEL_TYPES = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES) - - -@dataclass -class ModelArguments: - """ - Arguments pertaining to which model/config/tokenizer we are going to fine-tune, or train from scratch. - """ - - model_name_or_path: Optional[str] = field( - default=None, - metadata={ - "help": ( - "The model checkpoint for weights initialization.Don't set if you want to train a model from scratch." - ) - }, - ) - model_type: Optional[str] = field( - default=None, - metadata={ - "help": "If training from scratch, pass a model type from the list: " - + ", ".join(MODEL_TYPES) - }, - ) - config_overrides: Optional[str] = field( - default=None, - metadata={ - "help": ( - "Override some existing default config settings when a model is trained from scratch. 
Example: " - "n_embd=10,resid_pdrop=0.2,scale_attn_weights=false,summary_type=cls_index" - ) - }, - ) - config_name: Optional[str] = field( - default=None, - metadata={ - "help": "Pretrained config name or path if not the same as model_name" - }, - ) - tokenizer_name: Optional[str] = field( - default=None, - metadata={ - "help": "Pretrained tokenizer name or path if not the same as model_name" - }, - ) - cache_dir: Optional[str] = field( - default=None, - metadata={ - "help": "Where do you want to store the pretrained models downloaded from huggingface.co" - }, - ) - use_fast_tokenizer: bool = field( - default=True, - metadata={ - "help": "Whether to use one of the fast tokenizer (backed by the tokenizers library) or not." - }, - ) - model_revision: str = field( - default="main", - metadata={ - "help": "The specific model version to use (can be a branch name, tag name or commit id)." - }, - ) - use_auth_token: bool = field( - default=False, - metadata={ - "help": ( - "Will use the token generated when running `huggingface-cli login` (necessary to use this script " - "with private models)." - ) - }, - ) - torch_dtype: Optional[str] = field( - default=None, - metadata={ - "help": ( - "Override the default `torch.dtype` and load the model under this dtype. If `auto` is passed, the " - "dtype will be automatically derived from the model's weights." - ), - "choices": ["auto", "bfloat16", "float16", "float32"], - }, - ) - low_cpu_mem_usage: bool = field( - default=False, - metadata={ - "help": ( - "It is an option to create the model as an empty shell, then only materialize its parameters when the pretrained weights are loaded." - "set True will benefit LLM loading time and RAM consumption." - ) - }, - ) - - def __post_init__(self): - if self.config_overrides is not None and ( - self.config_name is not None or self.model_name_or_path is not None - ): - raise ValueError( - "--config_overrides can't be used in combination with --config_name or --model_name_or_path" - ) - - -@dataclass -class DataTrainingArguments: - """ - Arguments pertaining to what data we are going to input our model for training and eval. - """ - - dataset_name: Optional[str] = field( - default=None, - metadata={"help": "The name of the dataset to use (via the datasets library)."}, - ) - dataset_config_name: Optional[str] = field( - default=None, - metadata={ - "help": "The configuration name of the dataset to use (via the datasets library)." - }, - ) - train_file: Optional[str] = field( - default=None, metadata={"help": "The input training data file (a text file)."} - ) - validation_file: Optional[str] = field( - default=None, - metadata={ - "help": "An optional input evaluation data file to evaluate the perplexity on (a text file)." - }, - ) - max_train_samples: Optional[int] = field( - default=None, - metadata={ - "help": ( - "For debugging purposes or quicker training, truncate the number of training examples to this " - "value if set." - ) - }, - ) - max_eval_samples: Optional[int] = field( - default=None, - metadata={ - "help": ( - "For debugging purposes or quicker training, truncate the number of evaluation examples to this " - "value if set." - ) - }, - ) - streaming: bool = field(default=False, metadata={"help": "Enable streaming mode"}) - block_size: Optional[int] = field( - default=None, - metadata={ - "help": ( - "Optional input sequence length after tokenization. " - "The training dataset will be truncated in block of this size for training. 
" - "Default to the model max input length for single sentence inputs (take into account special tokens)." - ) - }, - ) - overwrite_cache: bool = field( - default=False, - metadata={"help": "Overwrite the cached training and evaluation sets"}, - ) - validation_split_percentage: Optional[int] = field( - default=5, - metadata={ - "help": "The percentage of the train set used as validation set in case there's no validation split" - }, - ) - preprocessing_num_workers: Optional[int] = field( - default=None, - metadata={"help": "The number of processes to use for the preprocessing."}, - ) - keep_linebreaks: bool = field( - default=True, - metadata={"help": "Whether to keep line breaks when using TXT files or not."}, - ) - - def __post_init__(self): - if self.streaming: - require_version( - "datasets>=2.0.0", "The streaming feature requires `datasets>=2.0.0`" - ) - - if ( - self.dataset_name is None - and self.train_file is None - and self.validation_file is None - ): - raise ValueError( - "Need either a dataset name or a training/validation file." - ) - else: - if self.train_file is not None: - extension = self.train_file.split(".")[-1] - assert extension in [ - "csv", - "json", - "txt", - ], "`train_file` should be a csv, a json or a txt file." - if self.validation_file is not None: - extension = self.validation_file.split(".")[-1] - assert extension in [ - "csv", - "json", - "txt", - ], "`validation_file` should be a csv, a json or a txt file." - - -def main(): - # See all possible arguments in src/transformers/training_args.py - # or by passing the --help flag to this script. - # We now keep distinct sets of args, for a cleaner separation of concerns. - - parser = HfArgumentParser( - (ModelArguments, DataTrainingArguments, TrainingArguments) - ) - if len(sys.argv) == 2 and sys.argv[1].endswith(".json"): - # If we pass only one argument to the script and it's the path to a json file, - # let's parse it to get our arguments. - model_args, data_args, training_args = parser.parse_json_file( - json_file=os.path.abspath(sys.argv[1]) - ) - else: - model_args, data_args, training_args = parser.parse_args_into_dataclasses() - - # Sending telemetry. Tracking the example usage helps us better allocate resources to maintain them. The - # information sent is the one passed as arguments along with your Python/PyTorch versions. - send_example_telemetry("run_clm", model_args, data_args) - - # Setup logging - logging.basicConfig( - format="%(asctime)s - %(levelname)s - %(name)s - %(message)s", - datefmt="%m/%d/%Y %H:%M:%S", - handlers=[logging.StreamHandler(sys.stdout)], - ) - - if training_args.should_log: - # The default of training_args.log_level is passive, so we set log level at info here to have that default. - transformers.utils.logging.set_verbosity_info() - - log_level = training_args.get_process_log_level() - logger.setLevel(log_level) - datasets.utils.logging.set_verbosity(log_level) - transformers.utils.logging.set_verbosity(log_level) - transformers.utils.logging.enable_default_handler() - transformers.utils.logging.enable_explicit_format() - - # Log on each process the small summary: - logger.warning( - f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" - + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" - ) - logger.info(f"Training/evaluation parameters {training_args}") - - # Detecting last checkpoint. 
- last_checkpoint = None - if ( - os.path.isdir(training_args.output_dir) - and training_args.do_train - and not training_args.overwrite_output_dir - ): - last_checkpoint = get_last_checkpoint(training_args.output_dir) - if last_checkpoint is None and len(os.listdir(training_args.output_dir)) > 0: - raise ValueError( - f"Output directory ({training_args.output_dir}) already exists and is not empty. " - "Use --overwrite_output_dir to overcome." - ) - elif ( - last_checkpoint is not None and training_args.resume_from_checkpoint is None - ): - logger.info( - f"Checkpoint detected, resuming training at {last_checkpoint}. To avoid this behavior, change " - "the `--output_dir` or add `--overwrite_output_dir` to train from scratch." - ) - - # Set seed before initializing model. - set_seed(training_args.seed) - - # Get the datasets: you can either provide your own CSV/JSON/TXT training and evaluation files (see below) - # or just provide the name of one of the public datasets available on the hub at https://huggingface.co/datasets/ - # (the dataset will be downloaded automatically from the datasets Hub). - # - # For CSV/JSON files, this script will use the column called 'text' or the first column if no column called - # 'text' is found. You can easily tweak this behavior (see below). - # - # In distributed training, the load_dataset function guarantee that only one local process can concurrently - # download the dataset. - if data_args.dataset_name is not None: - # Downloading and loading a dataset from the hub. - raw_datasets = load_dataset( - data_args.dataset_name, - data_args.dataset_config_name, - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - streaming=data_args.streaming, - ) - if "validation" not in raw_datasets.keys(): - raw_datasets["validation"] = load_dataset( - data_args.dataset_name, - data_args.dataset_config_name, - split=f"train[:{data_args.validation_split_percentage}%]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - streaming=data_args.streaming, - ) - raw_datasets["train"] = load_dataset( - data_args.dataset_name, - data_args.dataset_config_name, - split=f"train[{data_args.validation_split_percentage}%:]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - streaming=data_args.streaming, - ) - else: - data_files = {} - dataset_args = {} - if data_args.train_file is not None: - data_files["train"] = data_args.train_file - if data_args.validation_file is not None: - data_files["validation"] = data_args.validation_file - extension = ( - data_args.train_file.split(".")[-1] - if data_args.train_file is not None - else data_args.validation_file.split(".")[-1] - ) - if extension == "txt": - extension = "text" - dataset_args["keep_linebreaks"] = data_args.keep_linebreaks - raw_datasets = load_dataset( - extension, - data_files=data_files, - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - **dataset_args, - ) - # If no validation data is there, validation_split_percentage will be used to divide the dataset. 
- if "validation" not in raw_datasets.keys(): - raw_datasets["validation"] = load_dataset( - extension, - data_files=data_files, - split=f"train[:{data_args.validation_split_percentage}%]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - **dataset_args, - ) - raw_datasets["train"] = load_dataset( - extension, - data_files=data_files, - split=f"train[{data_args.validation_split_percentage}%:]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - **dataset_args, - ) - - # See more about loading any type of standard or custom dataset (from files, python dict, pandas DataFrame, etc) at - # https://huggingface.co/docs/datasets/loading_datasets.html. - - # Load pretrained model and tokenizer - # - # Distributed training: - # The .from_pretrained methods guarantee that only one local process can concurrently - # download model & vocab. - - config_kwargs = { - "cache_dir": model_args.cache_dir, - "revision": model_args.model_revision, - "use_auth_token": True if model_args.use_auth_token else None, - } - if model_args.config_name: - config = AutoConfig.from_pretrained(model_args.config_name, **config_kwargs) - elif model_args.model_name_or_path: - config = AutoConfig.from_pretrained( - model_args.model_name_or_path, **config_kwargs - ) - else: - config = CONFIG_MAPPING[model_args.model_type]() - logger.warning("You are instantiating a new config instance from scratch.") - if model_args.config_overrides is not None: - logger.info(f"Overriding config: {model_args.config_overrides}") - config.update_from_string(model_args.config_overrides) - logger.info(f"New config: {config}") - - tokenizer_kwargs = { - "cache_dir": model_args.cache_dir, - "use_fast": model_args.use_fast_tokenizer, - "revision": model_args.model_revision, - "use_auth_token": True if model_args.use_auth_token else None, - } - if model_args.tokenizer_name: - tokenizer = AutoTokenizer.from_pretrained( - model_args.tokenizer_name, **tokenizer_kwargs - ) - elif model_args.model_name_or_path: - tokenizer = AutoTokenizer.from_pretrained( - model_args.model_name_or_path, **tokenizer_kwargs - ) - else: - raise ValueError( - "You are instantiating a new tokenizer from scratch. This is not supported by this script." - "You can do it from another script, save it, and load it from here, using --tokenizer_name." - ) - - if model_args.model_name_or_path: - torch_dtype = ( - model_args.torch_dtype - if model_args.torch_dtype in ["auto", None] - else getattr(torch, model_args.torch_dtype) - ) - model = AutoModelForCausalLM.from_pretrained( - model_args.model_name_or_path, - from_tf=bool(".ckpt" in model_args.model_name_or_path), - config=config, - cache_dir=model_args.cache_dir, - revision=model_args.model_revision, - use_auth_token=True if model_args.use_auth_token else None, - torch_dtype=torch_dtype, - low_cpu_mem_usage=model_args.low_cpu_mem_usage, - ) - else: - model = AutoModelForCausalLM.from_config(config) - n_params = sum({p.data_ptr(): p.numel() for p in model.parameters()}.values()) - logger.info( - f"Training new model from scratch - Total size={n_params/2**20:.2f}M params" - ) - - # We resize the embeddings only when necessary to avoid index errors. If you are creating a model from scratch - # on a small vocab and want a smaller embedding size, remove this test. - embedding_size = model.get_input_embeddings().weight.shape[0] - if len(tokenizer) > embedding_size: - model.resize_token_embeddings(len(tokenizer)) - - # Preprocessing the datasets. 
- # First we tokenize all the texts. - if training_args.do_train: - column_names = list(raw_datasets["train"].features) - else: - column_names = list(raw_datasets["validation"].features) - text_column_name = "text" if "text" in column_names else column_names[0] - - # since this will be pickled to avoid _LazyModule error in Hasher force logger loading before tokenize_function - tok_logger = transformers.utils.logging.get_logger( - "transformers.tokenization_utils_base" - ) - - def tokenize_function(examples): - with CaptureLogger(tok_logger) as cl: - output = tokenizer(examples[text_column_name]) - # clm input could be much much longer than block_size - if "Token indices sequence length is longer than the" in cl.out: - tok_logger.warning( - "^^^^^^^^^^^^^^^^ Please ignore the warning above - this long input will be chunked into smaller bits" - " before being passed to the model." - ) - return output - - with training_args.main_process_first(desc="dataset map tokenization"): - if not data_args.streaming: - tokenized_datasets = raw_datasets.map( - tokenize_function, - batched=True, - num_proc=data_args.preprocessing_num_workers, - remove_columns=column_names, - load_from_cache_file=not data_args.overwrite_cache, - desc="Running tokenizer on dataset", - ) - else: - tokenized_datasets = raw_datasets.map( - tokenize_function, - batched=True, - remove_columns=column_names, - ) - - if data_args.block_size is None: - block_size = tokenizer.model_max_length - if block_size > 1024: - logger.warning( - "The chosen tokenizer supports a `model_max_length` that is longer than the default `block_size` value" - " of 1024. If you would like to use a longer `block_size` up to `tokenizer.model_max_length` you can" - " override this default with `--block_size xxx`." - ) - block_size = 1024 - else: - if data_args.block_size > tokenizer.model_max_length: - logger.warning( - f"The block_size passed ({data_args.block_size}) is larger than the maximum length for the model" - f"({tokenizer.model_max_length}). Using block_size={tokenizer.model_max_length}." - ) - block_size = min(data_args.block_size, tokenizer.model_max_length) - - # Main data processing function that will concatenate all texts from our dataset and generate chunks of block_size. - def group_texts(examples): - # Concatenate all texts. - concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()} - total_length = len(concatenated_examples[list(examples.keys())[0]]) - # We drop the small remainder, we could add padding if the model supported it instead of this drop, you can - # customize this part to your needs. - if total_length >= block_size: - total_length = (total_length // block_size) * block_size - # Split by chunks of max_len. - result = { - k: [t[i : i + block_size] for i in range(0, total_length, block_size)] - for k, t in concatenated_examples.items() - } - result["labels"] = result["input_ids"].copy() - return result - - # Note that with `batched=True`, this map processes 1,000 texts together, so group_texts throws away a remainder - # for each of those groups of 1,000 texts. You can adjust that batch_size here but a higher value might be slower - # to preprocess. - # - # To speed up this part, we use multiprocessing. 
See the documentation of the map method for more information: - # https://huggingface.co/docs/datasets/package_reference/main_classes.html#datasets.Dataset.map - - with training_args.main_process_first(desc="grouping texts together"): - if not data_args.streaming: - lm_datasets = tokenized_datasets.map( - group_texts, - batched=True, - num_proc=data_args.preprocessing_num_workers, - load_from_cache_file=not data_args.overwrite_cache, - desc=f"Grouping texts in chunks of {block_size}", - ) - else: - lm_datasets = tokenized_datasets.map( - group_texts, - batched=True, - ) - - if training_args.do_train: - if "train" not in tokenized_datasets: - raise ValueError("--do_train requires a train dataset") - train_dataset = lm_datasets["train"] - if data_args.max_train_samples is not None: - max_train_samples = min(len(train_dataset), data_args.max_train_samples) - train_dataset = train_dataset.select(range(max_train_samples)) - - if training_args.do_eval: - if "validation" not in tokenized_datasets: - raise ValueError("--do_eval requires a validation dataset") - eval_dataset = lm_datasets["validation"] - if data_args.max_eval_samples is not None: - max_eval_samples = min(len(eval_dataset), data_args.max_eval_samples) - eval_dataset = eval_dataset.select(range(max_eval_samples)) - - def preprocess_logits_for_metrics(logits, labels): - if isinstance(logits, tuple): - # Depending on the model and config, logits may contain extra tensors, - # like past_key_values, but logits always come first - logits = logits[0] - return logits.argmax(dim=-1) - - metric = evaluate.load("accuracy") - - def compute_metrics(eval_preds): - preds, labels = eval_preds - # preds have the same shape as the labels, after the argmax(-1) has been calculated - # by preprocess_logits_for_metrics but we need to shift the labels - labels = labels[:, 1:].reshape(-1) - preds = preds[:, :-1].reshape(-1) - return metric.compute(predictions=preds, references=labels) - - # Initialize our Trainer - trainer = Trainer( - model=model, - args=training_args, - train_dataset=train_dataset if training_args.do_train else None, - eval_dataset=eval_dataset if training_args.do_eval else None, - tokenizer=tokenizer, - # Data collator will default to DataCollatorWithPadding, so we change it. 
- data_collator=default_data_collator, - compute_metrics=compute_metrics - if training_args.do_eval and not is_torch_tpu_available() - else None, - preprocess_logits_for_metrics=preprocess_logits_for_metrics - if training_args.do_eval and not is_torch_tpu_available() - else None, - ) - - # Training - if training_args.do_train: - checkpoint = None - if training_args.resume_from_checkpoint is not None: - checkpoint = training_args.resume_from_checkpoint - elif last_checkpoint is not None: - checkpoint = last_checkpoint - train_result = trainer.train(resume_from_checkpoint=checkpoint) - trainer.save_model() # Saves the tokenizer too for easy upload - - metrics = train_result.metrics - - max_train_samples = ( - data_args.max_train_samples - if data_args.max_train_samples is not None - else len(train_dataset) - ) - metrics["train_samples"] = min(max_train_samples, len(train_dataset)) - - trainer.log_metrics("train", metrics) - trainer.save_metrics("train", metrics) - trainer.save_state() - - # Evaluation - if training_args.do_eval: - logger.info("*** Evaluate ***") - - metrics = trainer.evaluate() - - max_eval_samples = ( - data_args.max_eval_samples - if data_args.max_eval_samples is not None - else len(eval_dataset) - ) - metrics["eval_samples"] = min(max_eval_samples, len(eval_dataset)) - try: - perplexity = math.exp(metrics["eval_loss"]) - except OverflowError: - perplexity = float("inf") - metrics["perplexity"] = perplexity - - trainer.log_metrics("eval", metrics) - trainer.save_metrics("eval", metrics) - - kwargs = { - "finetuned_from": model_args.model_name_or_path, - "tasks": "text-generation", - } - if data_args.dataset_name is not None: - kwargs["dataset_tags"] = data_args.dataset_name - if data_args.dataset_config_name is not None: - kwargs["dataset_args"] = data_args.dataset_config_name - kwargs[ - "dataset" - ] = f"{data_args.dataset_name} {data_args.dataset_config_name}" - else: - kwargs["dataset"] = data_args.dataset_name - - if training_args.push_to_hub: - trainer.push_to_hub(**kwargs) - else: - trainer.create_model_card(**kwargs) - - -def _mp_fn(index): - # For xla_spawn (TPUs) - main() - - -if __name__ == "__main__": - main() diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml deleted file mode 100644 index 62737232..00000000 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/gptfttest.yaml +++ /dev/null @@ -1,185 +0,0 @@ -apiVersion: workload.codeflare.dev/v1beta1 -kind: AppWrapper -metadata: - labels: - orderedinstance: m5.xlarge_g4dn.xlarge - name: gptfttest - namespace: default -spec: - priority: 9 - resources: - GenericItems: - - custompodresources: - - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - replicas: 1 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 1 - replicas: 2 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 1 - generictemplate: - apiVersion: ray.io/v1 - kind: RayCluster - metadata: - labels: - appwrapper.mcad.ibm.com: gptfttest - controller-tools.k8s.io: '1.0' - name: gptfttest - namespace: default - spec: - autoscalerOptions: - idleTimeoutSeconds: 60 - imagePullPolicy: Always - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 500m - memory: 512Mi - upscalingMode: Default - enableInTreeAutoscaling: false - headGroupSpec: - rayStartParams: - block: 'true' - dashboard-host: 0.0.0.0 - num-gpus: '0' - serviceType: ClusterIP - template: - spec: - affinity: - nodeAffinity: - 
requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: gptfttest - operator: In - values: - - gptfttest - containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: ray-head - ports: - - containerPort: 6379 - name: gcs - - containerPort: 8265 - name: dashboard - - containerPort: 10001 - name: client - resources: - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - imagePullSecrets: [] - rayVersion: 2.1.0 - workerGroupSpecs: - - groupName: small-group-gptfttest - maxReplicas: 2 - minReplicas: 2 - rayStartParams: - block: 'true' - num-gpus: '1' - replicas: 2 - template: - metadata: - annotations: - key: value - labels: - key: value - spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: gptfttest - operator: In - values: - - gptfttest - containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: machine-learning - resources: - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 1 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 1 - imagePullSecrets: [] - replicas: 1 - - generictemplate: - apiVersion: route.openshift.io/v1 - kind: Route - metadata: - labels: - odh-ray-cluster-service: gptfttest-head-svc - name: ray-dashboard-gptfttest - namespace: default - spec: - port: - targetPort: dashboard - to: - kind: Service - name: gptfttest-head-svc - replicas: 1 - Items: [] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements_gpt.txt b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements_gpt.txt deleted file mode 100644 index bd6c4f52..00000000 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements_gpt.txt +++ /dev/null @@ -1,8 +0,0 @@ -accelerate >= 0.12.0 -torch >= 1.3 -datasets >= 1.8.0 -sentencepiece != 0.1.92 -evaluate -scikit-learn -transformers==4.28.1 -protobuf<=3.20.1,>=3.8.0 diff --git a/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb b/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb deleted file mode 100644 index e0407e8c..00000000 --- a/demo-notebooks/guided-demos/preview_nbs/4_gpt.ipynb +++ /dev/null @@ -1,237 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "b6c05b69-4ce8-45ef-82d3-bacb2491bee8", - "metadata": {}, - "outputs": [], - "source": [ - "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "32f99bbd-9903-4d38-a4f2-223dec684ae2", - "metadata": {}, - "outputs": [], - "source": [ - "# Create authentication object for user 
permissions\n", - "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", - "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "auth = TokenAuthentication(\n", - " token = \"XXXXX\",\n", - " server = \"XXXXX\",\n", - " skip_tls=False\n", - ")\n", - "auth.login()" - ] - }, - { - "cell_type": "markdown", - "id": "5e4e9ee9", - "metadata": {}, - "source": [ - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3f32119a-c4ee-4163-b103-d9ca3bddbdb5", - "metadata": {}, - "outputs": [], - "source": [ - "cluster = Cluster(ClusterConfiguration(\n", - " name='gptfttest',\n", - " namespace='default',\n", - " num_workers=2,\n", - " min_cpus=2,\n", - " max_cpus=2,\n", - " min_memory=8,\n", - " max_memory=8,\n", - " num_gpus=1,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=True, #<---instascale enabled\n", - " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"],\n", - "))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "107c8277-3b3b-4238-a786-a391a662fd7c", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.up()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "730f66ce-adaa-4709-b9cf-22417847e059", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.wait_ready()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "48fac218-2f22-428b-9228-137a4bb0e666", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.details()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9ed5bd75-4230-4c7c-a9e2-0f247890e62a", - "metadata": {}, - "outputs": [], - "source": [ - "from codeflare_sdk import DDPJobDefinition" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "611d203a-35aa-4357-a748-1d01b022fcdb", - "metadata": {}, - "outputs": [], - "source": [ - "arg_list = [\n", - " \"--model_name_or_path\", \"gpt2\",\n", - " \"--dataset_name\", \"wikitext\",\n", - " \"--dataset_config_name\", \"wikitext-2-raw-v1\",\n", - " \"--per_device_train_batch_size\", \"2\",\n", - " \"--per_device_eval_batch_size\", \"2\",\n", - " \"--do_train\",\n", - " \"--do_eval\",\n", - " \"--output_dir\", \"/tmp/test-clm\",\n", - " \"--overwrite_output_dir\"\n", - "]" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8ac7c34f-e227-44c2-a4b1-a57c853ac3a7", - "metadata": {}, - "outputs": [], - "source": [ - "jobdef = DDPJobDefinition(\n", - " name=\"gpttest\",\n", - " script=\"gpt_og.py\",\n", - " script_args=arg_list,\n", - " scheduler_args={\"requirements\": \"requirements_gpt.txt\"}\n", - ")\n", - "job = jobdef.submit(cluster)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1680d287-de46-45f8-b95a-02ba3c83912c", - "metadata": {}, - "outputs": [], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "markdown", - "id": "d310e6a8", - "metadata": {}, - "source": [ - "Retrieve raw log output at anytime with:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d25d6198-9941-47e8-857f-9811830cc854", - "metadata": {}, - "outputs": [], - "source": [ - "job.logs()" - ] - }, - { - "cell_type": "markdown", - "id": "cb27d5e0", - "metadata": {}, - "source": [ - "View live updates for status, logs, and other information with:" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "id": "d2c51953", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.cluster_dashboard_uri()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b14bbde1", - "metadata": {}, - "outputs": [], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "beb1a6b9-d9b3-49b7-b036-09f1d3569b59", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.down()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8398d977-db24-46d0-a7d2-b4e9197808d7", - "metadata": {}, - "outputs": [], - "source": [ - "auth.logout()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/demo-notebooks/guided-demos/preview_nbs/gpt_og.py b/demo-notebooks/guided-demos/preview_nbs/gpt_og.py deleted file mode 100644 index d69e41fc..00000000 --- a/demo-notebooks/guided-demos/preview_nbs/gpt_og.py +++ /dev/null @@ -1,728 +0,0 @@ -#!/usr/bin/env python -# coding=utf-8 -# Copyright 2020 The HuggingFace Inc. team. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -""" -Fine-tuning the library models for causal language modeling (GPT, GPT-2, CTRL, ...) on a text file or a dataset. - -Here is the full list of checkpoints on the hub that can be fine-tuned by this script: -https://huggingface.co/models?filter=text-generation -""" -# You can also adapt this script on your own causal language modeling task. Pointers for this are left as comments. - -import subprocess - -subprocess.run(["pip", "uninstall", "protobuf"]) -subprocess.run( - [ - "pip", - "install", - "--upgrade", - "--target=/home/ray/workspace", - "-r", - "requirements.txt", - ] -) - -import logging -import math -import os -import sys -from dataclasses import dataclass, field -from itertools import chain -from typing import Optional - -import datasets -import evaluate -import torch -from datasets import load_dataset - -import transformers -from transformers import ( - CONFIG_MAPPING, - MODEL_FOR_CAUSAL_LM_MAPPING, - AutoConfig, - AutoModelForCausalLM, - AutoTokenizer, - HfArgumentParser, - Trainer, - TrainingArguments, - default_data_collator, - is_torch_tpu_available, - set_seed, -) -from transformers.testing_utils import CaptureLogger -from transformers.trainer_utils import get_last_checkpoint -from transformers.utils import check_min_version, send_example_telemetry -from transformers.utils.versions import require_version - - -# Will error if the minimal version of Transformers is not installed. Remove at your own risks. 
-# check_min_version("4.29.0.dev0") - -require_version( - "datasets>=1.8.0", - "To fix: pip install -r examples/pytorch/language-modeling/requirements.txt", -) - -logger = logging.getLogger(__name__) - - -MODEL_CONFIG_CLASSES = list(MODEL_FOR_CAUSAL_LM_MAPPING.keys()) -MODEL_TYPES = tuple(conf.model_type for conf in MODEL_CONFIG_CLASSES) - - -@dataclass -class ModelArguments: - """ - Arguments pertaining to which model/config/tokenizer we are going to fine-tune, or train from scratch. - """ - - model_name_or_path: Optional[str] = field( - default=None, - metadata={ - "help": ( - "The model checkpoint for weights initialization.Don't set if you want to train a model from scratch." - ) - }, - ) - model_type: Optional[str] = field( - default=None, - metadata={ - "help": "If training from scratch, pass a model type from the list: " - + ", ".join(MODEL_TYPES) - }, - ) - config_overrides: Optional[str] = field( - default=None, - metadata={ - "help": ( - "Override some existing default config settings when a model is trained from scratch. Example: " - "n_embd=10,resid_pdrop=0.2,scale_attn_weights=false,summary_type=cls_index" - ) - }, - ) - config_name: Optional[str] = field( - default=None, - metadata={ - "help": "Pretrained config name or path if not the same as model_name" - }, - ) - tokenizer_name: Optional[str] = field( - default=None, - metadata={ - "help": "Pretrained tokenizer name or path if not the same as model_name" - }, - ) - cache_dir: Optional[str] = field( - default=None, - metadata={ - "help": "Where do you want to store the pretrained models downloaded from huggingface.co" - }, - ) - use_fast_tokenizer: bool = field( - default=True, - metadata={ - "help": "Whether to use one of the fast tokenizer (backed by the tokenizers library) or not." - }, - ) - model_revision: str = field( - default="main", - metadata={ - "help": "The specific model version to use (can be a branch name, tag name or commit id)." - }, - ) - use_auth_token: bool = field( - default=False, - metadata={ - "help": ( - "Will use the token generated when running `huggingface-cli login` (necessary to use this script " - "with private models)." - ) - }, - ) - torch_dtype: Optional[str] = field( - default=None, - metadata={ - "help": ( - "Override the default `torch.dtype` and load the model under this dtype. If `auto` is passed, the " - "dtype will be automatically derived from the model's weights." - ), - "choices": ["auto", "bfloat16", "float16", "float32"], - }, - ) - low_cpu_mem_usage: bool = field( - default=False, - metadata={ - "help": ( - "It is an option to create the model as an empty shell, then only materialize its parameters when the pretrained weights are loaded." - "set True will benefit LLM loading time and RAM consumption." - ) - }, - ) - - def __post_init__(self): - if self.config_overrides is not None and ( - self.config_name is not None or self.model_name_or_path is not None - ): - raise ValueError( - "--config_overrides can't be used in combination with --config_name or --model_name_or_path" - ) - - -@dataclass -class DataTrainingArguments: - """ - Arguments pertaining to what data we are going to input our model for training and eval. - """ - - dataset_name: Optional[str] = field( - default=None, - metadata={"help": "The name of the dataset to use (via the datasets library)."}, - ) - dataset_config_name: Optional[str] = field( - default=None, - metadata={ - "help": "The configuration name of the dataset to use (via the datasets library)." 
- }, - ) - train_file: Optional[str] = field( - default=None, metadata={"help": "The input training data file (a text file)."} - ) - validation_file: Optional[str] = field( - default=None, - metadata={ - "help": "An optional input evaluation data file to evaluate the perplexity on (a text file)." - }, - ) - max_train_samples: Optional[int] = field( - default=None, - metadata={ - "help": ( - "For debugging purposes or quicker training, truncate the number of training examples to this " - "value if set." - ) - }, - ) - max_eval_samples: Optional[int] = field( - default=None, - metadata={ - "help": ( - "For debugging purposes or quicker training, truncate the number of evaluation examples to this " - "value if set." - ) - }, - ) - streaming: bool = field(default=False, metadata={"help": "Enable streaming mode"}) - block_size: Optional[int] = field( - default=None, - metadata={ - "help": ( - "Optional input sequence length after tokenization. " - "The training dataset will be truncated in block of this size for training. " - "Default to the model max input length for single sentence inputs (take into account special tokens)." - ) - }, - ) - overwrite_cache: bool = field( - default=False, - metadata={"help": "Overwrite the cached training and evaluation sets"}, - ) - validation_split_percentage: Optional[int] = field( - default=5, - metadata={ - "help": "The percentage of the train set used as validation set in case there's no validation split" - }, - ) - preprocessing_num_workers: Optional[int] = field( - default=None, - metadata={"help": "The number of processes to use for the preprocessing."}, - ) - keep_linebreaks: bool = field( - default=True, - metadata={"help": "Whether to keep line breaks when using TXT files or not."}, - ) - - def __post_init__(self): - if self.streaming: - require_version( - "datasets>=2.0.0", "The streaming feature requires `datasets>=2.0.0`" - ) - - if ( - self.dataset_name is None - and self.train_file is None - and self.validation_file is None - ): - raise ValueError( - "Need either a dataset name or a training/validation file." - ) - else: - if self.train_file is not None: - extension = self.train_file.split(".")[-1] - assert extension in [ - "csv", - "json", - "txt", - ], "`train_file` should be a csv, a json or a txt file." - if self.validation_file is not None: - extension = self.validation_file.split(".")[-1] - assert extension in [ - "csv", - "json", - "txt", - ], "`validation_file` should be a csv, a json or a txt file." - - -def main(): - # See all possible arguments in src/transformers/training_args.py - # or by passing the --help flag to this script. - # We now keep distinct sets of args, for a cleaner separation of concerns. - - parser = HfArgumentParser( - (ModelArguments, DataTrainingArguments, TrainingArguments) - ) - if len(sys.argv) == 2 and sys.argv[1].endswith(".json"): - # If we pass only one argument to the script and it's the path to a json file, - # let's parse it to get our arguments. - model_args, data_args, training_args = parser.parse_json_file( - json_file=os.path.abspath(sys.argv[1]) - ) - else: - model_args, data_args, training_args = parser.parse_args_into_dataclasses() - - # Sending telemetry. Tracking the example usage helps us better allocate resources to maintain them. The - # information sent is the one passed as arguments along with your Python/PyTorch versions. 
- send_example_telemetry("run_clm", model_args, data_args) - - # Setup logging - logging.basicConfig( - format="%(asctime)s - %(levelname)s - %(name)s - %(message)s", - datefmt="%m/%d/%Y %H:%M:%S", - handlers=[logging.StreamHandler(sys.stdout)], - ) - - if training_args.should_log: - # The default of training_args.log_level is passive, so we set log level at info here to have that default. - transformers.utils.logging.set_verbosity_info() - - log_level = training_args.get_process_log_level() - logger.setLevel(log_level) - datasets.utils.logging.set_verbosity(log_level) - transformers.utils.logging.set_verbosity(log_level) - transformers.utils.logging.enable_default_handler() - transformers.utils.logging.enable_explicit_format() - - # Log on each process the small summary: - logger.warning( - f"Process rank: {training_args.local_rank}, device: {training_args.device}, n_gpu: {training_args.n_gpu}" - + f"distributed training: {bool(training_args.local_rank != -1)}, 16-bits training: {training_args.fp16}" - ) - logger.info(f"Training/evaluation parameters {training_args}") - - # Detecting last checkpoint. - last_checkpoint = None - if ( - os.path.isdir(training_args.output_dir) - and training_args.do_train - and not training_args.overwrite_output_dir - ): - last_checkpoint = get_last_checkpoint(training_args.output_dir) - if last_checkpoint is None and len(os.listdir(training_args.output_dir)) > 0: - raise ValueError( - f"Output directory ({training_args.output_dir}) already exists and is not empty. " - "Use --overwrite_output_dir to overcome." - ) - elif ( - last_checkpoint is not None and training_args.resume_from_checkpoint is None - ): - logger.info( - f"Checkpoint detected, resuming training at {last_checkpoint}. To avoid this behavior, change " - "the `--output_dir` or add `--overwrite_output_dir` to train from scratch." - ) - - # Set seed before initializing model. - set_seed(training_args.seed) - - # Get the datasets: you can either provide your own CSV/JSON/TXT training and evaluation files (see below) - # or just provide the name of one of the public datasets available on the hub at https://huggingface.co/datasets/ - # (the dataset will be downloaded automatically from the datasets Hub). - # - # For CSV/JSON files, this script will use the column called 'text' or the first column if no column called - # 'text' is found. You can easily tweak this behavior (see below). - # - # In distributed training, the load_dataset function guarantee that only one local process can concurrently - # download the dataset. - if data_args.dataset_name is not None: - # Downloading and loading a dataset from the hub. 
- raw_datasets = load_dataset( - data_args.dataset_name, - data_args.dataset_config_name, - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - streaming=data_args.streaming, - ) - if "validation" not in raw_datasets.keys(): - raw_datasets["validation"] = load_dataset( - data_args.dataset_name, - data_args.dataset_config_name, - split=f"train[:{data_args.validation_split_percentage}%]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - streaming=data_args.streaming, - ) - raw_datasets["train"] = load_dataset( - data_args.dataset_name, - data_args.dataset_config_name, - split=f"train[{data_args.validation_split_percentage}%:]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - streaming=data_args.streaming, - ) - else: - data_files = {} - dataset_args = {} - if data_args.train_file is not None: - data_files["train"] = data_args.train_file - if data_args.validation_file is not None: - data_files["validation"] = data_args.validation_file - extension = ( - data_args.train_file.split(".")[-1] - if data_args.train_file is not None - else data_args.validation_file.split(".")[-1] - ) - if extension == "txt": - extension = "text" - dataset_args["keep_linebreaks"] = data_args.keep_linebreaks - raw_datasets = load_dataset( - extension, - data_files=data_files, - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - **dataset_args, - ) - # If no validation data is there, validation_split_percentage will be used to divide the dataset. - if "validation" not in raw_datasets.keys(): - raw_datasets["validation"] = load_dataset( - extension, - data_files=data_files, - split=f"train[:{data_args.validation_split_percentage}%]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - **dataset_args, - ) - raw_datasets["train"] = load_dataset( - extension, - data_files=data_files, - split=f"train[{data_args.validation_split_percentage}%:]", - cache_dir=model_args.cache_dir, - use_auth_token=True if model_args.use_auth_token else None, - **dataset_args, - ) - - # See more about loading any type of standard or custom dataset (from files, python dict, pandas DataFrame, etc) at - # https://huggingface.co/docs/datasets/loading_datasets.html. - - # Load pretrained model and tokenizer - # - # Distributed training: - # The .from_pretrained methods guarantee that only one local process can concurrently - # download model & vocab. 
- - config_kwargs = { - "cache_dir": model_args.cache_dir, - "revision": model_args.model_revision, - "use_auth_token": True if model_args.use_auth_token else None, - } - if model_args.config_name: - config = AutoConfig.from_pretrained(model_args.config_name, **config_kwargs) - elif model_args.model_name_or_path: - config = AutoConfig.from_pretrained( - model_args.model_name_or_path, **config_kwargs - ) - else: - config = CONFIG_MAPPING[model_args.model_type]() - logger.warning("You are instantiating a new config instance from scratch.") - if model_args.config_overrides is not None: - logger.info(f"Overriding config: {model_args.config_overrides}") - config.update_from_string(model_args.config_overrides) - logger.info(f"New config: {config}") - - tokenizer_kwargs = { - "cache_dir": model_args.cache_dir, - "use_fast": model_args.use_fast_tokenizer, - "revision": model_args.model_revision, - "use_auth_token": True if model_args.use_auth_token else None, - } - if model_args.tokenizer_name: - tokenizer = AutoTokenizer.from_pretrained( - model_args.tokenizer_name, **tokenizer_kwargs - ) - elif model_args.model_name_or_path: - tokenizer = AutoTokenizer.from_pretrained( - model_args.model_name_or_path, **tokenizer_kwargs - ) - else: - raise ValueError( - "You are instantiating a new tokenizer from scratch. This is not supported by this script." - "You can do it from another script, save it, and load it from here, using --tokenizer_name." - ) - - if model_args.model_name_or_path: - torch_dtype = ( - model_args.torch_dtype - if model_args.torch_dtype in ["auto", None] - else getattr(torch, model_args.torch_dtype) - ) - model = AutoModelForCausalLM.from_pretrained( - model_args.model_name_or_path, - from_tf=bool(".ckpt" in model_args.model_name_or_path), - config=config, - cache_dir=model_args.cache_dir, - revision=model_args.model_revision, - use_auth_token=True if model_args.use_auth_token else None, - torch_dtype=torch_dtype, - low_cpu_mem_usage=model_args.low_cpu_mem_usage, - ) - else: - model = AutoModelForCausalLM.from_config(config) - n_params = sum({p.data_ptr(): p.numel() for p in model.parameters()}.values()) - logger.info( - f"Training new model from scratch - Total size={n_params/2**20:.2f}M params" - ) - - # We resize the embeddings only when necessary to avoid index errors. If you are creating a model from scratch - # on a small vocab and want a smaller embedding size, remove this test. - embedding_size = model.get_input_embeddings().weight.shape[0] - if len(tokenizer) > embedding_size: - model.resize_token_embeddings(len(tokenizer)) - - # Preprocessing the datasets. - # First we tokenize all the texts. - if training_args.do_train: - column_names = list(raw_datasets["train"].features) - else: - column_names = list(raw_datasets["validation"].features) - text_column_name = "text" if "text" in column_names else column_names[0] - - # since this will be pickled to avoid _LazyModule error in Hasher force logger loading before tokenize_function - tok_logger = transformers.utils.logging.get_logger( - "transformers.tokenization_utils_base" - ) - - def tokenize_function(examples): - with CaptureLogger(tok_logger) as cl: - output = tokenizer(examples[text_column_name]) - # clm input could be much much longer than block_size - if "Token indices sequence length is longer than the" in cl.out: - tok_logger.warning( - "^^^^^^^^^^^^^^^^ Please ignore the warning above - this long input will be chunked into smaller bits" - " before being passed to the model." 
- ) - return output - - with training_args.main_process_first(desc="dataset map tokenization"): - if not data_args.streaming: - tokenized_datasets = raw_datasets.map( - tokenize_function, - batched=True, - num_proc=data_args.preprocessing_num_workers, - remove_columns=column_names, - load_from_cache_file=not data_args.overwrite_cache, - desc="Running tokenizer on dataset", - ) - else: - tokenized_datasets = raw_datasets.map( - tokenize_function, - batched=True, - remove_columns=column_names, - ) - - if data_args.block_size is None: - block_size = tokenizer.model_max_length - if block_size > 1024: - logger.warning( - "The chosen tokenizer supports a `model_max_length` that is longer than the default `block_size` value" - " of 1024. If you would like to use a longer `block_size` up to `tokenizer.model_max_length` you can" - " override this default with `--block_size xxx`." - ) - block_size = 1024 - else: - if data_args.block_size > tokenizer.model_max_length: - logger.warning( - f"The block_size passed ({data_args.block_size}) is larger than the maximum length for the model" - f"({tokenizer.model_max_length}). Using block_size={tokenizer.model_max_length}." - ) - block_size = min(data_args.block_size, tokenizer.model_max_length) - - # Main data processing function that will concatenate all texts from our dataset and generate chunks of block_size. - def group_texts(examples): - # Concatenate all texts. - concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()} - total_length = len(concatenated_examples[list(examples.keys())[0]]) - # We drop the small remainder, we could add padding if the model supported it instead of this drop, you can - # customize this part to your needs. - if total_length >= block_size: - total_length = (total_length // block_size) * block_size - # Split by chunks of max_len. - result = { - k: [t[i : i + block_size] for i in range(0, total_length, block_size)] - for k, t in concatenated_examples.items() - } - result["labels"] = result["input_ids"].copy() - return result - - # Note that with `batched=True`, this map processes 1,000 texts together, so group_texts throws away a remainder - # for each of those groups of 1,000 texts. You can adjust that batch_size here but a higher value might be slower - # to preprocess. - # - # To speed up this part, we use multiprocessing. 
See the documentation of the map method for more information: - # https://huggingface.co/docs/datasets/package_reference/main_classes.html#datasets.Dataset.map - - with training_args.main_process_first(desc="grouping texts together"): - if not data_args.streaming: - lm_datasets = tokenized_datasets.map( - group_texts, - batched=True, - num_proc=data_args.preprocessing_num_workers, - load_from_cache_file=not data_args.overwrite_cache, - desc=f"Grouping texts in chunks of {block_size}", - ) - else: - lm_datasets = tokenized_datasets.map( - group_texts, - batched=True, - ) - - if training_args.do_train: - if "train" not in tokenized_datasets: - raise ValueError("--do_train requires a train dataset") - train_dataset = lm_datasets["train"] - if data_args.max_train_samples is not None: - max_train_samples = min(len(train_dataset), data_args.max_train_samples) - train_dataset = train_dataset.select(range(max_train_samples)) - - if training_args.do_eval: - if "validation" not in tokenized_datasets: - raise ValueError("--do_eval requires a validation dataset") - eval_dataset = lm_datasets["validation"] - if data_args.max_eval_samples is not None: - max_eval_samples = min(len(eval_dataset), data_args.max_eval_samples) - eval_dataset = eval_dataset.select(range(max_eval_samples)) - - def preprocess_logits_for_metrics(logits, labels): - if isinstance(logits, tuple): - # Depending on the model and config, logits may contain extra tensors, - # like past_key_values, but logits always come first - logits = logits[0] - return logits.argmax(dim=-1) - - metric = evaluate.load("accuracy") - - def compute_metrics(eval_preds): - preds, labels = eval_preds - # preds have the same shape as the labels, after the argmax(-1) has been calculated - # by preprocess_logits_for_metrics but we need to shift the labels - labels = labels[:, 1:].reshape(-1) - preds = preds[:, :-1].reshape(-1) - return metric.compute(predictions=preds, references=labels) - - # Initialize our Trainer - trainer = Trainer( - model=model, - args=training_args, - train_dataset=train_dataset if training_args.do_train else None, - eval_dataset=eval_dataset if training_args.do_eval else None, - tokenizer=tokenizer, - # Data collator will default to DataCollatorWithPadding, so we change it. 
- data_collator=default_data_collator, - compute_metrics=compute_metrics - if training_args.do_eval and not is_torch_tpu_available() - else None, - preprocess_logits_for_metrics=preprocess_logits_for_metrics - if training_args.do_eval and not is_torch_tpu_available() - else None, - ) - - # Training - if training_args.do_train: - checkpoint = None - if training_args.resume_from_checkpoint is not None: - checkpoint = training_args.resume_from_checkpoint - elif last_checkpoint is not None: - checkpoint = last_checkpoint - train_result = trainer.train(resume_from_checkpoint=checkpoint) - trainer.save_model() # Saves the tokenizer too for easy upload - - metrics = train_result.metrics - - max_train_samples = ( - data_args.max_train_samples - if data_args.max_train_samples is not None - else len(train_dataset) - ) - metrics["train_samples"] = min(max_train_samples, len(train_dataset)) - - trainer.log_metrics("train", metrics) - trainer.save_metrics("train", metrics) - trainer.save_state() - - # Evaluation - if training_args.do_eval: - logger.info("*** Evaluate ***") - - metrics = trainer.evaluate() - - max_eval_samples = ( - data_args.max_eval_samples - if data_args.max_eval_samples is not None - else len(eval_dataset) - ) - metrics["eval_samples"] = min(max_eval_samples, len(eval_dataset)) - try: - perplexity = math.exp(metrics["eval_loss"]) - except OverflowError: - perplexity = float("inf") - metrics["perplexity"] = perplexity - - trainer.log_metrics("eval", metrics) - trainer.save_metrics("eval", metrics) - - kwargs = { - "finetuned_from": model_args.model_name_or_path, - "tasks": "text-generation", - } - if data_args.dataset_name is not None: - kwargs["dataset_tags"] = data_args.dataset_name - if data_args.dataset_config_name is not None: - kwargs["dataset_args"] = data_args.dataset_config_name - kwargs[ - "dataset" - ] = f"{data_args.dataset_name} {data_args.dataset_config_name}" - else: - kwargs["dataset"] = data_args.dataset_name - - if training_args.push_to_hub: - trainer.push_to_hub(**kwargs) - else: - trainer.create_model_card(**kwargs) - - -def _mp_fn(index): - # For xla_spawn (TPUs) - main() - - -if __name__ == "__main__": - main() diff --git a/demo-notebooks/guided-demos/preview_nbs/requirements_gpt.txt b/demo-notebooks/guided-demos/preview_nbs/requirements_gpt.txt deleted file mode 100644 index bd6c4f52..00000000 --- a/demo-notebooks/guided-demos/preview_nbs/requirements_gpt.txt +++ /dev/null @@ -1,8 +0,0 @@ -accelerate >= 0.12.0 -torch >= 1.3 -datasets >= 1.8.0 -sentencepiece != 0.1.92 -evaluate -scikit-learn -transformers==4.28.1 -protobuf<=3.20.1,>=3.8.0 diff --git a/demo-notebooks/guided-demos/requirements_gpt.txt b/demo-notebooks/guided-demos/requirements_gpt.txt deleted file mode 100644 index bd6c4f52..00000000 --- a/demo-notebooks/guided-demos/requirements_gpt.txt +++ /dev/null @@ -1,8 +0,0 @@ -accelerate >= 0.12.0 -torch >= 1.3 -datasets >= 1.8.0 -sentencepiece != 0.1.92 -evaluate -scikit-learn -transformers==4.28.1 -protobuf<=3.20.1,>=3.8.0 From bd49ef7771b8a837f9d48e0c798e938519daffed Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 3 Apr 2024 12:20:42 +0100 Subject: [PATCH 171/496] Replaced basic job demo with RayJobClient demos --- .../guided-demos/2_basic_jobs.ipynb | 321 ----------- .../2_basic_jobs.ipynb => 2_job_client.ipynb} | 174 +++--- .../notebook-ex-outputs/2_basic_jobs.ipynb | 522 ------------------ .../notebook-ex-outputs/2_job_client.ipynb | 430 +++++++++++++++ .../preview_nbs/2_job_client.ipynb | 283 ++++++++++ 5 files changed, 786 insertions(+), 944 
deletions(-) delete mode 100644 demo-notebooks/guided-demos/2_basic_jobs.ipynb rename demo-notebooks/guided-demos/{preview_nbs/2_basic_jobs.ipynb => 2_job_client.ipynb} (52%) delete mode 100644 demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb create mode 100644 demo-notebooks/guided-demos/notebook-ex-outputs/2_job_client.ipynb create mode 100644 demo-notebooks/guided-demos/preview_nbs/2_job_client.ipynb diff --git a/demo-notebooks/guided-demos/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/2_basic_jobs.ipynb deleted file mode 100644 index e57a6ed8..00000000 --- a/demo-notebooks/guided-demos/2_basic_jobs.ipynb +++ /dev/null @@ -1,321 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "464af595", - "metadata": {}, - "source": [ - "In this third notebook, we will go over the basics of submitting jobs via the SDK, either to a Ray cluster or directly to MCAD." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b55bc3ea-4ce3-49bf-bb1f-e209de8ca47a", - "metadata": {}, - "outputs": [], - "source": [ - "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "614daa0c", - "metadata": {}, - "outputs": [], - "source": [ - "# Create authentication object for user permissions\n", - "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", - "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "auth = TokenAuthentication(\n", - " token = \"XXXXX\",\n", - " server = \"XXXXX\",\n", - " skip_tls=False\n", - ")\n", - "auth.login()" - ] - }, - { - "cell_type": "markdown", - "id": "bc27f84c", - "metadata": {}, - "source": [ - "Let's start by running through the same cluster setup as before:\n", - "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0f4bc870-091f-4e11-9642-cba145710159", - "metadata": {}, - "outputs": [], - "source": [ - "# Create and configure our cluster object (and appwrapper)\n", - "cluster = Cluster(ClusterConfiguration(\n", - " name='jobtest',\n", - " namespace='default',\n", - " num_workers=2,\n", - " min_cpus=1,\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", - " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=False\n", - "))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f0884bbc-c224-4ca0-98a0-02dfa09c2200", - "metadata": {}, - "outputs": [], - "source": [ - "# Bring up the cluster\n", - "cluster.up()\n", - "cluster.wait_ready()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "df71c1ed", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.details()" - ] - }, - { - "cell_type": "markdown", - "id": "33663f47", - "metadata": {}, - "source": [ - "This time, however, we are going to use the CodeFlare SDK to submit batch jobs via TorchX, either to the Ray cluster we have just brought up, or directly to MCAD." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c7b4f232", - "metadata": {}, - "outputs": [], - "source": [ - "from codeflare_sdk import DDPJobDefinition" - ] - }, - { - "cell_type": "markdown", - "id": "83d77b74", - "metadata": {}, - "source": [ - "First, let's begin by submitting to Ray, training a basic NN on the MNIST dataset:\n", - "\n", - "NOTE: To test this demo in an air-gapped/ disconnected environment alter the training script to use a local dataset.\n", - "First we must download the MNIST dataset. We've included a helper script to do this for you. \n", - "\n", - "You can run the python script (`python download_mnist_datasets.py`) directly and then place the dataset in the same directory as this notebook. \n", - "The path to the dataset would be: `..guided-demos/MNIST/raw/` " - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8c2c5138", - "metadata": {}, - "outputs": [], - "source": [ - "jobdef = DDPJobDefinition(\n", - " name=\"mnisttest\",\n", - " script=\"mnist.py\",\n", - " # script=\"mnist_disconnected.py\", # training script for disconnected environment\n", - " scheduler_args={\"requirements\": \"requirements.txt\"}\n", - ")\n", - "job = jobdef.submit(cluster)" - ] - }, - { - "cell_type": "markdown", - "id": "5b9ae53a", - "metadata": {}, - "source": [ - "Now we can take a look at the status of our submitted job, as well as retrieve the full logs:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6e36c3d9", - "metadata": {}, - "outputs": [], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "834cfb5c", - "metadata": {}, - "outputs": [], - "source": [ - "job.logs()" - ] - }, - { - "cell_type": "markdown", - "id": "c8267fb2", - "metadata": {}, - "source": [ - "You can also view organized logs, status, and other information directly through the Ray cluster's dashboard:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3f3861d0", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.cluster_dashboard_uri()" - ] - }, - { - "cell_type": "markdown", - "id": "5af8cd32", - "metadata": {}, - "source": [ - "Once complete, we can bring our Ray cluster down and clean up:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5f36db0f-31f6-4373-9503-dc3c1c4c3f57", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.down()" - ] - }, - { - "cell_type": "markdown", - "id": "31096641", - "metadata": {}, - "source": [ - "Now, an alternative option for job submission is to submit directly to MCAD, which will schedule pods to run the job with requested resources:\n", - "\n", - "NOTE: To test this demo in an air-gapped/ disconnected environment alter the training script to use a local dataset." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "496139cc", - "metadata": {}, - "outputs": [], - "source": [ - "jobdef = DDPJobDefinition(\n", - " name=\"mnistjob\",\n", - " script=\"mnist.py\",\n", - " # script=\"mnist_disconnected.py\", # training script for disconnected environment\n", - " scheduler_args={\"namespace\": \"default\"},\n", - " j=\"1x1\",\n", - " gpu=0,\n", - " cpu=1,\n", - " memMB=8000,\n", - " image=\"quay.io/project-codeflare/mnist-job-test:v0.0.1\"\n", - ")\n", - "job = jobdef.submit()" - ] - }, - { - "cell_type": "markdown", - "id": "0837e43b", - "metadata": {}, - "source": [ - "Once again, we can look at job status and logs:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3d18d42c", - "metadata": {}, - "outputs": [], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "36d7ea97", - "metadata": {}, - "outputs": [], - "source": [ - "job.logs()" - ] - }, - { - "cell_type": "markdown", - "id": "aebf376a", - "metadata": {}, - "source": [ - "This time, once the pods complete, we can clean them up alongside any other associated resources. The following command can also be used to delete jobs early for both Ray and MCAD submission:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ebbb0674", - "metadata": {}, - "outputs": [], - "source": [ - "job.cancel()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0d41b90e", - "metadata": {}, - "outputs": [], - "source": [ - "auth.logout()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.18" - }, - "vscode": { - "interpreter": { - "hash": "f9f85f796d01129d0dd105a088854619f454435301f6ffec2fea96ecbd9be4ac" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/2_job_client.ipynb similarity index 52% rename from demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb rename to demo-notebooks/guided-demos/2_job_client.ipynb index 495a0855..7b3d619b 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb +++ b/demo-notebooks/guided-demos/2_job_client.ipynb @@ -2,60 +2,47 @@ "cells": [ { "cell_type": "markdown", - "id": "464af595", "metadata": {}, "source": [ - "In this third notebook, we will go over the basics of submitting jobs via the SDK, either to a Ray cluster or directly to MCAD." 
+ "In this third demo we will go over the basics of the Ray Job Submission Client in the SDK" ] }, { "cell_type": "code", "execution_count": null, - "id": "b55bc3ea-4ce3-49bf-bb1f-e209de8ca47a", "metadata": {}, "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication, RayJobClient" ] }, { "cell_type": "code", "execution_count": null, - "id": "614daa0c", "metadata": {}, "outputs": [], "source": [ "# Create authentication object for user permissions\n", "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", + "\n", + "auth_token = \"XXXXX\" # The auth_token is used later for the RayJobClient\n", "auth = TokenAuthentication(\n", - " token = \"XXXXX\",\n", + " token = auth_token,\n", " server = \"XXXXX\",\n", " skip_tls=False\n", ")\n", "auth.login()" ] }, - { - "cell_type": "markdown", - "id": "bc27f84c", - "metadata": {}, - "source": [ - "Let's start by running through the same cluster setup as before:\n", - "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." - ] - }, { "cell_type": "code", "execution_count": null, - "id": "0f4bc870-091f-4e11-9642-cba145710159", "metadata": {}, "outputs": [], "source": [ - "# Create and configure our cluster object (and appwrapper)\n", + "# Create and configure our cluster object\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", " namespace='default',\n", @@ -65,15 +52,13 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=False\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", "))" ] }, { "cell_type": "code", "execution_count": null, - "id": "f0884bbc-c224-4ca0-98a0-02dfa09c2200", "metadata": {}, "outputs": [], "source": [ @@ -85,7 +70,6 @@ { "cell_type": "code", "execution_count": null, - "id": "df71c1ed", "metadata": {}, "outputs": [], "source": [ @@ -94,187 +78,180 @@ }, { "cell_type": "markdown", - "id": "33663f47", - "metadata": {}, - "source": [ - "This time, however, we are going to use the CodeFlare SDK to submit batch jobs via TorchX, either to the Ray cluster we have just brought up, or directly to MCAD." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c7b4f232", "metadata": {}, - "outputs": [], "source": [ - "from codeflare_sdk import DDPJobDefinition" + "### Ray Job Submission - Authorized Ray Cluster" ] }, { "cell_type": "markdown", - "id": "83d77b74", "metadata": {}, "source": [ - "First, let's begin by submitting to Ray, training a basic NN on the MNIST dataset:" + "* Submit a job using an authorized Ray dashboard and the Job Submission Client\n", + "* Provide an entrypoint command directed to your job script\n", + "* Set up your runtime environment" ] }, { "cell_type": "code", "execution_count": null, - "id": "8c2c5138", "metadata": {}, "outputs": [], "source": [ - "jobdef = DDPJobDefinition(\n", - " name=\"mnisttest\",\n", - " script=\"mnist.py\",\n", - " scheduler_args={\"requirements\": \"requirements.txt\"}\n", - ")\n", - "job = jobdef.submit(cluster)" + "# Gather the dashboard URL\n", + "ray_dashboard = cluster.cluster_dashboard_uri()\n", + "\n", + "# Create the header for passing your bearer token\n", + "header = {\n", + " 'Authorization': f'Bearer {auth_token}'\n", + "}\n", + "\n", + "# Initialize the RayJobClient\n", + "client = RayJobClient(address=ray_dashboard, headers=header, verify=True)" ] }, { - "cell_type": "markdown", - "id": "5b9ae53a", + "cell_type": "code", + "execution_count": null, "metadata": {}, + "outputs": [], "source": [ - "Now we can take a look at the status of our submitted job, as well as retrieve the full logs:" + "# Submit an example mnist job using the RayJobClient\n", + "submission_id = client.submit_job(\n", + " entrypoint=\"python mnist.py\",\n", + " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", + ")\n", + "print(submission_id)" ] }, { "cell_type": "code", "execution_count": null, - "id": "6e36c3d9", "metadata": {}, "outputs": [], "source": [ - "job.status()" + "# Get the job's logs\n", + "client.get_job_logs(submission_id)" ] }, { "cell_type": "code", "execution_count": null, - "id": "834cfb5c", "metadata": {}, "outputs": [], "source": [ - "job.logs()" + "# Get the job's status\n", + "client.get_job_status(submission_id)" ] }, { - "cell_type": "markdown", - "id": "4067ef60", + "cell_type": "code", + "execution_count": null, "metadata": {}, + "outputs": [], "source": [ - "You can also view organized logs, status, and other information directly through the Ray cluster's dashboard:" + "# Get job related info\n", + "client.get_job_info(submission_id)" ] }, { "cell_type": "code", "execution_count": null, - "id": "19055243", "metadata": {}, "outputs": [], "source": [ - "cluster.cluster_dashboard_uri()" + "# List all existing jobs\n", + "client.list_jobs()" ] }, { - "cell_type": "markdown", - "id": "5af8cd32", + "cell_type": "code", + "execution_count": null, "metadata": {}, + "outputs": [], "source": [ - "Once complete, we can bring our Ray cluster down and clean up:" + "# Iterate through the logs of a job \n", + "async for lines in client.tail_job_logs(submission_id):\n", + " print(lines, end=\"\") " ] }, { "cell_type": "code", "execution_count": null, - "id": "5f36db0f-31f6-4373-9503-dc3c1c4c3f57", "metadata": {}, "outputs": [], "source": [ - "cluster.down()" + "# Delete a job\n", + "# Can run client.cancel_job(submission_id) first if job is still running\n", + "client.delete_job(submission_id)" ] }, { "cell_type": "markdown", - "id": "31096641", "metadata": {}, "source": [ - "Now, an alternative option for job submission is to submit directly to MCAD, which will schedule pods to run the job with 
requested resources:" + "### Unauthorized Ray Cluster with the Ray Job Client" ] }, { "cell_type": "code", "execution_count": null, - "id": "496139cc", "metadata": {}, "outputs": [], "source": [ - "jobdef = DDPJobDefinition(\n", - " name=\"mnistjob\",\n", - " script=\"mnist.py\",\n", - " scheduler_args={\"namespace\": \"default\"},\n", - " j=\"1x1\",\n", - " gpu=0,\n", - " cpu=1,\n", - " memMB=8000,\n", - " image=\"quay.io/project-codeflare/mnist-job-test:v0.0.1\"\n", - ")\n", - "job = jobdef.submit()" - ] - }, - { - "cell_type": "markdown", - "id": "0837e43b", - "metadata": {}, - "source": [ - "Once again, we can look at job status and logs:" + "\"\"\"\n", + "Initialise the RayJobClient with the Ray Dashboard\n", + "\"\"\"\n", + "ray_dashboard = cluster.cluster_dashboard_uri()\n", + "client = RayJobClient(address=ray_dashboard, verify=False)" ] }, { "cell_type": "code", "execution_count": null, - "id": "3d18d42c", "metadata": {}, "outputs": [], "source": [ - "job.status()" + "# Submit an example mnist job using the RayJobClient\n", + "submission_id = client.submit_job(\n", + " entrypoint=\"python mnist.py\",\n", + " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", + ")\n", + "print(submission_id)" ] }, { "cell_type": "code", "execution_count": null, - "id": "36d7ea97", "metadata": {}, "outputs": [], "source": [ - "job.logs()" + "# Stop the job \n", + "client.stop_job(submission_id)" ] }, { - "cell_type": "markdown", - "id": "aebf376a", + "cell_type": "code", + "execution_count": null, "metadata": {}, + "outputs": [], "source": [ - "This time, once the pods complete, we can clean them up alongside any other associated resources. The following command can also be used to delete jobs early for both Ray and MCAD submission:" + "# Delete the job\n", + "client.delete_job(submission_id)" ] }, { "cell_type": "code", "execution_count": null, - "id": "ebbb0674", "metadata": {}, "outputs": [], "source": [ - "job.cancel()" + "cluster.down()" ] }, { "cell_type": "code", "execution_count": null, - "id": "0d41b90e", "metadata": {}, "outputs": [], "source": [ @@ -284,7 +261,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -298,14 +275,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.13" - }, - "vscode": { - "interpreter": { - "hash": "f9f85f796d01129d0dd105a088854619f454435301f6ffec2fea96ecbd9be4ac" - } + "version": "3.9.18" } }, "nbformat": 4, - "nbformat_minor": 5 + "nbformat_minor": 2 } diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb deleted file mode 100644 index 16656177..00000000 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_jobs.ipynb +++ /dev/null @@ -1,522 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "464af595", - "metadata": {}, - "source": [ - "In this third notebook, we will go over the basics of submitting jobs via the SDK, either to a Ray cluster or directly to MCAD." 
- ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "b55bc3ea-4ce3-49bf-bb1f-e209de8ca47a", - "metadata": {}, - "outputs": [], - "source": [ - "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "614daa0c", - "metadata": {}, - "outputs": [], - "source": [ - "# Create authentication object for user permissions\n", - "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", - "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "auth = TokenAuthentication(\n", - " token = \"XXXXX\",\n", - " server = \"XXXXX\",\n", - " skip_tls=False\n", - ")\n", - "auth.login()" - ] - }, - { - "cell_type": "markdown", - "id": "bc27f84c", - "metadata": {}, - "source": [ - "Let's start by running through the same cluster setup as before:\n", - "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "0f4bc870-091f-4e11-9642-cba145710159", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Written to: jobtest.yaml\n" - ] - } - ], - "source": [ - "# Create and configure our cluster object (and appwrapper)\n", - "cluster = Cluster(ClusterConfiguration(\n", - " name='jobtest',\n", - " namespace='default',\n", - " num_workers=2,\n", - " min_cpus=1,\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", - " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=False\n", - "))" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "f0884bbc-c224-4ca0-98a0-02dfa09c2200", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Waiting for requested resources to be set up...\n", - "Requested cluster up and running!\n" - ] - } - ], - "source": [ - "# Bring up the cluster\n", - "cluster.up()\n", - "cluster.wait_ready()" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "df71c1ed", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
                        🚀 CodeFlare Cluster Details 🚀                  \n",
      -       "                                                                   \n",
      -       " ╭───────────────────────────────────────────────────────────────╮ \n",
      -       " │   Name                                                        │ \n",
      -       " │   jobtest                                        Active ✅    │ \n",
      -       " │                                                               │ \n",
      -       " │   URI: ray://jobtest-head-svc.default.svc:10001               │ \n",
      -       " │                                                               │ \n",
      -       " │   Dashboard🔗                                                 │ \n",
      -       " │                                                               │ \n",
      -       " │                       Cluster Resources                       │ \n",
      -       " │   ╭── Workers ──╮  ╭───────── Worker specs(each) ─────────╮   │ \n",
      -       " │   │  # Workers  │  │  Memory      CPU         GPU         │   │ \n",
      -       " │   │             │  │                                      │   │ \n",
      -       " │   │  2          │  │  4~4         1           0           │   │ \n",
      -       " │   │             │  │                                      │   │ \n",
      -       " │   ╰─────────────╯  ╰──────────────────────────────────────╯   │ \n",
      -       " ╰───────────────────────────────────────────────────────────────╯ \n",
      -       "
      \n" - ], - "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", - " ╭───────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mName\u001b[0m │ \n", - " │ \u001b[1;4mjobtest\u001b[0m Active ✅ │ \n", - " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://jobtest-head-svc.default.svc:10001 │ \n", - " │ │ \n", - " │ \u001b]8;id=366603;http://ray-dashboard-jobtest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", - " │ │ \n", - " │ \u001b[3m Cluster Resources \u001b[0m │ \n", - " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m4~4 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m0 \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", - " ╰───────────────────────────────────────────────────────────────╯ \n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "RayCluster(name='jobtest', status=, workers=2, worker_mem_min=4, worker_mem_max=4, worker_cpu=1, worker_gpu=0, namespace='default', dashboard='http://ray-dashboard-jobtest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org')" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "cluster.details()" - ] - }, - { - "cell_type": "markdown", - "id": "33663f47", - "metadata": {}, - "source": [ - "This time, however, we are going to use the CodeFlare SDK to submit batch jobs via TorchX, either to the Ray cluster we have just brought up, or directly to MCAD." 
- ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "c7b4f232", - "metadata": {}, - "outputs": [], - "source": [ - "from codeflare_sdk import DDPJobDefinition" - ] - }, - { - "cell_type": "markdown", - "id": "83d77b74", - "metadata": {}, - "source": [ - "First, let's begin by submitting to Ray, training a basic NN on the MNIST dataset:" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "8c2c5138", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "The Ray scheduler does not support port mapping.\n" - ] - } - ], - "source": [ - "jobdef = DDPJobDefinition(\n", - " name=\"mnisttest\",\n", - " script=\"mnist.py\",\n", - " scheduler_args={\"requirements\": \"requirements.txt\"}\n", - ")\n", - "job = jobdef.submit(cluster)" - ] - }, - { - "cell_type": "markdown", - "id": "5b9ae53a", - "metadata": {}, - "source": [ - "Now we can take a look at the status of our submitted job, as well as retrieve the full logs:" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "6e36c3d9", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "AppStatus:\n", - " msg: !!python/object/apply:ray.dashboard.modules.job.common.JobStatus\n", - " - RUNNING\n", - " num_restarts: -1\n", - " roles:\n", - " - replicas:\n", - " - hostname: \n", - " id: 0\n", - " role: ray\n", - " state: !!python/object/apply:torchx.specs.api.AppState\n", - " - 3\n", - " structured_error_msg: \n", - " role: ray\n", - " state: RUNNING (3)\n", - " structured_error_msg: \n", - " ui_url: null" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "834cfb5c", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'[RayActor(name=\\'mnist\\', command=[\\'bash\\', \\'-c\\', \"torchrun --rdzv_backend static --rdzv_endpoint $TORCHX_RANK0_HOST:49782 --rdzv_id \\'mnisttest-bc7wx5t7hd0lcc\\' --nnodes 2 --nproc_per_node 1 --node_rank \\'0\\' --tee 3 --role \\'\\' mnist.py\"], env={\\'TORCHX_TRACKING_EXPERIMENT_NAME\\': \\'default-experiment\\', \\'LOGLEVEL\\': \\'WARNING\\', \\'TORCHX_JOB_ID\\': \\'ray://torchx/mnisttest-bc7wx5t7hd0lcc\\'}, num_cpus=1, num_gpus=0, min_replicas=2), RayActor(name=\\'mnist\\', command=[\\'bash\\', \\'-c\\', \"torchrun --rdzv_backend static --rdzv_endpoint $TORCHX_RANK0_HOST:49782 --rdzv_id \\'mnisttest-bc7wx5t7hd0lcc\\' --nnodes 2 --nproc_per_node 1 --node_rank \\'1\\' --tee 3 --role \\'\\' mnist.py\"], env={\\'TORCHX_TRACKING_EXPERIMENT_NAME\\': \\'default-experiment\\', \\'LOGLEVEL\\': \\'WARNING\\', \\'TORCHX_JOB_ID\\': \\'ray://torchx/mnisttest-bc7wx5t7hd0lcc\\'}, num_cpus=1, num_gpus=0, min_replicas=2)]\\n2023-08-09 08:05:59,434\\tINFO worker.py:1334 -- Using address 10.129.0.64:6379 set in the environment variable RAY_ADDRESS\\n2023-08-09 08:05:59,434\\tINFO worker.py:1452 -- Connecting to existing Ray cluster at address: 10.129.0.64:6379...\\n2023-08-09 08:05:59,477\\tINFO worker.py:1627 -- Connected to Ray cluster. 
View the dashboard at \\x1b[1m\\x1b[32mhttp://10.129.0.64:8265 \\x1b[39m\\x1b[22m\\nWaiting for minimum placement group to start.\\nSuccessfully created placement groups\\nrdzv_endpoint set to 10.129.0.64 for actor 811b83022b98b0411b06453c02000000\\nrdzv_endpoint set to 10.129.0.64 for actor 0ae7ff0d51deee52fd0b0ba802000000\\nSuccessfully placed command actors\\nEntering main loop, start executing the script on worker nodes\\nrunning ray.wait on [ObjectRef(e082c90ab8422b00811b83022b98b0411b06453c0200000001000000), ObjectRef(ce868e48e2fa9a940ae7ff0d51deee52fd0b0ba80200000001000000)]\\nrunning ray.wait on [ObjectRef(ce868e48e2fa9a940ae7ff0d51deee52fd0b0ba80200000001000000), ObjectRef(f81ec6ff838b16db811b83022b98b0411b06453c0200000001000000)]\\n'" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.logs()" - ] - }, - { - "cell_type": "markdown", - "id": "63f46b41", - "metadata": {}, - "source": [ - "You can also view organized logs, status, and other information directly through the Ray cluster's dashboard:" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "78ca8ce0", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'http://ray-dashboard-jobtest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org'" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "cluster.cluster_dashboard_uri()" - ] - }, - { - "cell_type": "markdown", - "id": "5af8cd32", - "metadata": {}, - "source": [ - "Once complete, we can bring our Ray cluster down and clean up:" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "id": "a7714885", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "AppStatus:\n", - " msg: !!python/object/apply:ray.dashboard.modules.job.common.JobStatus\n", - " - SUCCEEDED\n", - " num_restarts: -1\n", - " roles:\n", - " - replicas:\n", - " - hostname: \n", - " id: 0\n", - " role: ray\n", - " state: !!python/object/apply:torchx.specs.api.AppState\n", - " - 4\n", - " structured_error_msg: \n", - " role: ray\n", - " state: SUCCEEDED (4)\n", - " structured_error_msg: \n", - " ui_url: null" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "id": "5f36db0f-31f6-4373-9503-dc3c1c4c3f57", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.down()" - ] - }, - { - "cell_type": "markdown", - "id": "31096641", - "metadata": {}, - "source": [ - "Now, an alternative option for job submission is to submit directly to MCAD, which will schedule pods to run the job with requested resources:" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "id": "496139cc", - "metadata": {}, - "outputs": [], - "source": [ - "jobdef = DDPJobDefinition(\n", - " name=\"mnistjob\",\n", - " script=\"mnist.py\",\n", - " scheduler_args={\"namespace\": \"default\"},\n", - " j=\"1x1\",\n", - " gpu=0,\n", - " cpu=1,\n", - " memMB=8000,\n", - " image=\"quay.io/project-codeflare/mnist-job-test:v0.0.1\"\n", - ")\n", - "job = jobdef.submit()" - ] - }, - { - "cell_type": "markdown", - "id": "0837e43b", - "metadata": {}, - "source": [ - "Once again, we can look at job status and logs:" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "id": "3d18d42c", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - 
"/opt/homebrew/lib/python3.8/site-packages/torchx/schedulers/kubernetes_mcad_scheduler.py:1105: UserWarning: Warning - MCAD does not report individual replica statuses, but overall task status. Replica id may not match status\n", - " warnings.warn(msg)\n" - ] - }, - { - "data": { - "text/plain": [ - "AppStatus:\n", - " msg: \n", - " num_restarts: -1\n", - " roles:\n", - " - replicas:\n", - " - hostname: ''\n", - " id: 0\n", - " role: mnist\n", - " state: !!python/object/apply:torchx.specs.api.AppState\n", - " - 2\n", - " structured_error_msg: \n", - " role: mnist\n", - " state: RUNNING (3)\n", - " structured_error_msg: \n", - " ui_url: null" - ] - }, - "execution_count": 15, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "code", - "execution_count": 20, - "id": "36d7ea97", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "'2023-08-09T15:10:09.515811959Z [0]:GPU available: False, used: False\\n2023-08-09T15:10:09.515811959Z [0]:TPU available: False, using: 0 TPU cores\\n2023-08-09T15:10:09.515811959Z [0]:IPU available: False, using: 0 IPUs\\n2023-08-09T15:10:09.615986853Z [0]:\\n2023-08-09T15:10:09.615986853Z [0]: 0%| | 0/9912422 [00:00 🚀 CodeFlare Cluster Details 🚀 \n", + " \n", + " ╭───────────────────────────────────────────────────────────────╮ \n", + " │ Name │ \n", + " │ jobtest Active ✅ │ \n", + " │ │ \n", + " │ URI: ray://jobtest-head-svc.default.svc:10001 │ \n", + " │ │ \n", + " │ Dashboard🔗 │ \n", + " │ │ \n", + " │ Cluster Resources │ \n", + " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", + " │ │ # Workers │ │ Memory CPU GPU │ │ \n", + " │ │ │ │ │ │ \n", + " │ │ 2 │ │ 4~4 1 0 │ │ \n", + " │ │ │ │ │ │ \n", + " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", + " ╰───────────────────────────────────────────────────────────────╯ \n", + "
  • \n" + ], + "text/plain": [ + "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", + "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", + " ╭───────────────────────────────────────────────────────────────╮ \n", + " │ \u001b[1;37;42mName\u001b[0m │ \n", + " │ \u001b[1;4mjobtest\u001b[0m Active ✅ │ \n", + " │ │ \n", + " │ \u001b[1mURI:\u001b[0m ray://jobtest-head-svc.default.svc:10001 │ \n", + " │ │ \n", + " │ \u001b]8;id=561347;https://ray-dashboard-jobtest-default.apps.rosa.mcampbel.af68.p3.openshiftapps.com\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", + " │ │ \n", + " │ \u001b[3m Cluster Resources \u001b[0m │ \n", + " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", + " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", + " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m4~4 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m0 \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", + " ╰───────────────────────────────────────────────────────────────╯ \n" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "RayCluster(name='jobtest', status=, head_cpus=2, head_mem=8, head_gpu=0, workers=2, worker_mem_min=4, worker_mem_max=4, worker_cpu=1, worker_gpu=0, namespace='default', dashboard='https://ray-dashboard-jobtest-default.apps.rosa.mcampbel.af68.p3.openshiftapps.com')" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cluster.details()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Ray Job Submission - Authorized Ray Cluster" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "* Submit a job using an authorized Ray dashboard and the Job Submission Client\n", + "* Provide an entrypoint command directed to your job script\n", + "* Set up your runtime environment" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "# Gather the dashboard URL\n", + "ray_dashboard = cluster.cluster_dashboard_uri()\n", + "\n", + "# Create the header for passing your bearer token\n", + "header = {\n", + " 'Authorization': f'Bearer {auth_token}'\n", + "}\n", + "\n", + "# Initialize the RayJobClient\n", + "client = RayJobClient(address=ray_dashboard, headers=header, verify=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2024-04-03 12:16:07,112\tINFO dashboard_sdk.py:338 -- Uploading package 
gcs://_ray_pkg_431abdedbcc7e123.zip.\n", + "2024-04-03 12:16:07,115\tINFO packaging.py:518 -- Creating a file package for local directory './'.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "raysubmit_NvXkkh1QP1kdq4LG\n" + ] + } + ], + "source": [ + "# Submit an example mnist job using the RayJobClient\n", + "submission_id = client.submit_job(\n", + " entrypoint=\"python mnist.py\",\n", + " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", + ")\n", + "print(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "''" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Get the job's logs\n", + "client.get_job_logs(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Get the job's status\n", + "client.get_job_status(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "JobDetails(type=, job_id=None, submission_id='raysubmit_NvXkkh1QP1kdq4LG', driver_info=None, status=, entrypoint='python mnist.py', message='Job has not started yet. It may be waiting for the runtime environment to be set up.', error_type=None, start_time=1712142968879, end_time=None, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_431abdedbcc7e123.zip', 'pip': {'packages': ['pytorch_lightning==1.5.10', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address=None, driver_node_id=None)" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Get job related info\n", + "client.get_job_info(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[JobDetails(type=, job_id=None, submission_id='raysubmit_NvXkkh1QP1kdq4LG', driver_info=None, status=, entrypoint='python mnist.py', message='Job has not started yet. 
It may be waiting for the runtime environment to be set up.', error_type=None, start_time=1712142968879, end_time=None, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_431abdedbcc7e123.zip', 'pip': {'packages': ['pytorch_lightning==1.5.10', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address=None, driver_node_id=None)]" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# List all existing jobs\n", + "client.list_jobs()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Iterate through the logs of a job \n", + "async for lines in client.tail_job_logs(submission_id):\n", + " print(lines, end=\"\") " + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(True, 'Successfully deleted Job raysubmit_NvXkkh1QP1kdq4LG')" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Delete a job\n", + "# Can run client.cancel_job(submission_id) first if job is still running\n", + "client.delete_job(submission_id)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Unauthorized Ray Cluster with the Ray Job Client" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"\n", + "Initialise the RayJobClient with the Ray Dashboard\n", + "\"\"\"\n", + "ray_dashboard = cluster.cluster_dashboard_uri()\n", + "client = RayJobClient(address=ray_dashboard, verify=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Submit an example mnist job using the RayJobClient\n", + "submission_id = client.submit_job(\n", + " entrypoint=\"python mnist.py\",\n", + " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", + ")\n", + "print(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Stop the job \n", + "client.stop_job(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Delete the job\n", + "client.delete_job(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [], + "source": [ + "cluster.down()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "auth.logout()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/demo-notebooks/guided-demos/preview_nbs/2_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_job_client.ipynb new file mode 100644 index 00000000..7b3d619b --- /dev/null +++ b/demo-notebooks/guided-demos/preview_nbs/2_job_client.ipynb @@ -0,0 +1,283 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this third demo we will go over the basics of the 
Ray Job Submission Client in the SDK" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Import pieces from codeflare-sdk\n", + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication, RayJobClient" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create authentication object for user permissions\n", + "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", + "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", + "\n", + "auth_token = \"XXXXX\" # The auth_token is used later for the RayJobClient\n", + "auth = TokenAuthentication(\n", + " token = auth_token,\n", + " server = \"XXXXX\",\n", + " skip_tls=False\n", + ")\n", + "auth.login()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create and configure our cluster object\n", + "cluster = Cluster(ClusterConfiguration(\n", + " name='jobtest',\n", + " namespace='default',\n", + " num_workers=2,\n", + " min_cpus=1,\n", + " max_cpus=1,\n", + " min_memory=4,\n", + " max_memory=4,\n", + " num_gpus=0,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", + "))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Bring up the cluster\n", + "cluster.up()\n", + "cluster.wait_ready()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster.details()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Ray Job Submission - Authorized Ray Cluster" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "* Submit a job using an authorized Ray dashboard and the Job Submission Client\n", + "* Provide an entrypoint command directed to your job script\n", + "* Set up your runtime environment" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Gather the dashboard URL\n", + "ray_dashboard = cluster.cluster_dashboard_uri()\n", + "\n", + "# Create the header for passing your bearer token\n", + "header = {\n", + " 'Authorization': f'Bearer {auth_token}'\n", + "}\n", + "\n", + "# Initialize the RayJobClient\n", + "client = RayJobClient(address=ray_dashboard, headers=header, verify=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Submit an example mnist job using the RayJobClient\n", + "submission_id = client.submit_job(\n", + " entrypoint=\"python mnist.py\",\n", + " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", + ")\n", + "print(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get the job's logs\n", + "client.get_job_logs(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get the job's status\n", + "client.get_job_status(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get job related info\n", + "client.get_job_info(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# List all existing 
jobs\n", + "client.list_jobs()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Iterate through the logs of a job \n", + "async for lines in client.tail_job_logs(submission_id):\n", + " print(lines, end=\"\") " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Delete a job\n", + "# Can run client.cancel_job(submission_id) first if job is still running\n", + "client.delete_job(submission_id)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Unauthorized Ray Cluster with the Ray Job Client" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"\n", + "Initialise the RayJobClient with the Ray Dashboard\n", + "\"\"\"\n", + "ray_dashboard = cluster.cluster_dashboard_uri()\n", + "client = RayJobClient(address=ray_dashboard, verify=False)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Submit an example mnist job using the RayJobClient\n", + "submission_id = client.submit_job(\n", + " entrypoint=\"python mnist.py\",\n", + " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", + ")\n", + "print(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Stop the job \n", + "client.stop_job(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Delete the job\n", + "client.delete_job(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster.down()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "auth.logout()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 017fa124bca65abbd05773b288672de72c4d0e01 Mon Sep 17 00:00:00 2001 From: Mark Campbell Date: Fri, 5 Apr 2024 17:37:02 +0100 Subject: [PATCH 172/496] Use Kueue as default (#470) * Made Kueue the default queueing strategy Updated oauth test to have mcad=True Changed .codeflare/appwrappers to .codeflare/resources Addressed comments & added SUSPENDED status Review changes & list_cluster functions Updated tests and load_components Update tests, Rebase * Update src/codeflare_sdk/cluster/cluster.py Co-authored-by: Antonin Stefanutti --------- Co-authored-by: Antonin Stefanutti --- src/codeflare_sdk/__init__.py | 2 + src/codeflare_sdk/cluster/__init__.py | 8 +- src/codeflare_sdk/cluster/cluster.py | 43 ++- src/codeflare_sdk/cluster/config.py | 4 +- src/codeflare_sdk/cluster/model.py | 2 + src/codeflare_sdk/utils/generate_yaml.py | 62 ++- src/codeflare_sdk/utils/pretty_print.py | 24 ++ tests/e2e/mnist_raycluster_sdk_oauth_test.py | 1 + tests/e2e/mnist_raycluster_sdk_test.py | 1 + tests/e2e/start_ray_cluster.py | 1 + tests/test-case-no-mcad.yamls | 2 +- tests/unit_test.py | 387 ++++++++++++++++++- tests/unit_test_support.py | 1 + 13 files changed, 515 insertions(+), 23 
deletions(-) diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index 0ed41d15..86b6da88 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -12,6 +12,8 @@ RayCluster, AppWrapper, get_cluster, + list_all_queued, + list_all_clusters, ) from .job import JobDefinition, Job, DDPJobDefinition, DDPJob, RayJobClient diff --git a/src/codeflare_sdk/cluster/__init__.py b/src/codeflare_sdk/cluster/__init__.py index 419561d7..0b1849e5 100644 --- a/src/codeflare_sdk/cluster/__init__.py +++ b/src/codeflare_sdk/cluster/__init__.py @@ -13,6 +13,12 @@ AppWrapper, ) -from .cluster import Cluster, ClusterConfiguration, get_cluster +from .cluster import ( + Cluster, + ClusterConfiguration, + get_cluster, + list_all_queued, + list_all_clusters, +) from .awload import AWManager diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 707ea61d..be59c5c6 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -189,6 +189,7 @@ def create_app_wrapper(self): dispatch_priority = self.config.dispatch_priority write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls + local_queue = self.config.local_queue return generate_appwrapper( name=name, namespace=namespace, @@ -213,6 +214,7 @@ def create_app_wrapper(self): priority_val=priority_val, write_to_file=write_to_file, verify_tls=verify_tls, + local_queue=local_queue, ) # creates a new cluster with the provided or default spec @@ -319,6 +321,9 @@ def status( # check the ray cluster status cluster = _ray_cluster_status(self.config.name, self.config.namespace) if cluster: + if cluster.status == RayClusterStatus.SUSPENDED: + ready = False + status = CodeFlareClusterStatus.SUSPENDED if cluster.status == RayClusterStatus.UNKNOWN: ready = False status = CodeFlareClusterStatus.STARTING @@ -588,17 +593,24 @@ def list_all_clusters(namespace: str, print_to_console: bool = True): return clusters -def list_all_queued(namespace: str, print_to_console: bool = True): +def list_all_queued(namespace: str, print_to_console: bool = True, mcad: bool = False): """ - Returns (and prints by default) a list of all currently queued-up AppWrappers + Returns (and prints by default) a list of all currently queued-up Ray Clusters in a given namespace. 
""" - app_wrappers = _get_app_wrappers( - namespace, filter=[AppWrapperStatus.RUNNING, AppWrapperStatus.PENDING] - ) - if print_to_console: - pretty_print.print_app_wrappers_status(app_wrappers) - return app_wrappers + if mcad: + resources = _get_app_wrappers( + namespace, filter=[AppWrapperStatus.RUNNING, AppWrapperStatus.PENDING] + ) + if print_to_console: + pretty_print.print_app_wrappers_status(resources) + else: + resources = _get_ray_clusters( + namespace, filter=[RayClusterStatus.READY, RayClusterStatus.SUSPENDED] + ) + if print_to_console: + pretty_print.print_ray_clusters_status(resources) + return resources def get_current_namespace(): # pragma: no cover @@ -798,7 +810,9 @@ def _ray_cluster_status(name, namespace="default") -> Optional[RayCluster]: return None -def _get_ray_clusters(namespace="default") -> List[RayCluster]: +def _get_ray_clusters( + namespace="default", filter: Optional[List[RayClusterStatus]] = None +) -> List[RayCluster]: list_of_clusters = [] try: config_check() @@ -812,8 +826,15 @@ def _get_ray_clusters(namespace="default") -> List[RayCluster]: except Exception as e: # pragma: no cover return _kube_api_error_handling(e) - for rc in rcs["items"]: - list_of_clusters.append(_map_to_ray_cluster(rc)) + # Get a list of RCs with the filter if it is passed to the function + if filter is not None: + for rc in rcs["items"]: + ray_cluster = _map_to_ray_cluster(rc) + if filter and ray_cluster.status in filter: + list_of_clusters.append(ray_cluster) + else: + for rc in rcs["items"]: + list_of_clusters.append(_map_to_ray_cluster(rc)) return list_of_clusters diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 7156495f..f6bcac89 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -46,7 +46,7 @@ class ClusterConfiguration: num_gpus: int = 0 template: str = f"{dir}/templates/base-template.yaml" instascale: bool = False - mcad: bool = True + mcad: bool = False envs: dict = field(default_factory=dict) image: str = "" local_interactive: bool = False @@ -60,3 +60,5 @@ def __post_init__(self): print( "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) + + local_queue: str = None diff --git a/src/codeflare_sdk/cluster/model.py b/src/codeflare_sdk/cluster/model.py index 2e1abaf7..e2dcb652 100644 --- a/src/codeflare_sdk/cluster/model.py +++ b/src/codeflare_sdk/cluster/model.py @@ -32,6 +32,7 @@ class RayClusterStatus(Enum): UNHEALTHY = "unhealthy" FAILED = "failed" UNKNOWN = "unknown" + SUSPENDED = "suspended" class AppWrapperStatus(Enum): @@ -59,6 +60,7 @@ class CodeFlareClusterStatus(Enum): QUEUEING = 4 FAILED = 5 UNKNOWN = 6 + SUSPENDED = 7 @dataclass diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 7f14b5ba..2088b910 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -17,6 +17,7 @@ (in the cluster sub-module) for AppWrapper generation. """ +from typing import Optional import typing import yaml import sys @@ -460,7 +461,35 @@ def _create_oauth_sidecar_object( ) -def write_components(user_yaml: dict, output_file_name: str): +def get_default_kueue_name(namespace: str): + # If the local queue is set, use it. Otherwise, try to use the default queue. 
+ try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + local_queues = api_instance.list_namespaced_custom_object( + group="kueue.x-k8s.io", + version="v1beta1", + namespace=namespace, + plural="localqueues", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + for lq in local_queues["items"]: + if ( + "annotations" in lq["metadata"] + and "kueue.x-k8s.io/default-queue" in lq["metadata"]["annotations"] + and lq["metadata"]["annotations"]["kueue.x-k8s.io/default-queue"].lower() + == "true" + ): + return lq["metadata"]["name"] + raise ValueError( + "Default Local Queue with kueue.x-k8s.io/default-queue: true annotation not found please create a default Local Queue or provide the local_queue name in Cluster Configuration" + ) + + +def write_components( + user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str] +): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) if not os.path.exists(directory_path): @@ -468,9 +497,19 @@ def write_components(user_yaml: dict, output_file_name: str): components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") open(output_file_name, "w").close() + lq_name = local_queue or get_default_kueue_name(namespace) with open(output_file_name, "a") as outfile: for component in components: if "generictemplate" in component: + if ( + "workload.codeflare.dev/appwrapper" + in component["generictemplate"]["metadata"]["labels"] + ): + del component["generictemplate"]["metadata"]["labels"][ + "workload.codeflare.dev/appwrapper" + ] + labels = component["generictemplate"]["metadata"]["labels"] + labels.update({"kueue.x-k8s.io/queue-name": lq_name}) outfile.write("---\n") yaml.dump( component["generictemplate"], outfile, default_flow_style=False @@ -478,11 +517,23 @@ def write_components(user_yaml: dict, output_file_name: str): print(f"Written to: {output_file_name}") -def load_components(user_yaml: dict, name: str): +def load_components( + user_yaml: dict, name: str, namespace: str, local_queue: Optional[str] +): component_list = [] components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") + lq_name = local_queue or get_default_kueue_name(namespace) for component in components: if "generictemplate" in component: + if ( + "workload.codeflare.dev/appwrapper" + in component["generictemplate"]["metadata"]["labels"] + ): + del component["generictemplate"]["metadata"]["labels"][ + "workload.codeflare.dev/appwrapper" + ] + labels = component["generictemplate"]["metadata"]["labels"] + labels.update({"kueue.x-k8s.io/queue-name": lq_name}) component_list.append(component["generictemplate"]) resources = "---\n" + "---\n".join( @@ -523,6 +574,7 @@ def generate_appwrapper( priority_val: int, write_to_file: bool, verify_tls: bool, + local_queue: Optional[str], ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -575,18 +627,18 @@ def generate_appwrapper( if is_openshift_cluster(): enable_openshift_oauth(user_yaml, cluster_name, namespace) - directory_path = os.path.expanduser("~/.codeflare/appwrapper/") + directory_path = os.path.expanduser("~/.codeflare/resources/") outfile = os.path.join(directory_path, appwrapper_name + ".yaml") if write_to_file: if mcad: write_user_appwrapper(user_yaml, outfile) else: - write_components(user_yaml, outfile) + write_components(user_yaml, outfile, namespace, local_queue) return outfile else: if mcad: user_yaml = load_appwrapper(user_yaml, name) 
else: - user_yaml = load_components(user_yaml, name) + user_yaml = load_components(user_yaml, name, namespace, local_queue) return user_yaml diff --git a/src/codeflare_sdk/utils/pretty_print.py b/src/codeflare_sdk/utils/pretty_print.py index 0bdc185d..74678ecc 100644 --- a/src/codeflare_sdk/utils/pretty_print.py +++ b/src/codeflare_sdk/utils/pretty_print.py @@ -56,6 +56,30 @@ def print_app_wrappers_status(app_wrappers: List[AppWrapper], starting: bool = F console.print(Panel.fit(table)) +def print_ray_clusters_status(app_wrappers: List[AppWrapper], starting: bool = False): + if not app_wrappers: + print_no_resources_found() + return # shortcircuit + + console = Console() + table = Table( + box=box.ASCII_DOUBLE_HEAD, + title="[bold] :rocket: Cluster Queue Status :rocket:", + ) + table.add_column("Name", style="cyan", no_wrap=True) + table.add_column("Status", style="magenta") + + for app_wrapper in app_wrappers: + name = app_wrapper.name + status = app_wrapper.status.value + if starting: + status += " (starting)" + table.add_row(name, status) + table.add_row("") # empty row for spacing + + console.print(Panel.fit(table)) + + def print_cluster_status(cluster: RayCluster): "Pretty prints the status of a passed-in cluster" if not cluster: diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 708a389d..90bec08d 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -52,6 +52,7 @@ def run_mnist_raycluster_sdk_oauth(self): instascale=False, image=ray_image, write_to_file=True, + mcad=True, ) ) diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py index b98b860b..a38cb48d 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.py +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -52,6 +52,7 @@ def run_mnist_raycluster_sdk(self): instascale=False, image=ray_image, write_to_file=True, + mcad=True, ) ) diff --git a/tests/e2e/start_ray_cluster.py b/tests/e2e/start_ray_cluster.py index f4cf7e73..8bb18580 100644 --- a/tests/e2e/start_ray_cluster.py +++ b/tests/e2e/start_ray_cluster.py @@ -22,6 +22,7 @@ num_gpus=0, instascale=False, image=ray_image, + mcad=True, ) ) diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 99745760..e6bbcdd2 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -6,7 +6,7 @@ metadata: sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' - workload.codeflare.dev/appwrapper: unit-test-cluster-ray + kueue.x-k8s.io/queue-name: local-queue-default name: unit-test-cluster-ray namespace: ns spec: diff --git a/tests/unit_test.py b/tests/unit_test.py index 6831ea65..b25d3dd0 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -24,7 +24,7 @@ from codeflare_sdk.cluster import cluster parent = Path(__file__).resolve().parents[1] -aw_dir = os.path.expanduser("~/.codeflare/appwrapper/") +aw_dir = os.path.expanduser("~/.codeflare/resources/") sys.path.append(str(parent) + "/src") from kubernetes import client, config, dynamic @@ -299,8 +299,59 @@ def test_create_app_wrapper_raises_error_with_no_image(): ), "Error message did not match expected output." 
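For orientation alongside these test changes, here is a minimal sketch of the configuration surface after the switch to Kueue as the default queueing path. The cluster name and resource sizes below are illustrative only; "local-queue-default" mirrors the queue name used in the test fixtures.

from codeflare_sdk import Cluster, ClusterConfiguration

# With mcad now defaulting to False, the generated RayCluster is labelled for Kueue
# (kueue.x-k8s.io/queue-name) instead of being wrapped in an AppWrapper.
cluster = Cluster(ClusterConfiguration(
    name="sketch-cluster",        # illustrative name
    namespace="default",
    num_workers=2,
    min_cpus=1,
    max_cpus=1,
    min_memory=4,
    max_memory=4,
    num_gpus=0,
    image="quay.io/project-codeflare/ray:latest-py39-cu118",
    # Optional: omit local_queue to fall back to the LocalQueue annotated
    # kueue.x-k8s.io/default-queue: "true" in the target namespace.
    local_queue="local-queue-default",
))

# The AppWrapper/MCAD path remains available by opting in explicitly with mcad=True.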
+def get_local_queue(group, version, namespace, plural): + assert group == "kueue.x-k8s.io" + assert version == "v1beta1" + assert namespace == "ns" + assert plural == "localqueues" + local_queues = { + "apiVersion": "kueue.x-k8s.io/v1beta1", + "items": [ + { + "apiVersion": "kueue.x-k8s.io/v1beta1", + "kind": "LocalQueue", + "metadata": { + "annotations": {"kueue.x-k8s.io/default-queue": "true"}, + "name": "local-queue-default", + "namespace": "ns", + }, + "spec": {"clusterQueue": "cluster-queue"}, + } + ], + "kind": "LocalQueueList", + "metadata": {"continue": "", "resourceVersion": "2266811"}, + } + return local_queues + + def test_cluster_creation_no_mcad(mocker): + # Create Ray Cluster with no local queue specified + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + config = createClusterConfig() + config.name = "unit-test-cluster-ray" + config.write_to_file = True + config.mcad = False + cluster = Cluster(config) + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" + assert cluster.app_wrapper_name == "unit-test-cluster-ray" + assert filecmp.cmp( + f"{aw_dir}unit-test-cluster-ray.yaml", + f"{parent}/tests/test-case-no-mcad.yamls", + shallow=True, + ) + + +def test_cluster_creation_no_mcad_local_queue(mocker): # With written resources + # Create Ray Cluster with local queue specified mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", @@ -310,6 +361,7 @@ def test_cluster_creation_no_mcad(mocker): config.name = "unit-test-cluster-ray" config.mcad = False config.write_to_file = True + config.local_queue = "local-queue-default" cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" assert cluster.app_wrapper_name == "unit-test-cluster-ray" @@ -334,6 +386,7 @@ def test_cluster_creation_no_mcad(mocker): image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=False, mcad=False, + local_queue="local-queue-default", ) cluster = Cluster(config) test_resources = [] @@ -383,6 +436,7 @@ def test_default_cluster_creation(mocker): default_config = ClusterConfiguration( name="unit-test-default-cluster", image="quay.io/project-codeflare/ray:latest-py39-cu118", + mcad=True, ) cluster = Cluster(default_config) test_aw = yaml.safe_load(cluster.app_wrapper_yaml) @@ -494,6 +548,10 @@ def test_cluster_up_down(mocker): def test_cluster_up_down_no_mcad(mocker): + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( @@ -832,6 +890,7 @@ def test_ray_details(mocker, capsys): namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=True, + mcad=True, ) ) captured = capsys.readouterr() @@ -1389,7 +1448,263 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "observedGeneration": 1, "state": "ready", }, - } + }, + { + "apiVersion": "ray.io/v1", + "kind": "RayCluster", + "metadata": { + "creationTimestamp": 
"2023-02-22T16:26:07Z", + "generation": 1, + "labels": { + "workload.codeflare.dev/appwrapper": "quicktest2", + "controller-tools.k8s.io": "1.0", + "resourceName": "quicktest2", + "orderedinstance": "m4.xlarge_g4dn.xlarge", + }, + "managedFields": [ + { + "apiVersion": "ray.io/v1", + "fieldsType": "FieldsV1", + "fieldsV1": { + "f:metadata": { + "f:labels": { + ".": {}, + "f:workload.codeflare.dev/appwrapper": {}, + "f:controller-tools.k8s.io": {}, + "f:resourceName": {}, + }, + "f:ownerReferences": { + ".": {}, + 'k:{"uid":"6334fc1b-471e-4876-8e7b-0b2277679235"}': {}, + }, + }, + "f:spec": { + ".": {}, + "f:autoscalerOptions": { + ".": {}, + "f:idleTimeoutSeconds": {}, + "f:imagePullPolicy": {}, + "f:resources": { + ".": {}, + "f:limits": { + ".": {}, + "f:cpu": {}, + "f:memory": {}, + }, + "f:requests": { + ".": {}, + "f:cpu": {}, + "f:memory": {}, + }, + }, + "f:upscalingMode": {}, + }, + "f:enableInTreeAutoscaling": {}, + "f:headGroupSpec": { + ".": {}, + "f:rayStartParams": { + ".": {}, + "f:block": {}, + "f:dashboard-host": {}, + "f:num-gpus": {}, + }, + "f:serviceType": {}, + "f:template": { + ".": {}, + "f:spec": {".": {}, "f:containers": {}}, + }, + }, + "f:rayVersion": {}, + "f:workerGroupSpecs": {}, + }, + }, + "manager": "mcad-controller", + "operation": "Update", + "time": "2023-02-22T16:26:07Z", + }, + { + "apiVersion": "ray.io/v1", + "fieldsType": "FieldsV1", + "fieldsV1": { + "f:status": { + ".": {}, + "f:availableWorkerReplicas": {}, + "f:desiredWorkerReplicas": {}, + "f:endpoints": { + ".": {}, + "f:client": {}, + "f:dashboard": {}, + "f:gcs": {}, + }, + "f:lastUpdateTime": {}, + "f:maxWorkerReplicas": {}, + "f:minWorkerReplicas": {}, + "f:state": {}, + } + }, + "manager": "manager", + "operation": "Update", + "subresource": "status", + "time": "2023-02-22T16:26:16Z", + }, + ], + "name": "quicktest2", + "namespace": "ns", + "ownerReferences": [ + { + "apiVersion": "workload.codeflare.dev/v1beta1", + "blockOwnerDeletion": True, + "controller": True, + "kind": "AppWrapper", + "name": "quicktest2", + "uid": "6334fc1b-471e-4876-8e7b-0b2277679235", + } + ], + "resourceVersion": "9482407", + "uid": "44d45d1f-26c8-43e7-841f-831dbd8c1285", + }, + "spec": { + "autoscalerOptions": { + "idleTimeoutSeconds": 60, + "imagePullPolicy": "Always", + "resources": { + "limits": {"cpu": "500m", "memory": "512Mi"}, + "requests": {"cpu": "500m", "memory": "512Mi"}, + }, + "upscalingMode": "Default", + }, + "enableInTreeAutoscaling": False, + "headGroupSpec": { + "rayStartParams": { + "block": "true", + "dashboard-host": "0.0.0.0", + "num-gpus": "0", + }, + "serviceType": "ClusterIP", + "template": { + "spec": { + "containers": [ + { + "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", + "imagePullPolicy": "Always", + "lifecycle": { + "preStop": { + "exec": { + "command": [ + "/bin/sh", + "-c", + "ray stop", + ] + } + } + }, + "name": "ray-head", + "ports": [ + { + "containerPort": 6379, + "name": "gcs", + "protocol": "TCP", + }, + { + "containerPort": 8265, + "name": "dashboard", + "protocol": "TCP", + }, + { + "containerPort": 10001, + "name": "client", + "protocol": "TCP", + }, + ], + "resources": { + "limits": { + "cpu": 2, + "memory": "8G", + "nvidia.com/gpu": 0, + }, + "requests": { + "cpu": 2, + "memory": "8G", + "nvidia.com/gpu": 0, + }, + }, + } + ] + } + }, + }, + "rayVersion": "1.12.0", + "workerGroupSpecs": [ + { + "groupName": "small-group-quicktest2", + "maxReplicas": 1, + "minReplicas": 1, + "rayStartParams": {"block": "true", 
"num-gpus": "0"}, + "replicas": 1, + "template": { + "metadata": { + "annotations": {"key": "value"}, + "labels": {"key": "value"}, + }, + "spec": { + "containers": [ + { + "env": [ + { + "name": "MY_POD_IP", + "valueFrom": { + "fieldRef": { + "fieldPath": "status.podIP" + } + }, + } + ], + "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", + "lifecycle": { + "preStop": { + "exec": { + "command": [ + "/bin/sh", + "-c", + "ray stop", + ] + } + } + }, + "name": "machine-learning", + "resources": { + "limits": { + "cpu": 1, + "memory": "2G", + "nvidia.com/gpu": 0, + }, + "requests": { + "cpu": 1, + "memory": "2G", + "nvidia.com/gpu": 0, + }, + }, + } + ], + }, + }, + } + ], + }, + "status": { + "availableWorkerReplicas": 2, + "desiredWorkerReplicas": 1, + "endpoints": { + "client": "10001", + "dashboard": "8265", + "gcs": "6379", + }, + "lastUpdateTime": "2023-02-22T16:26:16Z", + "maxWorkerReplicas": 1, + "minWorkerReplicas": 1, + "state": "suspended", + }, + }, ] } return api_obj @@ -2117,6 +2432,8 @@ def custom_side_effect(group, version, namespace, plural, **kwargs): return get_ray_obj("ray.io", "v1", "ns", "rayclusters") elif plural == "appwrappers": return get_aw_obj("workload.codeflare.dev", "v1beta1", "ns", "appwrappers") + elif plural == "localqueues": + return get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues") mocker.patch( "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", get_aw_obj @@ -2296,6 +2613,22 @@ def test_list_clusters(mocker, capsys): " │ │ │ │ │ │ \n" " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" " ╰───────────────────────────────────────────────────────────────╯ \n" + "╭───────────────────────────────────────────────────────────────╮\n" + "│ Name │\n" + "│ quicktest2 Inactive ❌ │\n" + "│ │\n" + "│ URI: ray://quicktest2-head-svc.ns.svc:10001 │\n" + "│ │\n" + "│ Dashboard🔗 │\n" + "│ │\n" + "│ Cluster Resources │\n" + "│ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │\n" + "│ │ # Workers │ │ Memory CPU GPU │ │\n" + "│ │ │ │ │ │\n" + "│ │ 1 │ │ 2G~2G 1 0 │ │\n" + "│ │ │ │ │ │\n" + "│ ╰─────────────╯ ╰──────────────────────────────────────╯ │\n" + "╰───────────────────────────────────────────────────────────────╯\n" ) @@ -2305,7 +2638,7 @@ def test_list_queue(mocker, capsys): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", side_effect=get_obj_none, ) - list_all_queued("ns") + list_all_queued("ns", mcad=True) captured = capsys.readouterr() assert captured.out == ( "╭──────────────────────────────────────────────────────────────────────────────╮\n" @@ -2316,7 +2649,7 @@ def test_list_queue(mocker, capsys): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", side_effect=get_aw_obj, ) - list_all_queued("ns") + list_all_queued("ns", mcad=True) captured = capsys.readouterr() assert captured.out == ( "╭──────────────────────────╮\n" @@ -2334,6 +2667,49 @@ def test_list_queue(mocker, capsys): ) +def test_list_queue_rayclusters(mocker, capsys): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mock_api = MagicMock() + mock_api.get_api_versions.return_value.groups = [ + MagicMock(versions=[MagicMock(group_version="route.openshift.io/v1")]) + ] + mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) + + assert is_openshift_cluster() == True + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=get_obj_none, + ) + list_all_queued("ns") + captured = 
capsys.readouterr() + assert captured.out == ( + "╭──────────────────────────────────────────────────────────────────────────────╮\n" + "│ No resources found, have you run cluster.up() yet? │\n" + "╰──────────────────────────────────────────────────────────────────────────────╯\n" + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=get_ray_obj, + ) + list_all_queued("ns") + captured = capsys.readouterr() + print(captured.out) + assert captured.out == ( + "╭────────────────────────────╮\n" + "│ 🚀 Cluster Queue Status │\n" + "│ 🚀 │\n" + "│ +------------+-----------+ │\n" + "│ | Name | Status | │\n" + "│ +============+===========+ │\n" + "│ | quicktest | ready | │\n" + "│ | | | │\n" + "│ | quicktest2 | suspended | │\n" + "│ | | | │\n" + "│ +------------+-----------+ │\n" + "╰────────────────────────────╯\n" + ) + + def test_cluster_status(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") @@ -2360,6 +2736,7 @@ def test_cluster_status(mocker): namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=True, + mcad=True, ) ) mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) @@ -2454,6 +2831,7 @@ def test_wait_ready(mocker, capsys): namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=True, + mcad=True, ) ) try: @@ -3125,6 +3503,7 @@ def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): "test_cluster", image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=True, + mcad=True, ) ) user_yaml = write_user_appwrapper.call_args.args[0] diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 31328338..190c4f1a 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -43,6 +43,7 @@ def createClusterConfig(): min_memory=5, max_memory=6, num_gpus=7, + mcad=True, instascale=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], From a06cf4f417b58a633927521aec8914d63f654632 Mon Sep 17 00:00:00 2001 From: Christian Zaccaria <73656840+ChristianZaccaria@users.noreply.github.com> Date: Fri, 5 Apr 2024 17:43:07 +0100 Subject: [PATCH 173/496] Fix output logs by switching to kind-cluster context (#501) --- .github/workflows/e2e_tests.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 4698af25..0fee64b3 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -123,6 +123,7 @@ jobs: poetry run pytest -v -s ./tests/e2e -m kind > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 - name: Switch to kind-cluster context to print logs + if: always() && steps.deploy.outcome == 'success' run: kubectl config use-context kind-cluster - name: Print CodeFlare operator logs From 88b2518db3657411635e647301e59dfdd65d75a7 Mon Sep 17 00:00:00 2001 From: Eoin Gallinagh <34043122+VanillaSpoon@users.noreply.github.com> Date: Fri, 5 Apr 2024 17:52:47 +0100 Subject: [PATCH 174/496] remove: DDPJobDefinition from SDK (#498) * remove: DDPJobDefinition and update tests * add: address comments --- src/codeflare_sdk/__init__.py | 2 +- src/codeflare_sdk/cluster/cluster.py | 18 - src/codeflare_sdk/job/__init__.py | 2 - src/codeflare_sdk/job/jobs.py | 207 ----------- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 42 ++- tests/e2e/mnist_raycluster_sdk_test.py | 40 ++- 
tests/e2e/mnist_rayjob.py | 33 +- tests/unit_test.py | 356 ------------------- tests/unit_test_support.py | 34 -- 9 files changed, 67 insertions(+), 667 deletions(-) delete mode 100644 src/codeflare_sdk/job/jobs.py diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index 86b6da88..28a9c4db 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -16,6 +16,6 @@ list_all_clusters, ) -from .job import JobDefinition, Job, DDPJobDefinition, DDPJob, RayJobClient +from .job import RayJobClient from .utils import generate_cert diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index be59c5c6..81f7a711 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -21,10 +21,8 @@ from time import sleep from typing import List, Optional, Tuple, Dict -import openshift as oc from kubernetes import config from ray.job_submission import JobSubmissionClient -import urllib3 from .auth import config_check, api_config_handler from ..utils import pretty_print @@ -58,8 +56,6 @@ class Cluster: Note that currently, the underlying implementation is a Ray cluster. """ - torchx_scheduler = "ray" - def __init__(self, config: ClusterConfiguration): """ Create the resource cluster object by passing in a ClusterConfiguration @@ -477,20 +473,6 @@ def job_logs(self, job_id: str) -> str: """ return self.job_client.get_job_logs(job_id) - def torchx_config( - self, working_dir: str = None, requirements: str = None - ) -> Dict[str, str]: - dashboard_address = urllib3.util.parse_url(self.cluster_dashboard_uri()).host - to_return = { - "cluster_name": self.config.name, - "dashboard_address": dashboard_address, - } - if working_dir: - to_return["working_dir"] = working_dir - if requirements: - to_return["requirements"] = requirements - return to_return - def from_k8_cluster_object( rc, mcad=True, diff --git a/src/codeflare_sdk/job/__init__.py b/src/codeflare_sdk/job/__init__.py index c5b5819a..f230eb77 100644 --- a/src/codeflare_sdk/job/__init__.py +++ b/src/codeflare_sdk/job/__init__.py @@ -1,3 +1 @@ -from .jobs import JobDefinition, Job, DDPJobDefinition, DDPJob - from .ray_jobs import RayJobClient diff --git a/src/codeflare_sdk/job/jobs.py b/src/codeflare_sdk/job/jobs.py deleted file mode 100644 index 655107df..00000000 --- a/src/codeflare_sdk/job/jobs.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright 2023 IBM, Red Hat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -The jobs sub-module contains methods needed to submit Distributed Data Parallel(DDP) jobs to Ray Clusters created by the CodeFlare SDK. 
-""" - -import abc -from typing import TYPE_CHECKING, Optional, Dict, List -from pathlib import Path - -from torchx.components.dist import ddp -from torchx.runner import get_runner, Runner -from torchx.schedulers.ray_scheduler import RayScheduler -from torchx.specs import AppHandle, parse_app_handle, AppDryRunInfo - - -if TYPE_CHECKING: - from ..cluster.cluster import Cluster -from ..cluster.cluster import get_current_namespace - -all_jobs: List["Job"] = [] - - -class JobDefinition(metaclass=abc.ABCMeta): - def _dry_run(self, cluster: "Cluster"): - pass - - def submit(self, cluster: "Cluster"): - pass - - -class Job(metaclass=abc.ABCMeta): - def status(self): - pass - - def logs(self): - pass - - -class DDPJobDefinition(JobDefinition): - def __init__( - self, - script: Optional[str] = None, - m: Optional[str] = None, - script_args: Optional[List[str]] = None, - name: Optional[str] = None, - cpu: Optional[int] = None, - gpu: Optional[int] = None, - memMB: Optional[int] = None, - h: Optional[str] = None, - j: Optional[str] = None, - env: Optional[Dict[str, str]] = None, - max_retries: int = 0, - mounts: Optional[List[str]] = None, - rdzv_port: int = 29500, - rdzv_backend: str = None, - scheduler_args: Optional[Dict[str, str]] = None, - image: Optional[str] = None, - workspace: Optional[str] = f"file://{Path.cwd()}", - ): - if bool(script) == bool(m): # logical XOR - raise ValueError( - "Exactly one of the following arguments must be defined: [script, m]." - ) - self.script = script - self.m = m - self.script_args: List[str] = script_args if script_args is not None else [] - self.name = name - self.cpu = cpu - self.gpu = gpu - self.memMB = memMB - self.h = h - self.j = j - self.env: Dict[str, str] = env if env is not None else dict() - self.max_retries = max_retries - self.mounts: List[str] = mounts if mounts is not None else [] - self.rdzv_port = rdzv_port - self.rdzv_backend = rdzv_backend - self.scheduler_args: Dict[str, str] = ( - scheduler_args if scheduler_args is not None else dict() - ) - self.image = image - self.workspace = workspace - - def _dry_run(self, cluster: "Cluster"): - j = f"{cluster.config.num_workers}x{max(cluster.config.num_gpus, 1)}" # # of proc. 
= # of gpus - runner = get_runner(ray_client=cluster.job_client) - runner._scheduler_instances["ray"] = RayScheduler( - session_name=runner._name, ray_client=cluster.job_client - ) - return ( - runner.dryrun( - app=ddp( - *self.script_args, - script=self.script, - m=self.m, - name=self.name, - h=self.h, - cpu=self.cpu if self.cpu is not None else cluster.config.max_cpus, - gpu=self.gpu if self.gpu is not None else cluster.config.num_gpus, - memMB=self.memMB - if self.memMB is not None - else cluster.config.max_memory * 1024, - j=self.j if self.j is not None else j, - env=self.env, - max_retries=self.max_retries, - rdzv_port=self.rdzv_port, - rdzv_backend=self.rdzv_backend - if self.rdzv_backend is not None - else "static", - mounts=self.mounts, - ), - scheduler=cluster.torchx_scheduler, - cfg=cluster.torchx_config(**self.scheduler_args), - workspace=self.workspace, - ), - runner, - ) - - def _missing_spec(self, spec: str): - raise ValueError(f"Job definition missing arg: {spec}") - - def _dry_run_no_cluster(self): - if self.scheduler_args is not None: - if self.scheduler_args.get("namespace") is None: - self.scheduler_args["namespace"] = get_current_namespace() - runner = get_runner() - return ( - runner.dryrun( - app=ddp( - *self.script_args, - script=self.script, - m=self.m, - name=self.name - if self.name is not None - else self._missing_spec("name"), - h=self.h, - cpu=self.cpu - if self.cpu is not None - else self._missing_spec("cpu (# cpus per worker)"), - gpu=self.gpu - if self.gpu is not None - else self._missing_spec("gpu (# gpus per worker)"), - memMB=self.memMB - if self.memMB is not None - else self._missing_spec("memMB (memory in MB)"), - j=self.j - if self.j is not None - else self._missing_spec( - "j (`workers`x`procs`)" - ), # # of proc. = # of gpus, - env=self.env, # should this still exist? - max_retries=self.max_retries, - rdzv_port=self.rdzv_port, # should this still exist? 
- rdzv_backend=self.rdzv_backend - if self.rdzv_backend is not None - else "c10d", - mounts=self.mounts, - image=self.image - if self.image is not None - else self._missing_spec("image"), - ), - scheduler="kubernetes_mcad", - cfg=self.scheduler_args, - workspace="", - ), - runner, - ) - - def submit(self, cluster: "Cluster" = None) -> "Job": - return DDPJob(self, cluster) - - -class DDPJob(Job): - def __init__(self, job_definition: "DDPJobDefinition", cluster: "Cluster" = None): - self.job_definition = job_definition - self.cluster = cluster - if self.cluster: - definition, runner = job_definition._dry_run(cluster) - self._app_handle = runner.schedule(definition) - self._runner = runner - else: - definition, runner = job_definition._dry_run_no_cluster() - self._app_handle = runner.schedule(definition) - self._runner = runner - all_jobs.append(self) - - def status(self) -> str: - return self._runner.status(self._app_handle) - - def logs(self) -> str: - return "".join(self._runner.log_lines(self._app_handle, None)) - - def cancel(self): - self._runner.cancel(self._app_handle) diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 90bec08d..1ffff8ec 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -2,10 +2,8 @@ from time import sleep -from torchx.specs.api import AppState, is_terminal - from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication -from codeflare_sdk.job.jobs import DDPJobDefinition +from codeflare_sdk.job import RayJobClient import pytest @@ -79,7 +77,7 @@ def assert_jobsubmit_withoutLogin(self, cluster): "entrypoint": "python mnist.py", "runtime_env": { "working_dir": "./tests/e2e/", - "pip": "mnist_pip_requirements.txt", + "pip": "./tests/e2e/mnist_pip_requirements.txt", }, } try: @@ -98,19 +96,26 @@ def assert_jobsubmit_withoutLogin(self, cluster): def assert_jobsubmit_withlogin(self, cluster): self.assert_appwrapper_exists() - jobdef = DDPJobDefinition( - name="mnist", - script="./tests/e2e/mnist.py", - scheduler_args={"requirements": "./tests/e2e/mnist_pip_requirements.txt"}, + auth_token = run_oc_command(["whoami", "--show-token=true"]) + ray_dashboard = cluster.cluster_dashboard_uri() + header = {"Authorization": f"Bearer {auth_token}"} + client = RayJobClient(address=ray_dashboard, headers=header, verify=True) + + # Submit the job + submission_id = client.submit_job( + entrypoint="python mnist.py", + runtime_env={ + "working_dir": "./tests/e2e/", + "pip": "mnist_pip_requirements.txt", + }, ) - job = jobdef.submit(cluster) - + print(f"Submitted job with ID: {submission_id}") done = False time = 0 timeout = 900 while not done: - status = job.status() - if is_terminal(status.state): + status = client.get_job_status(submission_id) + if status.is_terminal(): break if not done: print(status) @@ -119,11 +124,12 @@ def assert_jobsubmit_withlogin(self, cluster): sleep(5) time += 5 - print(job.status()) - self.assert_job_completion(status) + logs = client.get_job_logs(submission_id) + print(logs) - print(job.logs()) + self.assert_job_completion(status) + client.delete_job(submission_id) cluster.down() def assert_appwrapper_exists(self): @@ -144,9 +150,9 @@ def assert_appwrapper_exists(self): assert False def assert_job_completion(self, status): - if status.state == AppState.SUCCEEDED: - print(f"Job has completed: '{status.state}'") + if status == "SUCCEEDED": + print(f"Job has completed: '{status}'") assert True else: - print(f"Job has 
completed: '{status.state}'") + print(f"Job has completed: '{status}'") assert False diff --git a/tests/e2e/mnist_raycluster_sdk_test.py b/tests/e2e/mnist_raycluster_sdk_test.py index a38cb48d..348a02cc 100644 --- a/tests/e2e/mnist_raycluster_sdk_test.py +++ b/tests/e2e/mnist_raycluster_sdk_test.py @@ -7,10 +7,8 @@ import ray -from torchx.specs.api import AppState, is_terminal - from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration -from codeflare_sdk.job.jobs import DDPJobDefinition +from codeflare_sdk.job import RayJobClient import pytest @@ -68,19 +66,26 @@ def run_mnist_raycluster_sdk(self): cluster.details() - jobdef = DDPJobDefinition( - name="mnist", - script="./tests/e2e/mnist.py", - scheduler_args={"requirements": "./tests/e2e/mnist_pip_requirements.txt"}, + auth_token = run_oc_command(["whoami", "--show-token=true"]) + ray_dashboard = cluster.cluster_dashboard_uri() + header = {"Authorization": f"Bearer {auth_token}"} + client = RayJobClient(address=ray_dashboard, headers=header, verify=True) + + # Submit the job + submission_id = client.submit_job( + entrypoint="python mnist.py", + runtime_env={ + "working_dir": "./tests/e2e/", + "pip": "./tests/e2e/mnist_pip_requirements.txt", + }, ) - job = jobdef.submit(cluster) - + print(f"Submitted job with ID: {submission_id}") done = False time = 0 timeout = 900 while not done: - status = job.status() - if is_terminal(status.state): + status = client.get_job_status(submission_id) + if status.is_terminal(): break if not done: print(status) @@ -89,11 +94,12 @@ def run_mnist_raycluster_sdk(self): sleep(5) time += 5 - print(job.status()) - self.assert_job_completion(status) + logs = client.get_job_logs(submission_id) + print(logs) - print(job.logs()) + self.assert_job_completion(status) + client.delete_job(submission_id) cluster.down() # Assertions @@ -128,9 +134,9 @@ def assert_raycluster_exists(self): assert False def assert_job_completion(self, status): - if status.state == AppState.SUCCEEDED: - print(f"Job has completed: '{status.state}'") + if status == "SUCCEEDED": + print(f"Job has completed: '{status}'") assert True else: - print(f"Job has completed: '{status.state}'") + print(f"Job has completed: '{status}'") assert False diff --git a/tests/e2e/mnist_rayjob.py b/tests/e2e/mnist_rayjob.py index 8557a55c..c9306da6 100644 --- a/tests/e2e/mnist_rayjob.py +++ b/tests/e2e/mnist_rayjob.py @@ -2,10 +2,10 @@ from time import sleep -from torchx.specs.api import AppState, is_terminal +from support import * from codeflare_sdk.cluster.cluster import get_cluster -from codeflare_sdk.job.jobs import DDPJobDefinition +from codeflare_sdk.job import RayJobClient namespace = sys.argv[1] @@ -13,19 +13,23 @@ cluster.details() -jobdef = DDPJobDefinition( - name="mnist", - script="mnist.py", - scheduler_args={"requirements": "requirements.txt"}, -) -job = jobdef.submit(cluster) +auth_token = run_oc_command(["whoami", "--show-token=true"]) +ray_dashboard = cluster.cluster_dashboard_uri() +header = {"Authorization": f"Bearer {auth_token}"} +client = RayJobClient(address=ray_dashboard, headers=header, verify=True) +# Submit the job +submission_id = client.submit_job( + entrypoint="python mnist.py", + runtime_env={"working_dir": "/", "pip": "requirements.txt"}, +) +print(f"Submitted job with ID: {submission_id}") done = False time = 0 timeout = 900 while not done: - status = job.status() - if is_terminal(status.state): + status = client.get_job_status(submission_id) + if status.is_terminal(): break if not done: print(status) @@ -34,13 +38,14 
@@ sleep(5) time += 5 -print(f"Job has completed: {status.state}") - -print(job.logs()) +logs = client.get_job_logs(submission_id) +print(logs) +client.delete_job(submission_id) cluster.down() -if not status.state == AppState.SUCCEEDED: + +if not status == "SUCCEEDED": exit(1) else: exit(0) diff --git a/tests/unit_test.py b/tests/unit_test.py index b25d3dd0..935cdd10 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# TODO: replace all instances of torchx_runner from pathlib import Path import sys @@ -59,12 +58,6 @@ RayClusterStatus, CodeFlareClusterStatus, ) -from codeflare_sdk.job.jobs import ( - JobDefinition, - Job, - DDPJobDefinition, - DDPJob, -) from codeflare_sdk.utils.generate_cert import ( generate_ca_cert, generate_tls_cert, @@ -73,10 +66,7 @@ from unit_test_support import ( createClusterWithConfig, - createTestDDP, - createDDPJob_no_cluster, createClusterConfig, - createDDPJob_with_cluster, ) import codeflare_sdk.utils.kube_api_helpers @@ -90,10 +80,6 @@ import openshift from openshift.selector import Selector import ray -from torchx.specs import AppDryRunInfo, AppDef -from torchx.runner import get_runner, Runner -from torchx.schedulers.ray_scheduler import RayJob -from torchx.schedulers.kubernetes_mcad_scheduler import KubernetesMCADJob import pytest import yaml from unittest.mock import MagicMock @@ -2863,352 +2849,10 @@ def test_wait_ready(mocker, capsys): ) -def test_jobdefinition_coverage(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": ""}}, - ) - abstract = JobDefinition() - cluster = createClusterWithConfig(mocker) - abstract._dry_run(cluster) - abstract.submit(cluster) - - -def test_job_coverage(): - abstract = Job() - abstract.status() - abstract.logs() - - -def test_DDPJobDefinition_creation(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - ddp = createTestDDP() - assert ddp.script == "test.py" - assert ddp.m == None - assert ddp.script_args == ["test"] - assert ddp.name == "test" - assert ddp.cpu == 1 - assert ddp.gpu == 0 - assert ddp.memMB == 1024 - assert ddp.h == None - assert ddp.j == "2x1" - assert ddp.env == {"test": "test"} - assert ddp.max_retries == 0 - assert ddp.mounts == [] - assert ddp.rdzv_port == 29500 - assert ddp.scheduler_args == {"requirements": "test"} - - -def test_DDPJobDefinition_dry_run(mocker: MockerFixture): - """ - Test that the dry run method returns the correct type: AppDryRunInfo, - that the attributes of the returned object are of the correct type, - and that the values from cluster and job definition are correctly passed. 
- """ - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", - return_value="", - ) - mocker.patch.object(Cluster, "job_client") - ddp = createTestDDP() - cluster = createClusterWithConfig(mocker) - ddp_job, _ = ddp._dry_run(cluster) - assert type(ddp_job) == AppDryRunInfo - assert ddp_job._fmt is not None - assert type(ddp_job.request) == RayJob - assert type(ddp_job._app) == AppDef - assert type(ddp_job._cfg) == type(dict()) - assert type(ddp_job._scheduler) == type(str()) - - assert ddp_job.request.app_id.startswith("test") - assert ddp_job.request.cluster_name == "unit-test-cluster" - assert ddp_job.request.requirements == "test" - - assert ddp_job._app.roles[0].resource.cpu == 1 - assert ddp_job._app.roles[0].resource.gpu == 0 - assert ddp_job._app.roles[0].resource.memMB == 1024 - - assert ddp_job._cfg["cluster_name"] == "unit-test-cluster" - assert ddp_job._cfg["requirements"] == "test" - - assert ddp_job._scheduler == "ray" - - -def test_DDPJobDefinition_dry_run_no_cluster(mocker): - """ - Test that the dry run method returns the correct type: AppDryRunInfo, - that the attributes of the returned object are of the correct type, - and that the values from cluster and job definition are correctly passed. - """ - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "codeflare_sdk.job.jobs.get_current_namespace", - return_value="opendatahub", - ) - - ddp = createTestDDP() - ddp.image = "fake-image" - ddp_job, _ = ddp._dry_run_no_cluster() - assert type(ddp_job) == AppDryRunInfo - assert ddp_job._fmt is not None - assert type(ddp_job.request) == KubernetesMCADJob - assert type(ddp_job._app) == AppDef - assert type(ddp_job._cfg) == type(dict()) - assert type(ddp_job._scheduler) == type(str()) - - assert ( - ddp_job.request.resource["spec"]["resources"]["GenericItems"][0][ - "generictemplate" - ] - .spec.containers[0] - .image - == "fake-image" - ) - - assert ddp_job._app.roles[0].resource.cpu == 1 - assert ddp_job._app.roles[0].resource.gpu == 0 - assert ddp_job._app.roles[0].resource.memMB == 1024 - - assert ddp_job._cfg["requirements"] == "test" - - assert ddp_job._scheduler == "kubernetes_mcad" - - -def test_DDPJobDefinition_dry_run_no_resource_args(mocker): - """ - Test that the dry run correctly gets resources from the cluster object - when the job definition does not specify resources. 
- """ - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch.object(Cluster, "job_client") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": ""}}, - ) - mocker.patch( - "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", - return_value="", - ) - cluster = createClusterWithConfig(mocker) - ddp = DDPJobDefinition( - script="test.py", - m=None, - script_args=["test"], - name="test", - h=None, - env={"test": "test"}, - max_retries=0, - mounts=[], - rdzv_port=29500, - scheduler_args={"requirements": "test"}, - ) - ddp_job, _ = ddp._dry_run(cluster) - - assert ddp_job._app.roles[0].resource.cpu == cluster.config.max_cpus - assert ddp_job._app.roles[0].resource.gpu == cluster.config.num_gpus - assert ddp_job._app.roles[0].resource.memMB == cluster.config.max_memory * 1024 - assert ( - parse_j(ddp_job._app.roles[0].args[1]) - == f"{cluster.config.num_workers}x{cluster.config.num_gpus}" - ) - - -def test_DDPJobDefinition_dry_run_no_cluster_no_resource_args(mocker): - """ - Test that the dry run method returns the correct type: AppDryRunInfo, - that the attributes of the returned object are of the correct type, - and that the values from cluster and job definition are correctly passed. - """ - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - - mocker.patch( - "codeflare_sdk.job.jobs.get_current_namespace", - return_value="opendatahub", - ) - - ddp = createTestDDP() - try: - ddp._dry_run_no_cluster() - assert 0 == 1 - except ValueError as e: - assert str(e) == "Job definition missing arg: image" - ddp.image = "fake-image" - ddp.name = None - try: - ddp._dry_run_no_cluster() - assert 0 == 1 - except ValueError as e: - assert str(e) == "Job definition missing arg: name" - ddp.name = "fake" - ddp.cpu = None - try: - ddp._dry_run_no_cluster() - assert 0 == 1 - except ValueError as e: - assert str(e) == "Job definition missing arg: cpu (# cpus per worker)" - ddp.cpu = 1 - ddp.gpu = None - try: - ddp._dry_run_no_cluster() - assert 0 == 1 - except ValueError as e: - assert str(e) == "Job definition missing arg: gpu (# gpus per worker)" - ddp.gpu = 1 - ddp.memMB = None - try: - ddp._dry_run_no_cluster() - assert 0 == 1 - except ValueError as e: - assert str(e) == "Job definition missing arg: memMB (memory in MB)" - ddp.memMB = 1 - ddp.j = None - try: - ddp._dry_run_no_cluster() - assert 0 == 1 - except ValueError as e: - assert str(e) == "Job definition missing arg: j (`workers`x`procs`)" - - -def test_DDPJobDefinition_submit(mocker: MockerFixture): - """ - Tests that the submit method returns the correct type: DDPJob - And that the attributes of the returned object are of the correct type - """ - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mock_schedule = MagicMock() - mocker.patch.object(Runner, "schedule", mock_schedule) - mock_schedule.return_value = "fake-dashboard-url" - mocker.patch.object(Cluster, "job_client") - ddp_def = createTestDDP() - cluster = createClusterWithConfig(mocker) - mocker.patch( - "codeflare_sdk.job.jobs.get_current_namespace", - side_effect="opendatahub", - ) - mocker.patch.object( - Cluster, "cluster_dashboard_uri", return_value="fake-dashboard-url" - ) - ddp_job = ddp_def.submit(cluster) - assert type(ddp_job) == DDPJob - assert type(ddp_job.job_definition) == DDPJobDefinition - assert type(ddp_job.cluster) == Cluster - assert type(ddp_job._app_handle) == str - assert ddp_job._app_handle == "fake-dashboard-url" - - ddp_def.image = "fake-image" - ddp_job 
= ddp_def.submit() - assert type(ddp_job) == DDPJob - assert type(ddp_job.job_definition) == DDPJobDefinition - assert ddp_job.cluster == None - assert type(ddp_job._app_handle) == str - assert ddp_job._app_handle == "fake-dashboard-url" - - -def test_DDPJob_creation(mocker: MockerFixture): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch.object(Cluster, "job_client") - mock_schedule = MagicMock() - mocker.patch.object(Runner, "schedule", mock_schedule) - mocker.patch.object( - Cluster, "cluster_dashboard_uri", return_value="fake-dashboard-url" - ) - ddp_def = createTestDDP() - cluster = createClusterWithConfig(mocker) - mock_schedule.return_value = "fake-dashboard-url" - ddp_job = createDDPJob_with_cluster(mocker, ddp_def, cluster) - assert type(ddp_job) == DDPJob - assert type(ddp_job.job_definition) == DDPJobDefinition - assert type(ddp_job.cluster) == Cluster - assert type(ddp_job._app_handle) == str - assert ddp_job._app_handle == "fake-dashboard-url" - _, args, kwargs = mock_schedule.mock_calls[0] - assert type(args[0]) == AppDryRunInfo - job_info = args[0] - assert type(job_info.request) == RayJob - assert type(job_info._app) == AppDef - assert type(job_info._cfg) == type(dict()) - assert type(job_info._scheduler) == type(str()) - - -def test_DDPJob_creation_no_cluster(mocker: MockerFixture): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - ddp_def = createTestDDP() - ddp_def.image = "fake-image" - mocker.patch( - "codeflare_sdk.job.jobs.get_current_namespace", - side_effect="opendatahub", - ) - mock_schedule = MagicMock() - mocker.patch.object(Runner, "schedule", mock_schedule) - mock_schedule.return_value = "fake-app-handle" - ddp_job = createDDPJob_no_cluster(ddp_def, None) - assert type(ddp_job) == DDPJob - assert type(ddp_job.job_definition) == DDPJobDefinition - assert ddp_job.cluster == None - assert type(ddp_job._app_handle) == str - assert ddp_job._app_handle == "fake-app-handle" - _, args, kwargs = mock_schedule.mock_calls[0] - assert type(args[0]) == AppDryRunInfo - job_info = args[0] - assert type(job_info.request) == KubernetesMCADJob - assert type(job_info._app) == AppDef - assert type(job_info._cfg) == type(dict()) - assert type(job_info._scheduler) == type(str()) - - -def test_DDPJob_status(mocker: MockerFixture): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - # Setup the neccesary mock patches - mock_status = MagicMock() - mocker.patch.object(Runner, "status", mock_status) - test_DDPJob_creation(mocker) - ddp_def = createTestDDP() - cluster = createClusterWithConfig(mocker) - ddp_job = createDDPJob_with_cluster(mocker, ddp_def, cluster) - mock_status.return_value = "fake-status" - assert ddp_job.status() == "fake-status" - _, args, kwargs = mock_status.mock_calls[0] - assert args[0] == "fake-dashboard-url" - - -def test_DDPJob_logs(mocker: MockerFixture): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mock_log = MagicMock() - mocker.patch.object(Runner, "log_lines", mock_log) - # Setup the neccesary mock patches - test_DDPJob_creation(mocker) - ddp_def = createTestDDP() - cluster = createClusterWithConfig(mocker) - ddp_job = createDDPJob_with_cluster(mocker, ddp_def, cluster) - mock_log.return_value = "fake-logs" - assert ddp_job.logs() == "fake-logs" - _, args, kwargs = mock_log.mock_calls[0] - assert args[0] == "fake-dashboard-url" - - def arg_check_side_effect(*args): assert args[0] == "fake-app-handle" -def test_DDPJob_cancel(mocker: MockerFixture): - mock_cancel = MagicMock() - 
mocker.patch.object(Runner, "cancel", mock_cancel) - # Setup the neccesary mock patches - test_DDPJob_creation_no_cluster(mocker) - ddp_def = createTestDDP() - ddp_def.image = "fake-image" - ddp_job = createDDPJob_no_cluster(ddp_def, None) - mocker.patch( - "openshift.get_project_name", - return_value="opendatahub", - ) - mock_cancel.side_effect = arg_check_side_effect - ddp_job.cancel() - - def parse_j(cmd): pattern = r"--nnodes\s+\d+\s+--nproc_per_node\s+\d+" match = re.search(pattern, cmd) diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 190c4f1a..329df45e 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -1,38 +1,9 @@ -from codeflare_sdk.job.jobs import ( - DDPJobDefinition, - DDPJob, -) - from codeflare_sdk.cluster.cluster import ( Cluster, ClusterConfiguration, ) -def createTestDDP(): - ddp = DDPJobDefinition( - script="test.py", - m=None, - script_args=["test"], - name="test", - cpu=1, - gpu=0, - memMB=1024, - h=None, - j="2x1", - env={"test": "test"}, - max_retries=0, - mounts=[], - rdzv_port=29500, - scheduler_args={"requirements": "test"}, - ) - return ddp - - -def createDDPJob_no_cluster(ddp_def, cluster): - return DDPJob(ddp_def, cluster) - - def createClusterConfig(): config = ClusterConfiguration( name="unit-test-cluster", @@ -61,8 +32,3 @@ def createClusterWithConfig(mocker): ) cluster = Cluster(createClusterConfig()) return cluster - - -def createDDPJob_with_cluster(mocker, ddp_def, cluster=None): - cluster = createClusterWithConfig(mocker) - return DDPJob(ddp_def, cluster) From 0dda865d82a881ea099af96e99c704125247f71f Mon Sep 17 00:00:00 2001 From: Dimitri Saridakis Date: Fri, 5 Apr 2024 18:08:56 +0100 Subject: [PATCH 175/496] fix: use quay for images (#502) --- src/codeflare_sdk/templates/base-template.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 0f0f8b32..8b14e0cd 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -130,7 +130,7 @@ spec: - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt name: ray-head - image: rayproject/ray:latest + image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always ports: - containerPort: 6379 @@ -176,7 +176,7 @@ spec: - sh - -c - cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf "authorityKeyIdentifier=keyid,issuer\nbasicConstraints=CA:FALSE\nsubjectAltName = @alt_names\n[alt_names]\nDNS.1 = 127.0.0.1\nDNS.2 = localhost\nDNS.3 = ${FQ_RAY_IP}\nDNS.4 = $(awk 'END{print $1}' /etc/hosts)\nDNS.5 = rayclient-deployment-name-$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).server-name">./domain.ext && cp /home/ray/workspace/ca/* . 
&& openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext - image: rayproject/ray:2.7.0 + image: quay.io/project-codeflare/ray:latest-py39-cu118 name: create-cert # securityContext: # runAsUser: 1000 @@ -252,7 +252,7 @@ spec: initContainers: # the env var $RAY_IP is set by the operator if missing, with the value of the head service name - name: create-cert - image: rayproject/ray:2.7.0 + image: quay.io/project-codeflare/ray:latest-py39-cu118 command: - sh - -c @@ -269,7 +269,7 @@ spec: readOnly: false containers: - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - image: rayproject/ray:latest + image: quay.io/project-codeflare/ray:latest-py39-cu118 env: - name: MY_POD_IP valueFrom: From 421a3db83c3e34f72966a522e2d52da5fa8ee79a Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Fri, 5 Apr 2024 17:27:00 +0000 Subject: [PATCH 176/496] Changes in docs for release: v0.15.0 --- .../cluster/cluster.html | 999 +++++++----------- .../cluster/config.html | 56 +- .../detailed-documentation/cluster/index.html | 8 +- .../detailed-documentation/cluster/model.html | 18 +- docs/detailed-documentation/index.html | 5 +- docs/detailed-documentation/job/index.html | 9 +- docs/detailed-documentation/job/jobs.html | 631 ----------- .../utils/generate_yaml.html | 834 ++++++--------- docs/detailed-documentation/utils/index.html | 5 - .../utils/openshift_oauth.html | 323 ------ .../utils/pretty_print.html | 90 +- 11 files changed, 825 insertions(+), 2153 deletions(-) delete mode 100644 docs/detailed-documentation/job/jobs.html delete mode 100644 docs/detailed-documentation/utils/openshift_oauth.html diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html index 07e44e4b..6e220e18 100644 --- a/docs/detailed-documentation/cluster/cluster.html +++ b/docs/detailed-documentation/cluster/cluster.html @@ -53,10 +53,8 @@

    Module codeflare_sdk.cluster.cluster

    from time import sleep from typing import List, Optional, Tuple, Dict -import openshift as oc from kubernetes import config from ray.job_submission import JobSubmissionClient -import urllib3 from .auth import config_check, api_config_handler from ..utils import pretty_print @@ -65,10 +63,7 @@

    Module codeflare_sdk.cluster.cluster

    ) from ..utils.kube_api_helpers import _kube_api_error_handling from ..utils.generate_yaml import is_openshift_cluster -from ..utils.openshift_oauth import ( - create_openshift_oauth_objects, - delete_openshift_oauth_objects, -) + from .config import ClusterConfiguration from .model import ( AppWrapper, @@ -93,8 +88,6 @@

    Module codeflare_sdk.cluster.cluster

    Note that currently, the underlying implementation is a Ray cluster. """ - torchx_scheduler = "ray" - def __init__(self, config: ClusterConfiguration): """ Create the resource cluster object by passing in a ClusterConfiguration @@ -105,9 +98,7 @@

    Module codeflare_sdk.cluster.cluster

    self.config = config self.app_wrapper_yaml = self.create_app_wrapper() self._job_submission_client = None - self.app_wrapper_name = self.app_wrapper_yaml.replace(".yaml", "").split("/")[ - -1 - ] + self.app_wrapper_name = self.config.name @property def _client_headers(self): @@ -120,14 +111,16 @@

    Module codeflare_sdk.cluster.cluster

    @property def _client_verify_tls(self): - return not self.config.openshift_oauth + if not is_openshift_cluster or not self.config.verify_tls: + return False + return True @property def job_client(self): k8client = api_config_handler() or client.ApiClient() if self._job_submission_client: return self._job_submission_client - if self.config.openshift_oauth: + if is_openshift_cluster(): print(k8client.configuration.get_api_key_with_prefix("authorization")) self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), @@ -222,8 +215,9 @@

    Module codeflare_sdk.cluster.cluster

    local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority - ingress_domain = self.config.ingress_domain - ingress_options = self.config.ingress_options + write_to_file = self.config.write_to_file + verify_tls = self.config.verify_tls + local_queue = self.config.local_queue return generate_appwrapper( name=name, namespace=namespace, @@ -246,9 +240,9 @@

    Module codeflare_sdk.cluster.cluster

    image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, - openshift_oauth=self.config.openshift_oauth, - ingress_domain=ingress_domain, - ingress_options=ingress_options, + write_to_file=write_to_file, + verify_tls=verify_tls, + local_queue=local_queue, ) # creates a new cluster with the provided or default spec @@ -258,24 +252,30 @@

    Module codeflare_sdk.cluster.cluster

    the MCAD queue. """ namespace = self.config.namespace - if self.config.openshift_oauth: - create_openshift_oauth_objects( - cluster_name=self.config.name, namespace=namespace - ) try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) if self.config.mcad: - with open(self.app_wrapper_yaml) as f: - aw = yaml.load(f, Loader=yaml.FullLoader) - api_instance.create_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - body=aw, - ) + if self.config.write_to_file: + with open(self.app_wrapper_yaml) as f: + aw = yaml.load(f, Loader=yaml.FullLoader) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) + else: + aw = yaml.safe_load(self.app_wrapper_yaml) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) else: self._component_resources_up(namespace, api_instance) except Exception as e: # pragma: no cover @@ -303,11 +303,6 @@

    Module codeflare_sdk.cluster.cluster

    except Exception as e: # pragma: no cover return _kube_api_error_handling(e) - if self.config.openshift_oauth: - delete_openshift_oauth_objects( - cluster_name=self.config.name, namespace=namespace - ) - def status( self, print_to_console: bool = True ) -> Tuple[CodeFlareClusterStatus, bool]: @@ -354,6 +349,9 @@

    Module codeflare_sdk.cluster.cluster

    # check the ray cluster status cluster = _ray_cluster_status(self.config.name, self.config.namespace) if cluster: + if cluster.status == RayClusterStatus.SUSPENDED: + ready = False + status = CodeFlareClusterStatus.SUSPENDED if cluster.status == RayClusterStatus.UNKNOWN: ready = False status = CodeFlareClusterStatus.STARTING @@ -401,36 +399,33 @@

    Module codeflare_sdk.cluster.cluster

    Checks every five seconds. """ print("Waiting for requested resources to be set up...") - ready = False - dashboard_ready = False - status = None time = 0 - while not ready: + while True: + if timeout and time >= timeout: + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for cluster to be ready" + ) status, ready = self.status(print_to_console=False) if status == CodeFlareClusterStatus.UNKNOWN: print( "WARNING: Current cluster status is unknown, have you run cluster.up yet?" ) - if not ready: - if timeout and time >= timeout: - raise TimeoutError( - f"wait() timed out after waiting {timeout}s for cluster to be ready" - ) - sleep(5) - time += 5 + if ready: + break + sleep(5) + time += 5 print("Requested cluster is up and running!") - while dashboard_check and not dashboard_ready: - dashboard_ready = self.is_dashboard_ready() - if not dashboard_ready: - if timeout and time >= timeout: - raise TimeoutError( - f"wait() timed out after waiting {timeout}s for dashboard to be ready" - ) - sleep(5) - time += 5 - if dashboard_ready: - print("Dashboard is ready!") + while dashboard_check: + if timeout and time >= timeout: + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for dashboard to be ready" + ) + if self.is_dashboard_ready(): + print("Dashboard is ready!") + break + sleep(5) + time += 5 def details(self, print_to_console: bool = True) -> RayCluster: cluster = _copy_to_ray(self) @@ -490,7 +485,7 @@

    Module codeflare_sdk.cluster.cluster

    elif "route.openshift.io/termination" in annotations: protocol = "https" return f"{protocol}://{ingress.spec.rules[0].host}" - return "Dashboard ingress not available yet, have you run cluster.up()?" + return "Dashboard not available yet, have you run cluster.up()?" def list_jobs(self) -> List: """ @@ -510,45 +505,41 @@

    Module codeflare_sdk.cluster.cluster

    """ return self.job_client.get_job_logs(job_id) - def torchx_config( - self, working_dir: str = None, requirements: str = None - ) -> Dict[str, str]: - dashboard_address = urllib3.util.parse_url(self.cluster_dashboard_uri()).host - to_return = { - "cluster_name": self.config.name, - "dashboard_address": dashboard_address, - } - if working_dir: - to_return["working_dir"] = working_dir - if requirements: - to_return["requirements"] = requirements - return to_return - - def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}): + def from_k8_cluster_object( + rc, + mcad=True, + write_to_file=False, + verify_tls=True, + ): + config_check() + if ( + rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"] + == "True" + ): + local_interactive = True + else: + local_interactive = False machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] else [] ) - local_interactive = ( - "volumeMounts" - in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0] - ) - if local_interactive: - ingress_domain = get_ingress_domain_from_client( - rc["metadata"]["name"], rc["metadata"]["namespace"] - ) + cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], machine_types=machine_types, num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["requests"]["cpu"], - max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["limits"]["cpu"], + min_cpus=int( + rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ + "resources" + ]["requests"]["cpu"] + ), + max_cpus=int( + rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["cpu"] + ), min_memory=int( rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ "resources" @@ -559,17 +550,19 @@

    Module codeflare_sdk.cluster.cluster

    "resources" ]["limits"]["memory"][:-1] ), - num_gpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["limits"]["nvidia.com/gpu"], + num_gpus=int( + rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["nvidia.com/gpu"] + ), instascale=True if machine_types else False, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], local_interactive=local_interactive, mcad=mcad, - ingress_domain=ingress_domain, - ingress_options=ingress_options, + write_to_file=write_to_file, + verify_tls=verify_tls, ) return Cluster(cluster_config) @@ -583,79 +576,25 @@

    Module codeflare_sdk.cluster.cluster

    def _component_resources_up( self, namespace: str, api_instance: client.CustomObjectsApi ): - with open(self.app_wrapper_yaml) as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) - for resource in yamls: - if resource["kind"] == "RayCluster": - api_instance.create_namespaced_custom_object( - group="ray.io", - version="v1", - namespace=namespace, - plural="rayclusters", - body=resource, - ) - elif resource["kind"] == "Ingress": - api_instance.create_namespaced_custom_object( - group="networking.k8s.io", - version="v1", - namespace=namespace, - plural="ingresses", - body=resource, - ) - elif resource["kind"] == "Route": - api_instance.create_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=namespace, - plural="routes", - body=resource, - ) - elif resource["kind"] == "Secret": - secret_instance = client.CoreV1Api(api_config_handler()) - secret_instance.create_namespaced_secret( - namespace=namespace, - body=resource, - ) + if self.config.write_to_file: + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + _create_resources(yamls, namespace, api_instance) + else: + yamls = yaml.load_all(self.app_wrapper_yaml, Loader=yaml.FullLoader) + _create_resources(yamls, namespace, api_instance) def _component_resources_down( self, namespace: str, api_instance: client.CustomObjectsApi ): - with open(self.app_wrapper_yaml) as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) - for resource in yamls: - if resource["kind"] == "RayCluster": - api_instance.delete_namespaced_custom_object( - group="ray.io", - version="v1", - namespace=namespace, - plural="rayclusters", - name=self.app_wrapper_name, - ) - elif resource["kind"] == "Ingress": - name = resource["metadata"]["name"] - api_instance.delete_namespaced_custom_object( - group="networking.k8s.io", - version="v1", - namespace=namespace, - plural="ingresses", - name=name, - ) - elif resource["kind"] == "Route": - name = resource["metadata"]["name"] - api_instance.delete_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=namespace, - plural="routes", - name=name, - ) - elif resource["kind"] == "Secret": - name = resource["metadata"]["name"] - secret_instance = client.CoreV1Api(api_config_handler()) - secret_instance.delete_namespaced_secret( - namespace=namespace, - name=name, - ) + cluster_name = self.config.name + if self.config.write_to_file: + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + _delete_resources(yamls, namespace, api_instance, cluster_name) + else: + yamls = yaml.safe_load_all(self.app_wrapper_yaml) + _delete_resources(yamls, namespace, api_instance, cluster_name) def list_all_clusters(namespace: str, print_to_console: bool = True): @@ -668,17 +607,24 @@

    Module codeflare_sdk.cluster.cluster

    return clusters -def list_all_queued(namespace: str, print_to_console: bool = True): +def list_all_queued(namespace: str, print_to_console: bool = True, mcad: bool = False): """ - Returns (and prints by default) a list of all currently queued-up AppWrappers + Returns (and prints by default) a list of all currently queued-up Ray Clusters in a given namespace. """ - app_wrappers = _get_app_wrappers( - namespace, filter=[AppWrapperStatus.RUNNING, AppWrapperStatus.PENDING] - ) - if print_to_console: - pretty_print.print_app_wrappers_status(app_wrappers) - return app_wrappers + if mcad: + resources = _get_app_wrappers( + namespace, filter=[AppWrapperStatus.RUNNING, AppWrapperStatus.PENDING] + ) + if print_to_console: + pretty_print.print_app_wrappers_status(resources) + else: + resources = _get_ray_clusters( + namespace, filter=[RayClusterStatus.READY, RayClusterStatus.SUSPENDED] + ) + if print_to_console: + pretty_print.print_ray_clusters_status(resources) + return resources def get_current_namespace(): # pragma: no cover @@ -707,7 +653,12 @@

    Module codeflare_sdk.cluster.cluster

    return None -def get_cluster(cluster_name: str, namespace: str = "default"): +def get_cluster( + cluster_name: str, + namespace: str = "default", + write_to_file: bool = False, + verify_tls: bool = True, +): try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -723,61 +674,11 @@

    Module codeflare_sdk.cluster.cluster

    for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: mcad = _check_aw_exists(cluster_name, namespace) - - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress(namespace) - ingress_host = None - ingress_options = {} - for ingress in ingresses.items: - # Search for ingress with AppWrapper name as the owner - if ( - "ingress-owner" in ingress.metadata.labels - and ingress.metadata.labels["ingress-owner"] == cluster_name - ): - ingress_host = ingress.spec.rules[0].host - if ( - "ingress-options" in ingress.metadata.labels - and ingress.metadata.labels["ingress-options"] == "true" - ): - ingress_name = ingress.metadata.name - port = ( - ingress.spec.rules[0] - .http.paths[0] - .backend.service.port.number - ) - annotations = ingress.metadata.annotations - path = ingress.spec.rules[0].http.paths[0].path - ingress_class_name = ingress.spec.ingress_class_name - path_type = ingress.spec.rules[0].http.paths[0].path_type - - ingress_options = { - "ingresses": [ - { - "ingressName": ingress_name, - "port": port, - "annotations": annotations, - "ingressClassName": ingress_class_name, - "pathType": path_type, - "path": path, - "host": ingress_host, - } - ] - } - except Exception as e: - return _kube_api_error_handling(e) - # We gather the ingress domain from the host - if ingress_host is not None and ingress_options == {}: - ingress_domain = ingress_host.split(".", 1)[1] - else: - ingress_domain = None - return Cluster.from_k8_cluster_object( rc, mcad=mcad, - ingress_domain=ingress_domain, - ingress_options=ingress_options, + write_to_file=write_to_file, + verify_tls=verify_tls, ) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" @@ -785,6 +686,46 @@

    Module codeflare_sdk.cluster.cluster

    # private methods +def _delete_resources( + yamls, namespace: str, api_instance: client.CustomObjectsApi, cluster_name: str +): + for resource in yamls: + if resource["kind"] == "RayCluster": + name = resource["metadata"]["name"] + api_instance.delete_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=namespace, + plural="rayclusters", + name=name, + ) + elif resource["kind"] == "Secret": + name = resource["metadata"]["name"] + secret_instance = client.CoreV1Api(api_config_handler()) + secret_instance.delete_namespaced_secret( + namespace=namespace, + name=name, + ) + + +def _create_resources(yamls, namespace: str, api_instance: client.CustomObjectsApi): + for resource in yamls: + if resource["kind"] == "RayCluster": + api_instance.create_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=namespace, + plural="rayclusters", + body=resource, + ) + elif resource["kind"] == "Secret": + secret_instance = client.CoreV1Api(api_config_handler()) + secret_instance.create_namespaced_secret( + namespace=namespace, + body=resource, + ) + + def _check_aw_exists(name: str, namespace: str) -> bool: try: config_check() @@ -883,7 +824,9 @@

    Module codeflare_sdk.cluster.cluster

    return None -def _get_ray_clusters(namespace="default") -> List[RayCluster]: +def _get_ray_clusters( + namespace="default", filter: Optional[List[RayClusterStatus]] = None +) -> List[RayCluster]: list_of_clusters = [] try: config_check() @@ -897,8 +840,15 @@

    Module codeflare_sdk.cluster.cluster

    except Exception as e: # pragma: no cover return _kube_api_error_handling(e) - for rc in rcs["items"]: - list_of_clusters.append(_map_to_ray_cluster(rc)) + # Get a list of RCs with the filter if it is passed to the function + if filter is not None: + for rc in rcs["items"]: + ray_cluster = _map_to_ray_cluster(rc) + if filter and ray_cluster.status in filter: + list_of_clusters.append(ray_cluster) + else: + for rc in rcs["items"]: + list_of_clusters.append(_map_to_ray_cluster(rc)) return list_of_clusters @@ -1038,34 +988,7 @@

    Module codeflare_sdk.cluster.cluster

    ) if ray.status == CodeFlareClusterStatus.READY: ray.status = RayClusterStatus.READY - return ray - - -def get_ingress_domain_from_client(cluster_name: str, namespace: str = "default"): - if is_openshift_cluster(): - try: - config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - route = api_instance.get_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=namespace, - plural="routes", - name=f"rayclient-{cluster_name}", - ) - return route["spec"]["host"].split(".", 1)[1] - except Exception as e: # pragma no cover - return _kube_api_error_handling(e) - else: - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingress = api_instance.read_namespaced_ingress( - f"rayclient-{cluster_name}", namespace - ) - return ingress.spec.rules[0].host.split(".", 1)[1] - except Exception as e: # pragma no cover - return _kube_api_error_handling(e)
    + return ray
    @@ -1076,7 +999,7 @@

    Module codeflare_sdk.cluster.cluster

    Functions

    -def get_cluster(cluster_name: str, namespace: str = 'default') +def get_cluster(cluster_name: str, namespace: str = 'default', write_to_file: bool = False, verify_tls: bool = True)
    @@ -1084,7 +1007,12 @@

    Functions

    Expand source code -
    def get_cluster(cluster_name: str, namespace: str = "default"):
    +
    def get_cluster(
    +    cluster_name: str,
    +    namespace: str = "default",
    +    write_to_file: bool = False,
    +    verify_tls: bool = True,
    +):
         try:
             config_check()
             api_instance = client.CustomObjectsApi(api_config_handler())
    @@ -1100,61 +1028,11 @@ 

    Functions

    for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: mcad = _check_aw_exists(cluster_name, namespace) - - try: - config_check() - api_instance = client.NetworkingV1Api(api_config_handler()) - ingresses = api_instance.list_namespaced_ingress(namespace) - ingress_host = None - ingress_options = {} - for ingress in ingresses.items: - # Search for ingress with AppWrapper name as the owner - if ( - "ingress-owner" in ingress.metadata.labels - and ingress.metadata.labels["ingress-owner"] == cluster_name - ): - ingress_host = ingress.spec.rules[0].host - if ( - "ingress-options" in ingress.metadata.labels - and ingress.metadata.labels["ingress-options"] == "true" - ): - ingress_name = ingress.metadata.name - port = ( - ingress.spec.rules[0] - .http.paths[0] - .backend.service.port.number - ) - annotations = ingress.metadata.annotations - path = ingress.spec.rules[0].http.paths[0].path - ingress_class_name = ingress.spec.ingress_class_name - path_type = ingress.spec.rules[0].http.paths[0].path_type - - ingress_options = { - "ingresses": [ - { - "ingressName": ingress_name, - "port": port, - "annotations": annotations, - "ingressClassName": ingress_class_name, - "pathType": path_type, - "path": path, - "host": ingress_host, - } - ] - } - except Exception as e: - return _kube_api_error_handling(e) - # We gather the ingress domain from the host - if ingress_host is not None and ingress_options == {}: - ingress_domain = ingress_host.split(".", 1)[1] - else: - ingress_domain = None - return Cluster.from_k8_cluster_object( rc, mcad=mcad, - ingress_domain=ingress_domain, - ingress_options=ingress_options, + write_to_file=write_to_file, + verify_tls=verify_tls, ) raise FileNotFoundError( f"Cluster {cluster_name} is not found in {namespace} namespace" @@ -1196,42 +1074,6 @@

    Functions

    return None
    -
    -def get_ingress_domain_from_client(cluster_name: str, namespace: str = 'default') -
    -
    -
    -
    - -Expand source code - -
    def get_ingress_domain_from_client(cluster_name: str, namespace: str = "default"):
    -    if is_openshift_cluster():
    -        try:
    -            config_check()
    -            api_instance = client.CustomObjectsApi(api_config_handler())
    -            route = api_instance.get_namespaced_custom_object(
    -                group="route.openshift.io",
    -                version="v1",
    -                namespace=namespace,
    -                plural="routes",
    -                name=f"rayclient-{cluster_name}",
    -            )
    -            return route["spec"]["host"].split(".", 1)[1]
    -        except Exception as e:  # pragma no cover
    -            return _kube_api_error_handling(e)
    -    else:
    -        try:
    -            config_check()
    -            api_instance = client.NetworkingV1Api(api_config_handler())
    -            ingress = api_instance.read_namespaced_ingress(
    -                f"rayclient-{cluster_name}", namespace
    -            )
    -            return ingress.spec.rules[0].host.split(".", 1)[1]
    -        except Exception as e:  # pragma no cover
    -            return _kube_api_error_handling(e)
    -
    -
    def list_all_clusters(namespace: str, print_to_console: bool = True)
    @@ -1252,26 +1094,33 @@

    Functions

    -def list_all_queued(namespace: str, print_to_console: bool = True) +def list_all_queued(namespace: str, print_to_console: bool = True, mcad: bool = False)
    -

    Returns (and prints by default) a list of all currently queued-up AppWrappers +

    Returns (and prints by default) a list of all currently queued-up Ray Clusters in a given namespace.

    Expand source code -
    def list_all_queued(namespace: str, print_to_console: bool = True):
    +
    def list_all_queued(namespace: str, print_to_console: bool = True, mcad: bool = False):
         """
    -    Returns (and prints by default) a list of all currently queued-up AppWrappers
    +    Returns (and prints by default) a list of all currently queued-up Ray Clusters
         in a given namespace.
         """
    -    app_wrappers = _get_app_wrappers(
    -        namespace, filter=[AppWrapperStatus.RUNNING, AppWrapperStatus.PENDING]
    -    )
    -    if print_to_console:
    -        pretty_print.print_app_wrappers_status(app_wrappers)
    -    return app_wrappers
    + if mcad: + resources = _get_app_wrappers( + namespace, filter=[AppWrapperStatus.RUNNING, AppWrapperStatus.PENDING] + ) + if print_to_console: + pretty_print.print_app_wrappers_status(resources) + else: + resources = _get_ray_clusters( + namespace, filter=[RayClusterStatus.READY, RayClusterStatus.SUSPENDED] + ) + if print_to_console: + pretty_print.print_ray_clusters_status(resources) + return resources
    @@ -1303,8 +1152,6 @@

    Classes

    Note that currently, the underlying implementation is a Ray cluster. """ - torchx_scheduler = "ray" - def __init__(self, config: ClusterConfiguration): """ Create the resource cluster object by passing in a ClusterConfiguration @@ -1315,9 +1162,7 @@

    Classes

    self.config = config self.app_wrapper_yaml = self.create_app_wrapper() self._job_submission_client = None - self.app_wrapper_name = self.app_wrapper_yaml.replace(".yaml", "").split("/")[ - -1 - ] + self.app_wrapper_name = self.config.name @property def _client_headers(self): @@ -1330,14 +1175,16 @@

    Classes

    @property def _client_verify_tls(self): - return not self.config.openshift_oauth + if not is_openshift_cluster or not self.config.verify_tls: + return False + return True @property def job_client(self): k8client = api_config_handler() or client.ApiClient() if self._job_submission_client: return self._job_submission_client - if self.config.openshift_oauth: + if is_openshift_cluster(): print(k8client.configuration.get_api_key_with_prefix("authorization")) self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), @@ -1432,8 +1279,9 @@

    Classes

    local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority - ingress_domain = self.config.ingress_domain - ingress_options = self.config.ingress_options + write_to_file = self.config.write_to_file + verify_tls = self.config.verify_tls + local_queue = self.config.local_queue return generate_appwrapper( name=name, namespace=namespace, @@ -1456,9 +1304,9 @@

    Classes

    image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, - openshift_oauth=self.config.openshift_oauth, - ingress_domain=ingress_domain, - ingress_options=ingress_options, + write_to_file=write_to_file, + verify_tls=verify_tls, + local_queue=local_queue, ) # creates a new cluster with the provided or default spec @@ -1468,24 +1316,30 @@

    Classes

    the MCAD queue. """ namespace = self.config.namespace - if self.config.openshift_oauth: - create_openshift_oauth_objects( - cluster_name=self.config.name, namespace=namespace - ) try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) if self.config.mcad: - with open(self.app_wrapper_yaml) as f: - aw = yaml.load(f, Loader=yaml.FullLoader) - api_instance.create_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - body=aw, - ) + if self.config.write_to_file: + with open(self.app_wrapper_yaml) as f: + aw = yaml.load(f, Loader=yaml.FullLoader) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) + else: + aw = yaml.safe_load(self.app_wrapper_yaml) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) else: self._component_resources_up(namespace, api_instance) except Exception as e: # pragma: no cover @@ -1513,11 +1367,6 @@

    Classes

    except Exception as e: # pragma: no cover return _kube_api_error_handling(e) - if self.config.openshift_oauth: - delete_openshift_oauth_objects( - cluster_name=self.config.name, namespace=namespace - ) - def status( self, print_to_console: bool = True ) -> Tuple[CodeFlareClusterStatus, bool]: @@ -1564,6 +1413,9 @@

    Classes

    # check the ray cluster status cluster = _ray_cluster_status(self.config.name, self.config.namespace) if cluster: + if cluster.status == RayClusterStatus.SUSPENDED: + ready = False + status = CodeFlareClusterStatus.SUSPENDED if cluster.status == RayClusterStatus.UNKNOWN: ready = False status = CodeFlareClusterStatus.STARTING @@ -1611,36 +1463,33 @@

    Classes

    Checks every five seconds. """ print("Waiting for requested resources to be set up...") - ready = False - dashboard_ready = False - status = None time = 0 - while not ready: + while True: + if timeout and time >= timeout: + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for cluster to be ready" + ) status, ready = self.status(print_to_console=False) if status == CodeFlareClusterStatus.UNKNOWN: print( "WARNING: Current cluster status is unknown, have you run cluster.up yet?" ) - if not ready: - if timeout and time >= timeout: - raise TimeoutError( - f"wait() timed out after waiting {timeout}s for cluster to be ready" - ) - sleep(5) - time += 5 + if ready: + break + sleep(5) + time += 5 print("Requested cluster is up and running!") - while dashboard_check and not dashboard_ready: - dashboard_ready = self.is_dashboard_ready() - if not dashboard_ready: - if timeout and time >= timeout: - raise TimeoutError( - f"wait() timed out after waiting {timeout}s for dashboard to be ready" - ) - sleep(5) - time += 5 - if dashboard_ready: - print("Dashboard is ready!") + while dashboard_check: + if timeout and time >= timeout: + raise TimeoutError( + f"wait() timed out after waiting {timeout}s for dashboard to be ready" + ) + if self.is_dashboard_ready(): + print("Dashboard is ready!") + break + sleep(5) + time += 5 def details(self, print_to_console: bool = True) -> RayCluster: cluster = _copy_to_ray(self) @@ -1700,7 +1549,7 @@

    Classes

    elif "route.openshift.io/termination" in annotations: protocol = "https" return f"{protocol}://{ingress.spec.rules[0].host}" - return "Dashboard ingress not available yet, have you run cluster.up()?" + return "Dashboard not available yet, have you run cluster.up()?" def list_jobs(self) -> List: """ @@ -1720,45 +1569,41 @@

    Classes

    """ return self.job_client.get_job_logs(job_id) - def torchx_config( - self, working_dir: str = None, requirements: str = None - ) -> Dict[str, str]: - dashboard_address = urllib3.util.parse_url(self.cluster_dashboard_uri()).host - to_return = { - "cluster_name": self.config.name, - "dashboard_address": dashboard_address, - } - if working_dir: - to_return["working_dir"] = working_dir - if requirements: - to_return["requirements"] = requirements - return to_return - - def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}): + def from_k8_cluster_object( + rc, + mcad=True, + write_to_file=False, + verify_tls=True, + ): + config_check() + if ( + rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"] + == "True" + ): + local_interactive = True + else: + local_interactive = False machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] else [] ) - local_interactive = ( - "volumeMounts" - in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0] - ) - if local_interactive: - ingress_domain = get_ingress_domain_from_client( - rc["metadata"]["name"], rc["metadata"]["namespace"] - ) + cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], machine_types=machine_types, num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["requests"]["cpu"], - max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["limits"]["cpu"], + min_cpus=int( + rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ + "resources" + ]["requests"]["cpu"] + ), + max_cpus=int( + rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["cpu"] + ), min_memory=int( rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ "resources" @@ -1769,17 +1614,19 @@

    Classes

    "resources" ]["limits"]["memory"][:-1] ), - num_gpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["limits"]["nvidia.com/gpu"], + num_gpus=int( + rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["nvidia.com/gpu"] + ), instascale=True if machine_types else False, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], local_interactive=local_interactive, mcad=mcad, - ingress_domain=ingress_domain, - ingress_options=ingress_options, + write_to_file=write_to_file, + verify_tls=verify_tls, ) return Cluster(cluster_config) @@ -1793,87 +1640,26 @@

    Classes

    def _component_resources_up( self, namespace: str, api_instance: client.CustomObjectsApi ): - with open(self.app_wrapper_yaml) as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) - for resource in yamls: - if resource["kind"] == "RayCluster": - api_instance.create_namespaced_custom_object( - group="ray.io", - version="v1", - namespace=namespace, - plural="rayclusters", - body=resource, - ) - elif resource["kind"] == "Ingress": - api_instance.create_namespaced_custom_object( - group="networking.k8s.io", - version="v1", - namespace=namespace, - plural="ingresses", - body=resource, - ) - elif resource["kind"] == "Route": - api_instance.create_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=namespace, - plural="routes", - body=resource, - ) - elif resource["kind"] == "Secret": - secret_instance = client.CoreV1Api(api_config_handler()) - secret_instance.create_namespaced_secret( - namespace=namespace, - body=resource, - ) + if self.config.write_to_file: + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + _create_resources(yamls, namespace, api_instance) + else: + yamls = yaml.load_all(self.app_wrapper_yaml, Loader=yaml.FullLoader) + _create_resources(yamls, namespace, api_instance) def _component_resources_down( self, namespace: str, api_instance: client.CustomObjectsApi ): - with open(self.app_wrapper_yaml) as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) - for resource in yamls: - if resource["kind"] == "RayCluster": - api_instance.delete_namespaced_custom_object( - group="ray.io", - version="v1", - namespace=namespace, - plural="rayclusters", - name=self.app_wrapper_name, - ) - elif resource["kind"] == "Ingress": - name = resource["metadata"]["name"] - api_instance.delete_namespaced_custom_object( - group="networking.k8s.io", - version="v1", - namespace=namespace, - plural="ingresses", - name=name, - ) - elif resource["kind"] == "Route": - name = resource["metadata"]["name"] - api_instance.delete_namespaced_custom_object( - group="route.openshift.io", - version="v1", - namespace=namespace, - plural="routes", - name=name, - ) - elif resource["kind"] == "Secret": - name = resource["metadata"]["name"] - secret_instance = client.CoreV1Api(api_config_handler()) - secret_instance.delete_namespaced_secret( - namespace=namespace, - name=name, - )
    + cluster_name = self.config.name + if self.config.write_to_file: + with open(self.app_wrapper_yaml) as f: + yamls = yaml.load_all(f, Loader=yaml.FullLoader) + _delete_resources(yamls, namespace, api_instance, cluster_name) + else: + yamls = yaml.safe_load_all(self.app_wrapper_yaml) + _delete_resources(yamls, namespace, api_instance, cluster_name) -

    Class variables

    -
    -
    var torchx_scheduler
    -
    -
    -
    -

    Instance variables

    var job_client
    @@ -1888,7 +1674,7 @@

    Instance variables

    k8client = api_config_handler() or client.ApiClient() if self._job_submission_client: return self._job_submission_client - if self.config.openshift_oauth: + if is_openshift_cluster(): print(k8client.configuration.get_api_key_with_prefix("authorization")) self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), @@ -1960,7 +1746,7 @@

    Methods

    elif "route.openshift.io/termination" in annotations: protocol = "https" return f"{protocol}://{ingress.spec.rules[0].host}" - return "Dashboard ingress not available yet, have you run cluster.up()?" + return "Dashboard not available yet, have you run cluster.up()?"
    @@ -2041,8 +1827,9 @@

    Methods

    local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority - ingress_domain = self.config.ingress_domain - ingress_options = self.config.ingress_options + write_to_file = self.config.write_to_file + verify_tls = self.config.verify_tls + local_queue = self.config.local_queue return generate_appwrapper( name=name, namespace=namespace, @@ -2065,9 +1852,9 @@

    Methods

    image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, - openshift_oauth=self.config.openshift_oauth, - ingress_domain=ingress_domain, - ingress_options=ingress_options, + write_to_file=write_to_file, + verify_tls=verify_tls, + local_queue=local_queue, )
    @@ -2117,12 +1904,7 @@

    Methods

    else: self._component_resources_down(namespace, api_instance) except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - - if self.config.openshift_oauth: - delete_openshift_oauth_objects( - cluster_name=self.config.name, namespace=namespace - ) + return _kube_api_error_handling(e)
    @@ -2156,7 +1938,7 @@

    Methods

-def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={})
+def from_k8_cluster_object(rc, mcad=True, write_to_file=False, verify_tls=True)
    @@ -2164,31 +1946,41 @@

    Methods

    Expand source code -
    def from_k8_cluster_object(rc, mcad=True, ingress_domain=None, ingress_options={}):
    +
    def from_k8_cluster_object(
    +    rc,
    +    mcad=True,
    +    write_to_file=False,
    +    verify_tls=True,
    +):
    +    config_check()
    +    if (
    +        rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"]
    +        == "True"
    +    ):
    +        local_interactive = True
    +    else:
    +        local_interactive = False
         machine_types = (
             rc["metadata"]["labels"]["orderedinstance"].split("_")
             if "orderedinstance" in rc["metadata"]["labels"]
             else []
         )
    -    local_interactive = (
    -        "volumeMounts"
    -        in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0]
    -    )
    -    if local_interactive:
    -        ingress_domain = get_ingress_domain_from_client(
    -            rc["metadata"]["name"], rc["metadata"]["namespace"]
    -        )
    +
         cluster_config = ClusterConfiguration(
             name=rc["metadata"]["name"],
             namespace=rc["metadata"]["namespace"],
             machine_types=machine_types,
             num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"],
    -        min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["requests"]["cpu"],
    -        max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["limits"]["cpu"],
    +        min_cpus=int(
    +            rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][
    +                "resources"
    +            ]["requests"]["cpu"]
    +        ),
    +        max_cpus=int(
    +            rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][
    +                "resources"
    +            ]["limits"]["cpu"]
    +        ),
             min_memory=int(
                 rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][
                     "resources"
    @@ -2199,17 +1991,19 @@ 

    Methods

    "resources" ]["limits"]["memory"][:-1] ), - num_gpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["limits"]["nvidia.com/gpu"], + num_gpus=int( + rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ + "resources" + ]["limits"]["nvidia.com/gpu"] + ), instascale=True if machine_types else False, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], local_interactive=local_interactive, mcad=mcad, - ingress_domain=ingress_domain, - ingress_options=ingress_options, + write_to_file=write_to_file, + verify_tls=verify_tls, ) return Cluster(cluster_config)
    @@ -2361,6 +2155,9 @@

    Methods

         # check the ray cluster status
         cluster = _ray_cluster_status(self.config.name, self.config.namespace)
         if cluster:
+            if cluster.status == RayClusterStatus.SUSPENDED:
+                ready = False
+                status = CodeFlareClusterStatus.SUSPENDED
             if cluster.status == RayClusterStatus.UNKNOWN:
                 ready = False
                 status = CodeFlareClusterStatus.STARTING
@@ -2387,30 +2184,6 @@

    Methods

    return status, ready
    -
    -def torchx_config(self, working_dir: str = None, requirements: str = None) ‑> Dict[str, str] -
    -
    -
    -
    - -Expand source code - -
    def torchx_config(
    -    self, working_dir: str = None, requirements: str = None
    -) -> Dict[str, str]:
    -    dashboard_address = urllib3.util.parse_url(self.cluster_dashboard_uri()).host
    -    to_return = {
    -        "cluster_name": self.config.name,
    -        "dashboard_address": dashboard_address,
    -    }
    -    if working_dir:
    -        to_return["working_dir"] = working_dir
    -    if requirements:
    -        to_return["requirements"] = requirements
    -    return to_return
    -
    -
    def up(self)
    @@ -2427,24 +2200,30 @@

    Methods

    the MCAD queue. """ namespace = self.config.namespace - if self.config.openshift_oauth: - create_openshift_oauth_objects( - cluster_name=self.config.name, namespace=namespace - ) try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) if self.config.mcad: - with open(self.app_wrapper_yaml) as f: - aw = yaml.load(f, Loader=yaml.FullLoader) - api_instance.create_namespaced_custom_object( - group="workload.codeflare.dev", - version="v1beta1", - namespace=namespace, - plural="appwrappers", - body=aw, - ) + if self.config.write_to_file: + with open(self.app_wrapper_yaml) as f: + aw = yaml.load(f, Loader=yaml.FullLoader) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) + else: + aw = yaml.safe_load(self.app_wrapper_yaml) + api_instance.create_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta1", + namespace=namespace, + plural="appwrappers", + body=aw, + ) else: self._component_resources_up(namespace, api_instance) except Exception as e: # pragma: no cover @@ -2489,36 +2268,33 @@

    Methods

         Checks every five seconds.
         """
         print("Waiting for requested resources to be set up...")
-        ready = False
-        dashboard_ready = False
-        status = None
         time = 0
-        while not ready:
+        while True:
+            if timeout and time >= timeout:
+                raise TimeoutError(
+                    f"wait() timed out after waiting {timeout}s for cluster to be ready"
+                )
             status, ready = self.status(print_to_console=False)
             if status == CodeFlareClusterStatus.UNKNOWN:
                 print(
                     "WARNING: Current cluster status is unknown, have you run cluster.up yet?"
                 )
-            if not ready:
-                if timeout and time >= timeout:
-                    raise TimeoutError(
-                        f"wait() timed out after waiting {timeout}s for cluster to be ready"
-                    )
-                sleep(5)
-                time += 5
+            if ready:
+                break
+            sleep(5)
+            time += 5
         print("Requested cluster is up and running!")
-        while dashboard_check and not dashboard_ready:
-            dashboard_ready = self.is_dashboard_ready()
-            if not dashboard_ready:
-                if timeout and time >= timeout:
-                    raise TimeoutError(
-                        f"wait() timed out after waiting {timeout}s for dashboard to be ready"
-                    )
-                sleep(5)
-                time += 5
-        if dashboard_ready:
-            print("Dashboard is ready!")
+        while dashboard_check:
+            if timeout and time >= timeout:
+                raise TimeoutError(
+                    f"wait() timed out after waiting {timeout}s for dashboard to be ready"
+                )
+            if self.is_dashboard_ready():
+                print("Dashboard is ready!")
+                break
+            sleep(5)
+            time += 5
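A minimal end-to-end sketch of the reworked wait loop, assuming the timeout/dashboard_check parameters shown above; names and sizes are placeholders.

# Sketch only: placeholder cluster name, namespace and sizing.
from codeflare_sdk import Cluster, ClusterConfiguration

cluster = Cluster(ClusterConfiguration(name="raytest", namespace="default", num_workers=2))
cluster.up()
# Polls every five seconds and raises TimeoutError if the cluster or the
# dashboard is not ready within the given timeout.
cluster.wait_ready(timeout=300, dashboard_check=True)
print(cluster.cluster_dashboard_uri())
cluster.down()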
    @@ -2541,7 +2317,6 @@

    Index

    @@ -2565,8 +2340,6 @@

    list_jobs
  • local_client_url
  • status
  • -
  • torchx_config
  • -
  • torchx_scheduler
  • up
  • validate_image_config
  • wait_ready
  • diff --git a/docs/detailed-documentation/cluster/config.html b/docs/detailed-documentation/cluster/config.html index a4f2d662..72f0e3da 100644 --- a/docs/detailed-documentation/cluster/config.html +++ b/docs/detailed-documentation/cluster/config.html @@ -78,15 +78,22 @@

    Module codeflare_sdk.cluster.config

     num_gpus: int = 0
     template: str = f"{dir}/templates/base-template.yaml"
     instascale: bool = False
-    mcad: bool = True
+    mcad: bool = False
     envs: dict = field(default_factory=dict)
     image: str = ""
     local_interactive: bool = False
     image_pull_secrets: list = field(default_factory=list)
     dispatch_priority: str = None
-    openshift_oauth: bool = False  # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator
-    ingress_options: dict = field(default_factory=dict)
-    ingress_domain: str = None
+    write_to_file: bool = False
+    verify_tls: bool = True
+
+    def __post_init__(self):
+        if not self.verify_tls:
+            print(
+                "Warning: TLS verification has been disabled - Endpoint checks will be bypassed"
+            )
+
+    local_queue: str = None
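A minimal configuration sketch using the new fields; the namespace and queue name are placeholders, and local_queue is only needed when no default Kueue LocalQueue is annotated in the namespace.

# Sketch only: placeholder values.
from codeflare_sdk.cluster.config import ClusterConfiguration

config = ClusterConfiguration(
    name="raytest",
    namespace="default",
    num_workers=2,
    min_cpus=1,
    max_cpus=2,
    min_memory=4,
    max_memory=8,
    num_gpus=0,
    mcad=False,                  # new default: plain components instead of an AppWrapper
    write_to_file=False,         # keep generated YAML in memory rather than ~/.codeflare/resources/
    verify_tls=True,             # False prints the bypass warning from __post_init__
    local_queue="team-a-queue",  # hypothetical Kueue LocalQueue name
)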
    @@ -100,7 +107,7 @@

    Classes

 class ClusterConfiguration
-(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = True, envs: dict = <factory>, image: str = '', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, openshift_oauth: bool = False, ingress_options: dict = <factory>, ingress_domain: str = None)
+(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = False, envs: dict = <factory>, image: str = '', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, write_to_file: bool = False, verify_tls: bool = True, local_queue: str = None)

    This dataclass is used to specify resource requirements and other details, and @@ -130,15 +137,22 @@

    Classes

     num_gpus: int = 0
     template: str = f"{dir}/templates/base-template.yaml"
     instascale: bool = False
-    mcad: bool = True
+    mcad: bool = False
     envs: dict = field(default_factory=dict)
     image: str = ""
     local_interactive: bool = False
     image_pull_secrets: list = field(default_factory=list)
     dispatch_priority: str = None
-    openshift_oauth: bool = False  # NOTE: to use the user must have permission to create a RoleBinding for system:auth-delegator
-    ingress_options: dict = field(default_factory=dict)
-    ingress_domain: str = None
+    write_to_file: bool = False
+    verify_tls: bool = True
+
+    def __post_init__(self):
+        if not self.verify_tls:
+            print(
+                "Warning: TLS verification has been disabled - Endpoint checks will be bypassed"
+            )
+
+    local_queue: str = None

    Class variables

    @@ -174,19 +188,15 @@

    Class variables

    -
    var ingress_domain : str
    -
    -
    -
    -
    var ingress_options : dict
    +
    var instascale : bool
    -
    var instascale : bool
    +
    var local_interactive : bool
    -
    var local_interactive : bool
    +
    var local_queue : str
    @@ -230,11 +240,15 @@

    Class variables

    -
    var openshift_oauth : bool
    +
    var template : str
    +
    +
    +
    +
    var verify_tls : bool
    -
    var template : str
    +
    var write_to_file : bool
    @@ -267,10 +281,9 @@

    head_memory
  • image
  • image_pull_secrets
  • -
  • ingress_domain
  • -
  • ingress_options
  • instascale
  • local_interactive
  • +
  • local_queue
  • machine_types
  • max_cpus
  • max_memory
  • @@ -281,8 +294,9 @@

    namespace
  • num_gpus
  • num_workers
  • -
  • openshift_oauth
  • template
  • +
  • verify_tls
  • +
  • write_to_file
  • diff --git a/docs/detailed-documentation/cluster/index.html b/docs/detailed-documentation/cluster/index.html index 8454c875..a7967885 100644 --- a/docs/detailed-documentation/cluster/index.html +++ b/docs/detailed-documentation/cluster/index.html @@ -41,7 +41,13 @@

    Module codeflare_sdk.cluster

     AppWrapper,
 )
-from .cluster import Cluster, ClusterConfiguration
+from .cluster import (
+    Cluster,
+    ClusterConfiguration,
+    get_cluster,
+    list_all_queued,
+    list_all_clusters,
+)
 from .awload import AWManager
diff --git a/docs/detailed-documentation/cluster/model.html b/docs/detailed-documentation/cluster/model.html
index ff8a0071..3843f545 100644
--- a/docs/detailed-documentation/cluster/model.html
+++ b/docs/detailed-documentation/cluster/model.html
@@ -65,6 +65,7 @@

    Module codeflare_sdk.cluster.model

    UNHEALTHY = "unhealthy" FAILED = "failed" UNKNOWN = "unknown" + SUSPENDED = "suspended" class AppWrapperStatus(Enum): @@ -92,6 +93,7 @@

    Module codeflare_sdk.cluster.model

     QUEUEING = 4
     FAILED = 5
     UNKNOWN = 6
+    SUSPENDED = 7


 @dataclass
@@ -254,7 +256,8 @@

    Class variables

     QUEUED = 3
     QUEUEING = 4
     FAILED = 5
-    UNKNOWN = 6
+    UNKNOWN = 6
+    SUSPENDED = 7
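To show how the new enum member might be consumed, a hedged sketch that inspects an existing `cluster` object:

# Sketch only: `cluster` is an already constructed Cluster object.
from codeflare_sdk import CodeFlareClusterStatus

status, ready = cluster.status(print_to_console=False)
if status == CodeFlareClusterStatus.SUSPENDED:
    # Typically means the queue is holding the RayCluster until quota is available.
    print("Cluster is suspended; it will start once it is admitted")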

    Ancestors

      @@ -282,6 +285,10 @@

      Class variables

      +
      var SUSPENDED
      +
      +
      +
      var UNKNOWN
      @@ -387,7 +394,8 @@

      Class variables

      READY = "ready" UNHEALTHY = "unhealthy" FAILED = "failed" - UNKNOWN = "unknown" + UNKNOWN = "unknown" + SUSPENDED = "suspended"

      Ancestors

      @@ -483,6 +496,7 @@

    • FAILED
    • READY
    • +
    • SUSPENDED
    • UNHEALTHY
    • UNKNOWN
diff --git a/docs/detailed-documentation/index.html b/docs/detailed-documentation/index.html
index 4895f0a9..21bab3d1 100644
--- a/docs/detailed-documentation/index.html
+++ b/docs/detailed-documentation/index.html
@@ -39,9 +39,12 @@

    Package codeflare_sdk

     CodeFlareClusterStatus,
     RayCluster,
     AppWrapper,
+    get_cluster,
+    list_all_queued,
+    list_all_clusters,
 )

-from .job import JobDefinition, Job, DDPJobDefinition, DDPJob, RayJobClient
+from .job import RayJobClient

 from .utils import generate_cert
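A short sketch of the newly re-exported helpers, assuming they keep a namespace argument; the namespace and cluster name are placeholders and output goes to the console.

# Sketch only: "default" and "raytest" are placeholders.
from codeflare_sdk import get_cluster, list_all_clusters, list_all_queued

list_all_clusters("default")   # pretty-prints RayClusters found in the namespace
list_all_queued("default")     # pretty-prints queued workloads in the namespace
cluster = get_cluster("raytest", "default")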
diff --git a/docs/detailed-documentation/job/index.html b/docs/detailed-documentation/job/index.html
index 514cb895..ccfc679d 100644
--- a/docs/detailed-documentation/job/index.html
+++ b/docs/detailed-documentation/job/index.html
@@ -26,18 +26,12 @@

    Module codeflare_sdk.job

    Expand source code -
    from .jobs import JobDefinition, Job, DDPJobDefinition, DDPJob
    -
    -from .ray_jobs import RayJobClient
    +
    from .ray_jobs import RayJobClient

    Sub-modules

    -
    codeflare_sdk.job.jobs
    -
    -

    The jobs sub-module contains methods needed to submit Distributed Data Parallel(DDP) jobs to Ray Clusters created by the CodeFlare SDK.

    -
    codeflare_sdk.job.ray_jobs

    The ray_jobs sub-module contains methods needed to submit jobs and connect to Ray Clusters that were not created by CodeFlare. @@ -65,7 +59,6 @@
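With the DDP/TorchX classes gone, a hedged sketch of the remaining RayJobClient, assuming it mirrors Ray's JobSubmissionClient interface; the address, verify flag and entrypoint below are placeholders and assumptions, not confirmed defaults.

# Sketch only: placeholder dashboard address and entrypoint.
from codeflare_sdk.job.ray_jobs import RayJobClient

client = RayJobClient(address="https://ray-dashboard-raytest-default.apps.example.com", verify=False)
submission_id = client.submit_job(entrypoint="python train.py")
print(client.get_job_status(submission_id))
print(client.get_job_logs(submission_id))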

    Index

  • Sub-modules

  • diff --git a/docs/detailed-documentation/job/jobs.html b/docs/detailed-documentation/job/jobs.html deleted file mode 100644 index 98e59e66..00000000 --- a/docs/detailed-documentation/job/jobs.html +++ /dev/null @@ -1,631 +0,0 @@ - - - - - - -codeflare_sdk.job.jobs API documentation - - - - - - - - - - - -
    -
    -
    -

    Module codeflare_sdk.job.jobs

    -
    -
    -

    The jobs sub-module contains methods needed to submit Distributed Data Parallel(DDP) jobs to Ray Clusters created by the CodeFlare SDK.

    -
    - -Expand source code - -
    # Copyright 2023 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -The jobs sub-module contains methods needed to submit Distributed Data Parallel(DDP) jobs to Ray Clusters created by the CodeFlare SDK.
    -"""
    -
    -import abc
    -from typing import TYPE_CHECKING, Optional, Dict, List
    -from pathlib import Path
    -
    -from torchx.components.dist import ddp
    -from torchx.runner import get_runner, Runner
    -from torchx.schedulers.ray_scheduler import RayScheduler
    -from torchx.specs import AppHandle, parse_app_handle, AppDryRunInfo
    -
    -
    -if TYPE_CHECKING:
    -    from ..cluster.cluster import Cluster
    -from ..cluster.cluster import get_current_namespace
    -
    -all_jobs: List["Job"] = []
    -
    -
    -class JobDefinition(metaclass=abc.ABCMeta):
    -    def _dry_run(self, cluster: "Cluster"):
    -        pass
    -
    -    def submit(self, cluster: "Cluster"):
    -        pass
    -
    -
    -class Job(metaclass=abc.ABCMeta):
    -    def status(self):
    -        pass
    -
    -    def logs(self):
    -        pass
    -
    -
    -class DDPJobDefinition(JobDefinition):
    -    def __init__(
    -        self,
    -        script: Optional[str] = None,
    -        m: Optional[str] = None,
    -        script_args: Optional[List[str]] = None,
    -        name: Optional[str] = None,
    -        cpu: Optional[int] = None,
    -        gpu: Optional[int] = None,
    -        memMB: Optional[int] = None,
    -        h: Optional[str] = None,
    -        j: Optional[str] = None,
    -        env: Optional[Dict[str, str]] = None,
    -        max_retries: int = 0,
    -        mounts: Optional[List[str]] = None,
    -        rdzv_port: int = 29500,
    -        rdzv_backend: str = None,
    -        scheduler_args: Optional[Dict[str, str]] = None,
    -        image: Optional[str] = None,
    -        workspace: Optional[str] = f"file://{Path.cwd()}",
    -    ):
    -        if bool(script) == bool(m):  # logical XOR
    -            raise ValueError(
    -                "Exactly one of the following arguments must be defined: [script, m]."
    -            )
    -        self.script = script
    -        self.m = m
    -        self.script_args: List[str] = script_args if script_args is not None else []
    -        self.name = name
    -        self.cpu = cpu
    -        self.gpu = gpu
    -        self.memMB = memMB
    -        self.h = h
    -        self.j = j
    -        self.env: Dict[str, str] = env if env is not None else dict()
    -        self.max_retries = max_retries
    -        self.mounts: List[str] = mounts if mounts is not None else []
    -        self.rdzv_port = rdzv_port
    -        self.rdzv_backend = rdzv_backend
    -        self.scheduler_args: Dict[str, str] = (
    -            scheduler_args if scheduler_args is not None else dict()
    -        )
    -        self.image = image
    -        self.workspace = workspace
    -
    -    def _dry_run(self, cluster: "Cluster"):
    -        j = f"{cluster.config.num_workers}x{max(cluster.config.num_gpus, 1)}"  # # of proc. = # of gpus
    -        runner = get_runner(ray_client=cluster.job_client)
    -        runner._scheduler_instances["ray"] = RayScheduler(
    -            session_name=runner._name, ray_client=cluster.job_client
    -        )
    -        return (
    -            runner.dryrun(
    -                app=ddp(
    -                    *self.script_args,
    -                    script=self.script,
    -                    m=self.m,
    -                    name=self.name,
    -                    h=self.h,
    -                    cpu=self.cpu if self.cpu is not None else cluster.config.max_cpus,
    -                    gpu=self.gpu if self.gpu is not None else cluster.config.num_gpus,
    -                    memMB=self.memMB
    -                    if self.memMB is not None
    -                    else cluster.config.max_memory * 1024,
    -                    j=self.j if self.j is not None else j,
    -                    env=self.env,
    -                    max_retries=self.max_retries,
    -                    rdzv_port=self.rdzv_port,
    -                    rdzv_backend=self.rdzv_backend
    -                    if self.rdzv_backend is not None
    -                    else "static",
    -                    mounts=self.mounts,
    -                ),
    -                scheduler=cluster.torchx_scheduler,
    -                cfg=cluster.torchx_config(**self.scheduler_args),
    -                workspace=self.workspace,
    -            ),
    -            runner,
    -        )
    -
    -    def _missing_spec(self, spec: str):
    -        raise ValueError(f"Job definition missing arg: {spec}")
    -
    -    def _dry_run_no_cluster(self):
    -        if self.scheduler_args is not None:
    -            if self.scheduler_args.get("namespace") is None:
    -                self.scheduler_args["namespace"] = get_current_namespace()
    -        runner = get_runner()
    -        return (
    -            runner.dryrun(
    -                app=ddp(
    -                    *self.script_args,
    -                    script=self.script,
    -                    m=self.m,
    -                    name=self.name
    -                    if self.name is not None
    -                    else self._missing_spec("name"),
    -                    h=self.h,
    -                    cpu=self.cpu
    -                    if self.cpu is not None
    -                    else self._missing_spec("cpu (# cpus per worker)"),
    -                    gpu=self.gpu
    -                    if self.gpu is not None
    -                    else self._missing_spec("gpu (# gpus per worker)"),
    -                    memMB=self.memMB
    -                    if self.memMB is not None
    -                    else self._missing_spec("memMB (memory in MB)"),
    -                    j=self.j
    -                    if self.j is not None
    -                    else self._missing_spec(
    -                        "j (`workers`x`procs`)"
    -                    ),  # # of proc. = # of gpus,
    -                    env=self.env,  # should this still exist?
    -                    max_retries=self.max_retries,
    -                    rdzv_port=self.rdzv_port,  # should this still exist?
    -                    rdzv_backend=self.rdzv_backend
    -                    if self.rdzv_backend is not None
    -                    else "c10d",
    -                    mounts=self.mounts,
    -                    image=self.image
    -                    if self.image is not None
    -                    else self._missing_spec("image"),
    -                ),
    -                scheduler="kubernetes_mcad",
    -                cfg=self.scheduler_args,
    -                workspace="",
    -            ),
    -            runner,
    -        )
    -
    -    def submit(self, cluster: "Cluster" = None) -> "Job":
    -        return DDPJob(self, cluster)
    -
    -
    -class DDPJob(Job):
    -    def __init__(self, job_definition: "DDPJobDefinition", cluster: "Cluster" = None):
    -        self.job_definition = job_definition
    -        self.cluster = cluster
    -        if self.cluster:
    -            definition, runner = job_definition._dry_run(cluster)
    -            self._app_handle = runner.schedule(definition)
    -            self._runner = runner
    -        else:
    -            definition, runner = job_definition._dry_run_no_cluster()
    -            self._app_handle = runner.schedule(definition)
    -            self._runner = runner
    -        all_jobs.append(self)
    -
    -    def status(self) -> str:
    -        return self._runner.status(self._app_handle)
    -
    -    def logs(self) -> str:
    -        return "".join(self._runner.log_lines(self._app_handle, None))
    -
    -    def cancel(self):
    -        self._runner.cancel(self._app_handle)
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class DDPJob -(job_definition: DDPJobDefinition, cluster: Cluster = None) -
    -
    -
    -
    - -Expand source code - -
    class DDPJob(Job):
    -    def __init__(self, job_definition: "DDPJobDefinition", cluster: "Cluster" = None):
    -        self.job_definition = job_definition
    -        self.cluster = cluster
    -        if self.cluster:
    -            definition, runner = job_definition._dry_run(cluster)
    -            self._app_handle = runner.schedule(definition)
    -            self._runner = runner
    -        else:
    -            definition, runner = job_definition._dry_run_no_cluster()
    -            self._app_handle = runner.schedule(definition)
    -            self._runner = runner
    -        all_jobs.append(self)
    -
    -    def status(self) -> str:
    -        return self._runner.status(self._app_handle)
    -
    -    def logs(self) -> str:
    -        return "".join(self._runner.log_lines(self._app_handle, None))
    -
    -    def cancel(self):
    -        self._runner.cancel(self._app_handle)
    -
    -

    Ancestors

    - -

    Methods

    -
    -
    -def cancel(self) -
    -
    -
    -
    - -Expand source code - -
    def cancel(self):
    -    self._runner.cancel(self._app_handle)
    -
    -
    -
    -def logs(self) ‑> str -
    -
    -
    -
    - -Expand source code - -
    def logs(self) -> str:
    -    return "".join(self._runner.log_lines(self._app_handle, None))
    -
    -
    -
    -def status(self) ‑> str -
    -
    -
    -
    - -Expand source code - -
    def status(self) -> str:
    -    return self._runner.status(self._app_handle)
    -
    -
    -
    -
    -
    -class DDPJobDefinition -(script: Optional[str] = None, m: Optional[str] = None, script_args: Optional[List[str]] = None, name: Optional[str] = None, cpu: Optional[int] = None, gpu: Optional[int] = None, memMB: Optional[int] = None, h: Optional[str] = None, j: Optional[str] = None, env: Optional[Dict[str, str]] = None, max_retries: int = 0, mounts: Optional[List[str]] = None, rdzv_port: int = 29500, rdzv_backend: str = None, scheduler_args: Optional[Dict[str, str]] = None, image: Optional[str] = None, workspace: Optional[str] = 'file:///home/runner/work/codeflare-sdk/codeflare-sdk') -
    -
    -
    -
    - -Expand source code - -
    class DDPJobDefinition(JobDefinition):
    -    def __init__(
    -        self,
    -        script: Optional[str] = None,
    -        m: Optional[str] = None,
    -        script_args: Optional[List[str]] = None,
    -        name: Optional[str] = None,
    -        cpu: Optional[int] = None,
    -        gpu: Optional[int] = None,
    -        memMB: Optional[int] = None,
    -        h: Optional[str] = None,
    -        j: Optional[str] = None,
    -        env: Optional[Dict[str, str]] = None,
    -        max_retries: int = 0,
    -        mounts: Optional[List[str]] = None,
    -        rdzv_port: int = 29500,
    -        rdzv_backend: str = None,
    -        scheduler_args: Optional[Dict[str, str]] = None,
    -        image: Optional[str] = None,
    -        workspace: Optional[str] = f"file://{Path.cwd()}",
    -    ):
    -        if bool(script) == bool(m):  # logical XOR
    -            raise ValueError(
    -                "Exactly one of the following arguments must be defined: [script, m]."
    -            )
    -        self.script = script
    -        self.m = m
    -        self.script_args: List[str] = script_args if script_args is not None else []
    -        self.name = name
    -        self.cpu = cpu
    -        self.gpu = gpu
    -        self.memMB = memMB
    -        self.h = h
    -        self.j = j
    -        self.env: Dict[str, str] = env if env is not None else dict()
    -        self.max_retries = max_retries
    -        self.mounts: List[str] = mounts if mounts is not None else []
    -        self.rdzv_port = rdzv_port
    -        self.rdzv_backend = rdzv_backend
    -        self.scheduler_args: Dict[str, str] = (
    -            scheduler_args if scheduler_args is not None else dict()
    -        )
    -        self.image = image
    -        self.workspace = workspace
    -
    -    def _dry_run(self, cluster: "Cluster"):
    -        j = f"{cluster.config.num_workers}x{max(cluster.config.num_gpus, 1)}"  # # of proc. = # of gpus
    -        runner = get_runner(ray_client=cluster.job_client)
    -        runner._scheduler_instances["ray"] = RayScheduler(
    -            session_name=runner._name, ray_client=cluster.job_client
    -        )
    -        return (
    -            runner.dryrun(
    -                app=ddp(
    -                    *self.script_args,
    -                    script=self.script,
    -                    m=self.m,
    -                    name=self.name,
    -                    h=self.h,
    -                    cpu=self.cpu if self.cpu is not None else cluster.config.max_cpus,
    -                    gpu=self.gpu if self.gpu is not None else cluster.config.num_gpus,
    -                    memMB=self.memMB
    -                    if self.memMB is not None
    -                    else cluster.config.max_memory * 1024,
    -                    j=self.j if self.j is not None else j,
    -                    env=self.env,
    -                    max_retries=self.max_retries,
    -                    rdzv_port=self.rdzv_port,
    -                    rdzv_backend=self.rdzv_backend
    -                    if self.rdzv_backend is not None
    -                    else "static",
    -                    mounts=self.mounts,
    -                ),
    -                scheduler=cluster.torchx_scheduler,
    -                cfg=cluster.torchx_config(**self.scheduler_args),
    -                workspace=self.workspace,
    -            ),
    -            runner,
    -        )
    -
    -    def _missing_spec(self, spec: str):
    -        raise ValueError(f"Job definition missing arg: {spec}")
    -
    -    def _dry_run_no_cluster(self):
    -        if self.scheduler_args is not None:
    -            if self.scheduler_args.get("namespace") is None:
    -                self.scheduler_args["namespace"] = get_current_namespace()
    -        runner = get_runner()
    -        return (
    -            runner.dryrun(
    -                app=ddp(
    -                    *self.script_args,
    -                    script=self.script,
    -                    m=self.m,
    -                    name=self.name
    -                    if self.name is not None
    -                    else self._missing_spec("name"),
    -                    h=self.h,
    -                    cpu=self.cpu
    -                    if self.cpu is not None
    -                    else self._missing_spec("cpu (# cpus per worker)"),
    -                    gpu=self.gpu
    -                    if self.gpu is not None
    -                    else self._missing_spec("gpu (# gpus per worker)"),
    -                    memMB=self.memMB
    -                    if self.memMB is not None
    -                    else self._missing_spec("memMB (memory in MB)"),
    -                    j=self.j
    -                    if self.j is not None
    -                    else self._missing_spec(
    -                        "j (`workers`x`procs`)"
    -                    ),  # # of proc. = # of gpus,
    -                    env=self.env,  # should this still exist?
    -                    max_retries=self.max_retries,
    -                    rdzv_port=self.rdzv_port,  # should this still exist?
    -                    rdzv_backend=self.rdzv_backend
    -                    if self.rdzv_backend is not None
    -                    else "c10d",
    -                    mounts=self.mounts,
    -                    image=self.image
    -                    if self.image is not None
    -                    else self._missing_spec("image"),
    -                ),
    -                scheduler="kubernetes_mcad",
    -                cfg=self.scheduler_args,
    -                workspace="",
    -            ),
    -            runner,
    -        )
    -
    -    def submit(self, cluster: "Cluster" = None) -> "Job":
    -        return DDPJob(self, cluster)
    -
    -

    Ancestors

    - -

    Methods

    -
    -
    -def submit(self, cluster: Cluster = None) ‑> Job -
    -
    -
    -
    - -Expand source code - -
    def submit(self, cluster: "Cluster" = None) -> "Job":
    -    return DDPJob(self, cluster)
    -
    -
    -
    -
    -
    -class Job -
    -
    -
    -
    - -Expand source code - -
    class Job(metaclass=abc.ABCMeta):
    -    def status(self):
    -        pass
    -
    -    def logs(self):
    -        pass
    -
    -

    Subclasses

    - -

    Methods

    -
    -
    -def logs(self) -
    -
    -
    -
    - -Expand source code - -
    def logs(self):
    -    pass
    -
    -
    -
    -def status(self) -
    -
    -
    -
    - -Expand source code - -
    def status(self):
    -    pass
    -
    -
    -
    -
    -
    -class JobDefinition -
    -
    -
    -
    - -Expand source code - -
    class JobDefinition(metaclass=abc.ABCMeta):
    -    def _dry_run(self, cluster: "Cluster"):
    -        pass
    -
    -    def submit(self, cluster: "Cluster"):
    -        pass
    -
    -

    Subclasses

    - -

    Methods

    -
    -
    -def submit(self, cluster: Cluster) -
    -
    -
    -
    - -Expand source code - -
    def submit(self, cluster: "Cluster"):
    -    pass
    -
    -
    -
    -
    -
    -
    -
    - -
    - - - diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index d533c8d1..452bea97 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -48,6 +48,7 @@

    Module codeflare_sdk.utils.generate_yaml

 (in the cluster sub-module) for AppWrapper generation.
 """
+from typing import Optional
 import typing
 import yaml
 import sys
@@ -80,10 +81,6 @@

    Module codeflare_sdk.utils.generate_yaml

    return name, name -def gen_dashboard_ingress_name(cluster_name): - return f"ray-dashboard-{cluster_name}" - - # Check if the routes api exists def is_openshift_cluster(): try: @@ -98,156 +95,17 @@

    Module codeflare_sdk.utils.generate_yaml

    return _kube_api_error_handling(e) -def update_dashboard_route(route_item, cluster_name, namespace): - metadata = route_item.get("generictemplate", {}).get("metadata") - metadata["name"] = gen_dashboard_ingress_name(cluster_name) - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - spec = route_item.get("generictemplate", {}).get("spec") - spec["to"]["name"] = f"{cluster_name}-head-svc" - - -# ToDo: refactor the update_x_route() functions -def update_rayclient_route(route_item, cluster_name, namespace): - metadata = route_item.get("generictemplate", {}).get("metadata") - metadata["name"] = f"rayclient-{cluster_name}" - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - spec = route_item.get("generictemplate", {}).get("spec") - spec["to"]["name"] = f"{cluster_name}-head-svc" - - -def update_dashboard_exposure( - ingress_item, route_item, cluster_name, namespace, ingress_options, ingress_domain -): - if is_openshift_cluster(): - update_dashboard_route(route_item, cluster_name, namespace) - else: - update_dashboard_ingress( - ingress_item, cluster_name, namespace, ingress_options, ingress_domain - ) - - -def update_rayclient_exposure( - client_route_item, client_ingress_item, cluster_name, namespace, ingress_domain -): - if is_openshift_cluster(): - update_rayclient_route(client_route_item, cluster_name, namespace) - else: - update_rayclient_ingress( - client_ingress_item, cluster_name, namespace, ingress_domain - ) - - -def update_dashboard_ingress( - ingress_item, cluster_name, namespace, ingress_options, ingress_domain -): # pragma: no cover - metadata = ingress_item.get("generictemplate", {}).get("metadata") - spec = ingress_item.get("generictemplate", {}).get("spec") - if ingress_options != {}: - for index, ingress_option in enumerate(ingress_options["ingresses"]): - if "ingressName" not in ingress_option.keys(): - raise ValueError( - f"Error: 'ingressName' is missing or empty for ingress item at index {index}" - ) - if "port" not in ingress_option.keys(): - raise ValueError( - f"Error: 'port' is missing or empty for ingress item at index {index}" - ) - elif not isinstance(ingress_option["port"], int): - raise ValueError( - f"Error: 'port' is not of type int for ingress item at index {index}" - ) - if ingress_option is not None: - metadata["name"] = ingress_option["ingressName"] - metadata["namespace"] = namespace - metadata["labels"]["ingress-owner"] = cluster_name - metadata["labels"]["ingress-options"] = "true" - if ( - "annotations" not in ingress_option.keys() - or ingress_option["annotations"] is None - ): - del metadata["annotations"] - else: - metadata["annotations"] = ingress_option["annotations"] - if ( - "path" not in ingress_option.keys() - or ingress_option["path"] is None - ): - del spec["rules"][0]["http"]["paths"][0]["path"] - else: - spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[ - "path" - ] - if ( - "pathType" not in ingress_option.keys() - or ingress_option["pathType"] is None - ): - spec["rules"][0]["http"]["paths"][0][ - "pathType" - ] = "ImplementationSpecific" - if ( - "host" not in ingress_option.keys() - or ingress_option["host"] is None - ): - del spec["rules"][0]["host"] - else: - spec["rules"][0]["host"] = ingress_option["host"] - if ( - "ingressClassName" not in ingress_option.keys() - or ingress_option["ingressClassName"] is None - ): - del spec["ingressClassName"] - else: - spec["ingressClassName"] = 
ingress_option["ingressClassName"] - - spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ - "name" - ] = f"{cluster_name}-head-svc" - else: - spec["ingressClassName"] = "nginx" - metadata["name"] = gen_dashboard_ingress_name(cluster_name) - metadata["labels"]["ingress-owner"] = cluster_name - metadata["namespace"] = namespace - spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ - "name" - ] = f"{cluster_name}-head-svc" - if ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. Please specify an ingress domain" - ) - else: - domain = ingress_domain - del metadata["annotations"] - spec["rules"][0]["host"] = f"ray-dashboard-{cluster_name}-{namespace}.{domain}" - - -def update_rayclient_ingress( - ingress_item, cluster_name, namespace, ingress_domain -): # pragma: no cover - metadata = ingress_item.get("generictemplate", {}).get("metadata") - spec = ingress_item.get("generictemplate", {}).get("spec") - metadata["name"] = f"rayclient-{cluster_name}" - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - - spec["rules"][0]["http"]["paths"][0]["backend"]["service"][ - "name" - ] = f"{cluster_name}-head-svc" - - if ingress_domain is not None: - ingressClassName = "nginx" - annotations = { - "nginx.ingress.kubernetes.io/rewrite-target": "/", - "nginx.ingress.kubernetes.io/ssl-redirect": "true", - "nginx.ingress.kubernetes.io/ssl-passthrough": "true", - } - else: - raise ValueError("ingress_domain is invalid. Please specify a domain") - - metadata["annotations"] = annotations - spec["ingressClassName"] = ingressClassName - spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{ingress_domain}" +def is_kind_cluster(): + try: + config_check() + v1 = client.CoreV1Api() + label_selector = "kubernetes.io/hostname=kind-control-plane" + nodes = v1.list_node(label_selector=label_selector) + # If we find one or more nodes with the label, assume it's a KinD cluster + return len(nodes.items) > 0 + except Exception as e: + print(f"Error checking if cluster is KinD: {e}") + return False def update_names(yaml, item, appwrapper_name, cluster_name, namespace): @@ -258,6 +116,20 @@

    Module codeflare_sdk.utils.generate_yaml

    lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name lower_meta["name"] = cluster_name lower_meta["namespace"] = namespace + lower_spec = item.get("generictemplate", {}).get("spec") + if is_openshift_cluster(): + cookie_secret_env_var = { + "name": "COOKIE_SECRET", + "valueFrom": { + "secretKeyRef": { + "key": "cookie_secret", + "name": f"{cluster_name}-oauth-config", + } + }, + } + lower_spec["headGroupSpec"]["template"]["spec"]["containers"][0]["env"].append( + cookie_secret_env_var + ) def update_labels(yaml, instascale, instance_types): @@ -450,10 +322,10 @@

    Module codeflare_sdk.utils.generate_yaml

    data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) -def enable_local_interactive(resources, cluster_name, namespace, ingress_domain): - rayclient_ingress_item = resources["resources"].get("GenericItems")[3] - rayclient_route_item = resources["resources"].get("GenericItems")[4] - ca_secret_item = resources["resources"].get("GenericItems")[5] +def enable_local_interactive(resources, cluster_name, namespace): # pragma: no cover + from ..cluster.cluster import _get_ingress_domain + + ca_secret_item = resources["resources"].get("GenericItems")[1] item = resources["resources"].get("GenericItems")[0] update_ca_secret(ca_secret_item, cluster_name, namespace) # update_ca_secret_volumes @@ -477,21 +349,12 @@

    Module codeflare_sdk.utils.generate_yaml

    command = command.replace("deployment-name", cluster_name) - if ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. For creating the client route/ingress please specify an ingress domain" - ) - else: - domain = ingress_domain + domain = "" ## FIX - We can't retrieve ingress domain - move init container to CFO command = command.replace("server-name", domain) - update_rayclient_exposure( - rayclient_route_item, - rayclient_ingress_item, - cluster_name, - namespace, - ingress_domain, - ) + item["generictemplate"]["metadata"]["annotations"][ + "sdk.codeflare.dev/local_interactive" + ] = "True" item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" @@ -548,26 +411,6 @@

    Module codeflare_sdk.utils.generate_yaml

    resources["GenericItems"] = updated_items -def delete_route_or_ingress(resources): - if is_openshift_cluster(): - client_to_remove_name = "rayclient-deployment-ingress" - dashboard_to_remove_name = "ray-dashboard-deployment-ingress" - else: - client_to_remove_name = "rayclient-deployment-route" - dashboard_to_remove_name = "ray-dashboard-deployment-route" - - updated_items = [] - for i in resources["GenericItems"][:]: - if dashboard_to_remove_name in i["generictemplate"]["metadata"]["name"]: - continue - elif client_to_remove_name in i["generictemplate"]["metadata"]["name"]: - continue - - updated_items.append(i) - - resources["GenericItems"] = updated_items - - def write_user_appwrapper(user_yaml, output_file_name): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -603,13 +446,9 @@

    Module codeflare_sdk.utils.generate_yaml

    ) # allows for setting value of Cluster object when initializing object from an existing AppWrapper on cluster user_yaml["metadata"]["annotations"] = user_yaml["metadata"].get("annotations", {}) - user_yaml["metadata"]["annotations"][ - "codeflare-sdk-use-oauth" - ] = "true" # if the user gets an ray_headgroup_pod = user_yaml["spec"]["resources"]["GenericItems"][0][ "generictemplate" ]["spec"]["headGroupSpec"]["template"]["spec"] - user_yaml["spec"]["resources"]["GenericItems"].pop(1) ray_headgroup_pod["serviceAccount"] = oauth_sa_name ray_headgroup_pod["volumes"] = ray_headgroup_pod.get("volumes", []) @@ -638,7 +477,7 @@

    Module codeflare_sdk.utils.generate_yaml

    "--upstream=http://localhost:8265", f"--tls-cert={tls_mount_location}/tls.crt", f"--tls-key={tls_mount_location}/tls.key", - f"--cookie-secret={b64encode(urandom(64)).decode('utf-8')}", # create random string for encrypting cookie + "--cookie-secret=$(COOKIE_SECRET)", f'--openshift-delegate-urls={{"/":{{"resource":"pods","namespace":"{namespace}","verb":"get"}}}}', ], image="registry.redhat.io/openshift4/ose-oauth-proxy@sha256:1ea6a01bf3e63cdcf125c6064cbd4a4a270deaf0f157b3eabb78f60556840366", @@ -653,7 +492,35 @@

    Module codeflare_sdk.utils.generate_yaml

    ) -def write_components(user_yaml: dict, output_file_name: str): +def get_default_kueue_name(namespace: str): + # If the local queue is set, use it. Otherwise, try to use the default queue. + try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + local_queues = api_instance.list_namespaced_custom_object( + group="kueue.x-k8s.io", + version="v1beta1", + namespace=namespace, + plural="localqueues", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + for lq in local_queues["items"]: + if ( + "annotations" in lq["metadata"] + and "kueue.x-k8s.io/default-queue" in lq["metadata"]["annotations"] + and lq["metadata"]["annotations"]["kueue.x-k8s.io/default-queue"].lower() + == "true" + ): + return lq["metadata"]["name"] + raise ValueError( + "Default Local Queue with kueue.x-k8s.io/default-queue: true annotation not found please create a default Local Queue or provide the local_queue name in Cluster Configuration" + ) + + +def write_components( + user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str] +): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) if not os.path.exists(directory_path): @@ -661,9 +528,19 @@

    Module codeflare_sdk.utils.generate_yaml

    components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") open(output_file_name, "w").close() + lq_name = local_queue or get_default_kueue_name(namespace) with open(output_file_name, "a") as outfile: for component in components: if "generictemplate" in component: + if ( + "workload.codeflare.dev/appwrapper" + in component["generictemplate"]["metadata"]["labels"] + ): + del component["generictemplate"]["metadata"]["labels"][ + "workload.codeflare.dev/appwrapper" + ] + labels = component["generictemplate"]["metadata"]["labels"] + labels.update({"kueue.x-k8s.io/queue-name": lq_name}) outfile.write("---\n") yaml.dump( component["generictemplate"], outfile, default_flow_style=False @@ -671,6 +548,39 @@

    Module codeflare_sdk.utils.generate_yaml

    print(f"Written to: {output_file_name}") +def load_components( + user_yaml: dict, name: str, namespace: str, local_queue: Optional[str] +): + component_list = [] + components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") + lq_name = local_queue or get_default_kueue_name(namespace) + for component in components: + if "generictemplate" in component: + if ( + "workload.codeflare.dev/appwrapper" + in component["generictemplate"]["metadata"]["labels"] + ): + del component["generictemplate"]["metadata"]["labels"][ + "workload.codeflare.dev/appwrapper" + ] + labels = component["generictemplate"]["metadata"]["labels"] + labels.update({"kueue.x-k8s.io/queue-name": lq_name}) + component_list.append(component["generictemplate"]) + + resources = "---\n" + "---\n".join( + [yaml.dump(component) for component in component_list] + ) + user_yaml = resources + print(f"Yaml resources loaded for {name}") + return user_yaml + + +def load_appwrapper(user_yaml: dict, name: str): + user_yaml = yaml.dump(user_yaml) + print(f"Yaml resources loaded for {name}") + return user_yaml + + def generate_appwrapper( name: str, namespace: str, @@ -693,17 +603,21 @@

    Module codeflare_sdk.utils.generate_yaml

    image_pull_secrets: list, dispatch_priority: str, priority_val: int, - openshift_oauth: bool, - ingress_domain: str, - ingress_options: dict, + write_to_file: bool, + verify_tls: bool, + local_queue: Optional[str], ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) resources = user_yaml.get("spec", "resources") item = resources["resources"].get("GenericItems")[0] - ingress_item = resources["resources"].get("GenericItems")[1] - route_item = resources["resources"].get("GenericItems")[2] - update_names(user_yaml, item, appwrapper_name, cluster_name, namespace) + update_names( + user_yaml, + item, + appwrapper_name, + cluster_name, + namespace, + ) update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) update_custompodresources( @@ -735,31 +649,30 @@

    Module codeflare_sdk.utils.generate_yaml

    head_memory, head_gpus, ) - update_dashboard_exposure( - ingress_item, - route_item, - cluster_name, - namespace, - ingress_options, - ingress_domain, - ) + if local_interactive: - enable_local_interactive(resources, cluster_name, namespace, ingress_domain) + enable_local_interactive(resources, cluster_name, namespace) else: disable_raycluster_tls(resources["resources"]) - delete_route_or_ingress(resources["resources"]) - - if openshift_oauth: + if is_openshift_cluster(): enable_openshift_oauth(user_yaml, cluster_name, namespace) - directory_path = os.path.expanduser("~/.codeflare/appwrapper/") + directory_path = os.path.expanduser("~/.codeflare/resources/") outfile = os.path.join(directory_path, appwrapper_name + ".yaml") - if not mcad: - write_components(user_yaml, outfile) + + if write_to_file: + if mcad: + write_user_appwrapper(user_yaml, outfile) + else: + write_components(user_yaml, outfile, namespace, local_queue) + return outfile else: - write_user_appwrapper(user_yaml, outfile) - return outfile + if mcad: + user_yaml = load_appwrapper(user_yaml, name) + else: + user_yaml = load_components(user_yaml, name, namespace, local_queue) + return user_yaml
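From the caller's side, the branching above means the generated resources are either written to disk or kept in memory; a hedged sketch with placeholder names:

# Sketch only: behaviour inferred from the write_to_file branch above.
from codeflare_sdk import Cluster, ClusterConfiguration

cluster = Cluster(ClusterConfiguration(name="raytest", namespace="default", write_to_file=True))
# With write_to_file=True this is a path under ~/.codeflare/resources/;
# with write_to_file=False it would hold the generated YAML string instead.
print(cluster.app_wrapper_yaml)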
    @@ -782,35 +695,6 @@

    Functions

    return [x for x in l if x["name"] not in target] -
    -def delete_route_or_ingress(resources) -
    -
    -
    -
    - -Expand source code - -
    def delete_route_or_ingress(resources):
    -    if is_openshift_cluster():
    -        client_to_remove_name = "rayclient-deployment-ingress"
    -        dashboard_to_remove_name = "ray-dashboard-deployment-ingress"
    -    else:
    -        client_to_remove_name = "rayclient-deployment-route"
    -        dashboard_to_remove_name = "ray-dashboard-deployment-route"
    -
    -    updated_items = []
    -    for i in resources["GenericItems"][:]:
    -        if dashboard_to_remove_name in i["generictemplate"]["metadata"]["name"]:
    -            continue
    -        elif client_to_remove_name in i["generictemplate"]["metadata"]["name"]:
    -            continue
    -
    -        updated_items.append(i)
    -
    -    resources["GenericItems"] = updated_items
    -
    -
    def disable_raycluster_tls(resources)
    @@ -867,7 +751,7 @@

    Functions

    -def enable_local_interactive(resources, cluster_name, namespace, ingress_domain) +def enable_local_interactive(resources, cluster_name, namespace)
    @@ -875,10 +759,10 @@

    Functions

    Expand source code -
    def enable_local_interactive(resources, cluster_name, namespace, ingress_domain):
    -    rayclient_ingress_item = resources["resources"].get("GenericItems")[3]
    -    rayclient_route_item = resources["resources"].get("GenericItems")[4]
    -    ca_secret_item = resources["resources"].get("GenericItems")[5]
    +
    def enable_local_interactive(resources, cluster_name, namespace):  # pragma: no cover
    +    from ..cluster.cluster import _get_ingress_domain
    +
    +    ca_secret_item = resources["resources"].get("GenericItems")[1]
         item = resources["resources"].get("GenericItems")[0]
         update_ca_secret(ca_secret_item, cluster_name, namespace)
         # update_ca_secret_volumes
    @@ -902,21 +786,12 @@ 

    Functions

    command = command.replace("deployment-name", cluster_name) - if ingress_domain is None: - raise ValueError( - "ingress_domain is invalid. For creating the client route/ingress please specify an ingress domain" - ) - else: - domain = ingress_domain + domain = "" ## FIX - We can't retrieve ingress domain - move init container to CFO command = command.replace("server-name", domain) - update_rayclient_exposure( - rayclient_route_item, - rayclient_ingress_item, - cluster_name, - namespace, - ingress_domain, - ) + item["generictemplate"]["metadata"]["annotations"][ + "sdk.codeflare.dev/local_interactive" + ] = "True" item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ "initContainers" @@ -955,13 +830,9 @@

    Functions

    ) # allows for setting value of Cluster object when initializing object from an existing AppWrapper on cluster user_yaml["metadata"]["annotations"] = user_yaml["metadata"].get("annotations", {}) - user_yaml["metadata"]["annotations"][ - "codeflare-sdk-use-oauth" - ] = "true" # if the user gets an ray_headgroup_pod = user_yaml["spec"]["resources"]["GenericItems"][0][ "generictemplate" ]["spec"]["headGroupSpec"]["template"]["spec"] - user_yaml["spec"]["resources"]["GenericItems"].pop(1) ray_headgroup_pod["serviceAccount"] = oauth_sa_name ray_headgroup_pod["volumes"] = ray_headgroup_pod.get("volumes", []) @@ -974,19 +845,6 @@

    Functions

    )
    -
    -def gen_dashboard_ingress_name(cluster_name) -
    -
    -
    -
    - -Expand source code - -
    def gen_dashboard_ingress_name(cluster_name):
    -    return f"ray-dashboard-{cluster_name}"
    -
    -
    def gen_names(name)
    @@ -1007,7 +865,7 @@

    Functions

    -def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, mcad: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int, openshift_oauth: bool, ingress_domain: str, ingress_options: dict) +def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, mcad: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int, write_to_file: bool, verify_tls: bool, local_queue: Optional[str])
    @@ -1037,17 +895,21 @@

    Functions

    image_pull_secrets: list, dispatch_priority: str, priority_val: int, - openshift_oauth: bool, - ingress_domain: str, - ingress_options: dict, + write_to_file: bool, + verify_tls: bool, + local_queue: Optional[str], ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) resources = user_yaml.get("spec", "resources") item = resources["resources"].get("GenericItems")[0] - ingress_item = resources["resources"].get("GenericItems")[1] - route_item = resources["resources"].get("GenericItems")[2] - update_names(user_yaml, item, appwrapper_name, cluster_name, namespace) + update_names( + user_yaml, + item, + appwrapper_name, + cluster_name, + namespace, + ) update_labels(user_yaml, instascale, instance_types) update_priority(user_yaml, item, dispatch_priority, priority_val) update_custompodresources( @@ -1079,31 +941,87 @@

    Functions

        head_memory,
        head_gpus,
    )
-    update_dashboard_exposure(
-        ingress_item,
-        route_item,
-        cluster_name,
-        namespace,
-        ingress_options,
-        ingress_domain,
-    )
+
    if local_interactive:
-        enable_local_interactive(resources, cluster_name, namespace, ingress_domain)
+        enable_local_interactive(resources, cluster_name, namespace)
    else:
        disable_raycluster_tls(resources["resources"])
-        delete_route_or_ingress(resources["resources"])
-
-    if openshift_oauth:
+    if is_openshift_cluster():
        enable_openshift_oauth(user_yaml, cluster_name, namespace)

-    directory_path = os.path.expanduser("~/.codeflare/appwrapper/")
+    directory_path = os.path.expanduser("~/.codeflare/resources/")
    outfile = os.path.join(directory_path, appwrapper_name + ".yaml")

-    if not mcad:
-        write_components(user_yaml, outfile)
+
+    if write_to_file:
+        if mcad:
+            write_user_appwrapper(user_yaml, outfile)
+        else:
+            write_components(user_yaml, outfile, namespace, local_queue)
+        return outfile
    else:
-        write_user_appwrapper(user_yaml, outfile)
-        return outfile
+        if mcad:
+            user_yaml = load_appwrapper(user_yaml, name)
+        else:
+            user_yaml = load_components(user_yaml, name, namespace, local_queue)
+        return user_yaml
+
+
    +
    +def get_default_kueue_name(namespace: str) +
    +
    +
    +
    + +Expand source code + +
    def get_default_kueue_name(namespace: str):
    +    # If the local queue is set, use it. Otherwise, try to use the default queue.
    +    try:
    +        config_check()
    +        api_instance = client.CustomObjectsApi(api_config_handler())
    +        local_queues = api_instance.list_namespaced_custom_object(
    +            group="kueue.x-k8s.io",
    +            version="v1beta1",
    +            namespace=namespace,
    +            plural="localqueues",
    +        )
    +    except Exception as e:  # pragma: no cover
    +        return _kube_api_error_handling(e)
    +    for lq in local_queues["items"]:
    +        if (
    +            "annotations" in lq["metadata"]
    +            and "kueue.x-k8s.io/default-queue" in lq["metadata"]["annotations"]
    +            and lq["metadata"]["annotations"]["kueue.x-k8s.io/default-queue"].lower()
    +            == "true"
    +        ):
    +            return lq["metadata"]["name"]
    +    raise ValueError(
    +        "Default Local Queue with kueue.x-k8s.io/default-queue: true annotation not found please create a default Local Queue or provide the local_queue name in Cluster Configuration"
    +    )
    +
    +
    +
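As a side note (not part of this patch): `get_default_kueue_name` only discovers a default `LocalQueue`, it does not create one. Below is a minimal sketch of marking an existing `LocalQueue` as the namespace default so that the annotation checked above is present, using the same Kubernetes client calls; the queue name `team-a-queue` and namespace `default` are made-up values for illustration.

```
from kubernetes import client, config

# Sketch only: annotate an existing LocalQueue so that
# get_default_kueue_name() can discover it as the default.
config.load_kube_config()
api = client.CustomObjectsApi()

api.patch_namespaced_custom_object(
    group="kueue.x-k8s.io",
    version="v1beta1",
    namespace="default",  # hypothetical namespace
    plural="localqueues",
    name="team-a-queue",  # hypothetical LocalQueue name
    body={"metadata": {"annotations": {"kueue.x-k8s.io/default-queue": "true"}}},
)
```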
    +def is_kind_cluster() +
    +
    +
    +
    + +Expand source code + +
    def is_kind_cluster():
    +    try:
    +        config_check()
    +        v1 = client.CoreV1Api()
    +        label_selector = "kubernetes.io/hostname=kind-control-plane"
    +        nodes = v1.list_node(label_selector=label_selector)
    +        # If we find one or more nodes with the label, assume it's a KinD cluster
    +        return len(nodes.items) > 0
    +    except Exception as e:
    +        print(f"Error checking if cluster is KinD: {e}")
    +        return False
    @@ -1128,6 +1046,57 @@

    Functions

    return _kube_api_error_handling(e)
    +
    +def load_appwrapper(user_yaml: dict, name: str) +
    +
    +
    +
    + +Expand source code + +
    def load_appwrapper(user_yaml: dict, name: str):
    +    user_yaml = yaml.dump(user_yaml)
    +    print(f"Yaml resources loaded for {name}")
    +    return user_yaml
    +
    +
    +
    +def load_components(user_yaml: dict, name: str, namespace: str, local_queue: Optional[str]) +
    +
    +
    +
    + +Expand source code + +
    def load_components(
    +    user_yaml: dict, name: str, namespace: str, local_queue: Optional[str]
    +):
    +    component_list = []
    +    components = user_yaml.get("spec", "resources")["resources"].get("GenericItems")
    +    lq_name = local_queue or get_default_kueue_name(namespace)
    +    for component in components:
    +        if "generictemplate" in component:
    +            if (
    +                "workload.codeflare.dev/appwrapper"
    +                in component["generictemplate"]["metadata"]["labels"]
    +            ):
    +                del component["generictemplate"]["metadata"]["labels"][
    +                    "workload.codeflare.dev/appwrapper"
    +                ]
    +                labels = component["generictemplate"]["metadata"]["labels"]
    +                labels.update({"kueue.x-k8s.io/queue-name": lq_name})
    +            component_list.append(component["generictemplate"])
    +
    +    resources = "---\n" + "---\n".join(
    +        [yaml.dump(component) for component in component_list]
    +    )
    +    user_yaml = resources
    +    print(f"Yaml resources loaded for {name}")
    +    return user_yaml
    +
    +
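In effect, `load_components` (like `write_components` further down) strips the MCAD ownership label from each wrapped resource and tags it with the Kueue queue name instead. A small standalone illustration of that relabelling step on a plain dictionary follows; the label values are invented for the example and are not taken from this patch.

```
# Illustration of the label swap applied to each generictemplate's metadata.
labels = {
    "workload.codeflare.dev/appwrapper": "my-appwrapper",  # hypothetical value
    "orderedinstance": "m5.xlarge_g4dn.xlarge",            # hypothetical value
}

labels.pop("workload.codeflare.dev/appwrapper", None)  # drop the AppWrapper label
labels["kueue.x-k8s.io/queue-name"] = "team-a-queue"   # hand the resource to Kueue

print(labels)
# {'orderedinstance': 'm5.xlarge_g4dn.xlarge', 'kueue.x-k8s.io/queue-name': 'team-a-queue'}
```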
    def read_template(template)
    @@ -1247,136 +1216,6 @@

    Functions

    sys.exit("Error: malformed template") -
    -def update_dashboard_exposure(ingress_item, route_item, cluster_name, namespace, ingress_options, ingress_domain) -
    -
    -
    -
    - -Expand source code - -
    def update_dashboard_exposure(
    -    ingress_item, route_item, cluster_name, namespace, ingress_options, ingress_domain
    -):
    -    if is_openshift_cluster():
    -        update_dashboard_route(route_item, cluster_name, namespace)
    -    else:
    -        update_dashboard_ingress(
    -            ingress_item, cluster_name, namespace, ingress_options, ingress_domain
    -        )
    -
    -
    -
    -def update_dashboard_ingress(ingress_item, cluster_name, namespace, ingress_options, ingress_domain) -
    -
    -
    -
    - -Expand source code - -
    def update_dashboard_ingress(
    -    ingress_item, cluster_name, namespace, ingress_options, ingress_domain
    -):  # pragma: no cover
    -    metadata = ingress_item.get("generictemplate", {}).get("metadata")
    -    spec = ingress_item.get("generictemplate", {}).get("spec")
    -    if ingress_options != {}:
    -        for index, ingress_option in enumerate(ingress_options["ingresses"]):
    -            if "ingressName" not in ingress_option.keys():
    -                raise ValueError(
    -                    f"Error: 'ingressName' is missing or empty for ingress item at index {index}"
    -                )
    -            if "port" not in ingress_option.keys():
    -                raise ValueError(
    -                    f"Error: 'port' is missing or empty for ingress item at index {index}"
    -                )
    -            elif not isinstance(ingress_option["port"], int):
    -                raise ValueError(
    -                    f"Error: 'port' is not of type int for ingress item at index {index}"
    -                )
    -            if ingress_option is not None:
    -                metadata["name"] = ingress_option["ingressName"]
    -                metadata["namespace"] = namespace
    -                metadata["labels"]["ingress-owner"] = cluster_name
    -                metadata["labels"]["ingress-options"] = "true"
    -                if (
    -                    "annotations" not in ingress_option.keys()
    -                    or ingress_option["annotations"] is None
    -                ):
    -                    del metadata["annotations"]
    -                else:
    -                    metadata["annotations"] = ingress_option["annotations"]
    -                if (
    -                    "path" not in ingress_option.keys()
    -                    or ingress_option["path"] is None
    -                ):
    -                    del spec["rules"][0]["http"]["paths"][0]["path"]
    -                else:
    -                    spec["rules"][0]["http"]["paths"][0]["path"] = ingress_option[
    -                        "path"
    -                    ]
    -                if (
    -                    "pathType" not in ingress_option.keys()
    -                    or ingress_option["pathType"] is None
    -                ):
    -                    spec["rules"][0]["http"]["paths"][0][
    -                        "pathType"
    -                    ] = "ImplementationSpecific"
    -                if (
    -                    "host" not in ingress_option.keys()
    -                    or ingress_option["host"] is None
    -                ):
    -                    del spec["rules"][0]["host"]
    -                else:
    -                    spec["rules"][0]["host"] = ingress_option["host"]
    -                if (
    -                    "ingressClassName" not in ingress_option.keys()
    -                    or ingress_option["ingressClassName"] is None
    -                ):
    -                    del spec["ingressClassName"]
    -                else:
    -                    spec["ingressClassName"] = ingress_option["ingressClassName"]
    -
    -                spec["rules"][0]["http"]["paths"][0]["backend"]["service"][
    -                    "name"
    -                ] = f"{cluster_name}-head-svc"
    -    else:
    -        spec["ingressClassName"] = "nginx"
    -        metadata["name"] = gen_dashboard_ingress_name(cluster_name)
    -        metadata["labels"]["ingress-owner"] = cluster_name
    -        metadata["namespace"] = namespace
    -        spec["rules"][0]["http"]["paths"][0]["backend"]["service"][
    -            "name"
    -        ] = f"{cluster_name}-head-svc"
    -        if ingress_domain is None:
    -            raise ValueError(
    -                "ingress_domain is invalid. Please specify an ingress domain"
    -            )
    -        else:
    -            domain = ingress_domain
    -        del metadata["annotations"]
    -        spec["rules"][0]["host"] = f"ray-dashboard-{cluster_name}-{namespace}.{domain}"
    -
    -
    -
    -def update_dashboard_route(route_item, cluster_name, namespace) -
    -
    -
    -
    - -Expand source code - -
    def update_dashboard_route(route_item, cluster_name, namespace):
    -    metadata = route_item.get("generictemplate", {}).get("metadata")
    -    metadata["name"] = gen_dashboard_ingress_name(cluster_name)
    -    metadata["namespace"] = namespace
    -    metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc"
    -    spec = route_item.get("generictemplate", {}).get("spec")
    -    spec["to"]["name"] = f"{cluster_name}-head-svc"
    -
    -
    def update_env(spec, env)
    @@ -1468,7 +1307,21 @@

    Functions

    lower_meta = item.get("generictemplate", {}).get("metadata")
    lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name
    lower_meta["name"] = cluster_name
-    lower_meta["namespace"] = namespace
+    lower_meta["namespace"] = namespace
+    lower_spec = item.get("generictemplate", {}).get("spec")
+    if is_openshift_cluster():
+        cookie_secret_env_var = {
+            "name": "COOKIE_SECRET",
+            "valueFrom": {
+                "secretKeyRef": {
+                    "key": "cookie_secret",
+                    "name": f"{cluster_name}-oauth-config",
+                }
+            },
+        }
+        lower_spec["headGroupSpec"]["template"]["spec"]["containers"][0]["env"].append(
+            cookie_secret_env_var
+        )
    @@ -1550,81 +1403,6 @@

    Functions

    spec.pop("priority")
    -
    -def update_rayclient_exposure(client_route_item, client_ingress_item, cluster_name, namespace, ingress_domain) -
    -
    -
    -
    - -Expand source code - -
    def update_rayclient_exposure(
    -    client_route_item, client_ingress_item, cluster_name, namespace, ingress_domain
    -):
    -    if is_openshift_cluster():
    -        update_rayclient_route(client_route_item, cluster_name, namespace)
    -    else:
    -        update_rayclient_ingress(
    -            client_ingress_item, cluster_name, namespace, ingress_domain
    -        )
    -
    -
    -
    -def update_rayclient_ingress(ingress_item, cluster_name, namespace, ingress_domain) -
    -
    -
    -
    - -Expand source code - -
    def update_rayclient_ingress(
    -    ingress_item, cluster_name, namespace, ingress_domain
    -):  # pragma: no cover
    -    metadata = ingress_item.get("generictemplate", {}).get("metadata")
    -    spec = ingress_item.get("generictemplate", {}).get("spec")
    -    metadata["name"] = f"rayclient-{cluster_name}"
    -    metadata["namespace"] = namespace
    -    metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc"
    -
    -    spec["rules"][0]["http"]["paths"][0]["backend"]["service"][
    -        "name"
    -    ] = f"{cluster_name}-head-svc"
    -
    -    if ingress_domain is not None:
    -        ingressClassName = "nginx"
    -        annotations = {
    -            "nginx.ingress.kubernetes.io/rewrite-target": "/",
    -            "nginx.ingress.kubernetes.io/ssl-redirect": "true",
    -            "nginx.ingress.kubernetes.io/ssl-passthrough": "true",
    -        }
    -    else:
    -        raise ValueError("ingress_domain is invalid. Please specify a domain")
    -
    -    metadata["annotations"] = annotations
    -    spec["ingressClassName"] = ingressClassName
    -    spec["rules"][0]["host"] = f"rayclient-{cluster_name}-{namespace}.{ingress_domain}"
    -
    -
    -
    -def update_rayclient_route(route_item, cluster_name, namespace) -
    -
    -
    -
    - -Expand source code - -
    def update_rayclient_route(route_item, cluster_name, namespace):
    -    metadata = route_item.get("generictemplate", {}).get("metadata")
    -    metadata["name"] = f"rayclient-{cluster_name}"
    -    metadata["namespace"] = namespace
    -    metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc"
    -    spec = route_item.get("generictemplate", {}).get("spec")
    -    spec["to"]["name"] = f"{cluster_name}-head-svc"
    -
    -
    def update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu)
    @@ -1650,7 +1428,7 @@

    Functions

-def write_components(user_yaml: dict, output_file_name: str)
+def write_components(user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str])
    @@ -1658,7 +1436,9 @@

    Functions

Expand source code

-def write_components(user_yaml: dict, output_file_name: str):
+def write_components(
+    user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str]
+):
         # Create the directory if it doesn't exist
         directory_path = os.path.dirname(output_file_name)
         if not os.path.exists(directory_path):
    @@ -1666,9 +1446,19 @@ 

    Functions

    components = user_yaml.get("spec", "resources")["resources"].get("GenericItems")
    open(output_file_name, "w").close()
+    lq_name = local_queue or get_default_kueue_name(namespace)
    with open(output_file_name, "a") as outfile:
        for component in components:
            if "generictemplate" in component:
+                if (
+                    "workload.codeflare.dev/appwrapper"
+                    in component["generictemplate"]["metadata"]["labels"]
+                ):
+                    del component["generictemplate"]["metadata"]["labels"][
+                        "workload.codeflare.dev/appwrapper"
+                    ]
+                labels = component["generictemplate"]["metadata"]["labels"]
+                labels.update({"kueue.x-k8s.io/queue-name": lq_name})
                outfile.write("---\n")
                yaml.dump(
                    component["generictemplate"], outfile, default_flow_style=False
@@ -1716,21 +1506,20 @@

    Index

  • Functions

  • -
    codeflare_sdk.utils.openshift_oauth
    -
    -
    -
    codeflare_sdk.utils.pretty_print

    This sub-module exists primarily to be used internally by the Cluster object
@@ -74,7 +70,6 @@

    Index

  • codeflare_sdk.utils.generate_cert
  • codeflare_sdk.utils.generate_yaml
  • codeflare_sdk.utils.kube_api_helpers
  • -
  • codeflare_sdk.utils.openshift_oauth
  • codeflare_sdk.utils.pretty_print
  •

diff --git a/docs/detailed-documentation/utils/openshift_oauth.html b/docs/detailed-documentation/utils/openshift_oauth.html
deleted file mode 100644
index de848cf5..00000000
--- a/docs/detailed-documentation/utils/openshift_oauth.html
+++ /dev/null
@@ -1,323 +0,0 @@
-codeflare_sdk.utils.openshift_oauth API documentation
    -
    -
    -

    Module codeflare_sdk.utils.openshift_oauth

    -
    -
    -
    - -Expand source code - -
    from urllib3.util import parse_url
    -import yaml
    -
    -from ..cluster.auth import config_check, api_config_handler
    -
    -from kubernetes import client
    -from kubernetes import dynamic
    -
    -
    -def _route_api_getter():
    -    return dynamic.DynamicClient(
    -        api_config_handler() or client.ApiClient()
    -    ).resources.get(api_version="route.openshift.io/v1", kind="Route")
    -
    -
    -def create_openshift_oauth_objects(cluster_name, namespace):
    -    config_check()
    -    oauth_port = 8443
    -    oauth_sa_name = f"{cluster_name}-oauth-proxy"
    -    tls_secret_name = _gen_tls_secret_name(cluster_name)
    -    service_name = f"{cluster_name}-oauth"
    -    port_name = "oauth-proxy"
    -
    -    _create_or_replace_oauth_sa(namespace, oauth_sa_name, cluster_name)
    -    _create_or_replace_oauth_service_obj(
    -        cluster_name, namespace, oauth_port, tls_secret_name, service_name, port_name
    -    )
    -    _create_or_replace_oauth_route_object(
    -        cluster_name,
    -        namespace,
    -        service_name,
    -        port_name,
    -    )
    -    _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name)
    -
    -
    -def _create_or_replace_oauth_sa(namespace, oauth_sa_name, cluster_name):
    -    oauth_sa = client.V1ServiceAccount(
    -        api_version="v1",
    -        kind="ServiceAccount",
    -        metadata=client.V1ObjectMeta(
    -            name=oauth_sa_name,
    -            namespace=namespace,
    -            annotations={
    -                "serviceaccounts.openshift.io/oauth-redirectreference.first": '{"kind":"OAuthRedirectReference","apiVersion":"v1","reference":{"kind":"Route","name":"'
    -                + "ray-dashboard-"
    -                + cluster_name
    -                + '"}}'
    -            },
    -        ),
    -    )
    -    try:
    -        client.CoreV1Api(api_config_handler()).create_namespaced_service_account(
    -            namespace=namespace, body=oauth_sa
    -        )
    -    except client.ApiException as e:
    -        if e.reason == "Conflict":
    -            client.CoreV1Api(api_config_handler()).replace_namespaced_service_account(
    -                namespace=namespace,
    -                body=oauth_sa,
    -                name=oauth_sa_name,
    -            )
    -        else:
    -            raise e
    -
    -
    -def _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name):
    -    oauth_crb = client.V1ClusterRoleBinding(
    -        api_version="rbac.authorization.k8s.io/v1",
    -        kind="ClusterRoleBinding",
    -        metadata=client.V1ObjectMeta(name=f"{cluster_name}-rb"),
    -        role_ref=client.V1RoleRef(
    -            api_group="rbac.authorization.k8s.io",
    -            kind="ClusterRole",
    -            name="system:auth-delegator",
    -        ),
    -        subjects=[
    -            client.V1Subject(
    -                kind="ServiceAccount", name=oauth_sa_name, namespace=namespace
    -            )
    -        ],
    -    )
    -    try:
    -        client.RbacAuthorizationV1Api(api_config_handler()).create_cluster_role_binding(
    -            body=oauth_crb
    -        )
    -    except client.ApiException as e:
    -        if e.reason == "Conflict":
    -            client.RbacAuthorizationV1Api(
    -                api_config_handler()
    -            ).replace_cluster_role_binding(body=oauth_crb, name=f"{cluster_name}-rb")
    -        else:
    -            raise e
    -
    -
    -def _gen_tls_secret_name(cluster_name):
    -    return f"{cluster_name}-proxy-tls-secret"
    -
    -
    -def delete_openshift_oauth_objects(cluster_name, namespace):
    -    # NOTE: it might be worth adding error handling here, but shouldn't be necessary because cluster.down(...) checks
    -    # for an existing cluster before calling this => the objects should never be deleted twice
    -    oauth_sa_name = f"{cluster_name}-oauth-proxy"
    -    service_name = f"{cluster_name}-oauth"
    -    v1_routes = _route_api_getter()
    -    client.CoreV1Api(api_config_handler()).delete_namespaced_service_account(
    -        name=oauth_sa_name, namespace=namespace
    -    )
    -    client.CoreV1Api(api_config_handler()).delete_namespaced_service(
    -        name=service_name, namespace=namespace
    -    )
    -    v1_routes.delete(name=f"ray-dashboard-{cluster_name}", namespace=namespace)
    -    client.RbacAuthorizationV1Api(api_config_handler()).delete_cluster_role_binding(
    -        name=f"{cluster_name}-rb"
    -    )
    -
    -
    -def _create_or_replace_oauth_service_obj(
    -    cluster_name: str,
    -    namespace: str,
    -    oauth_port: int,
    -    tls_secret_name: str,
    -    service_name: str,
    -    port_name: str,
    -) -> client.V1Service:
    -    oauth_service = client.V1Service(
    -        api_version="v1",
    -        kind="Service",
    -        metadata=client.V1ObjectMeta(
    -            annotations={
    -                "service.beta.openshift.io/serving-cert-secret-name": tls_secret_name
    -            },
    -            name=service_name,
    -            namespace=namespace,
    -        ),
    -        spec=client.V1ServiceSpec(
    -            ports=[
    -                client.V1ServicePort(
    -                    name=port_name,
    -                    protocol="TCP",
    -                    port=443,
    -                    target_port=oauth_port,
    -                )
    -            ],
    -            selector={
    -                "app.kubernetes.io/created-by": "kuberay-operator",
    -                "app.kubernetes.io/name": "kuberay",
    -                "ray.io/cluster": cluster_name,
    -                "ray.io/identifier": f"{cluster_name}-head",
    -                "ray.io/node-type": "head",
    -            },
    -        ),
    -    )
    -    try:
    -        client.CoreV1Api(api_config_handler()).create_namespaced_service(
    -            namespace=namespace, body=oauth_service
    -        )
    -    except client.ApiException as e:
    -        if e.reason == "Conflict":
    -            client.CoreV1Api(api_config_handler()).replace_namespaced_service(
    -                namespace=namespace, body=oauth_service, name=service_name
    -            )
    -        else:
    -            raise e
    -
    -
    -def _create_or_replace_oauth_route_object(
    -    cluster_name: str,
    -    namespace: str,
    -    service_name: str,
    -    port_name: str,
    -):
    -    route = f"""
    -        apiVersion: route.openshift.io/v1
    -        kind: Route
    -        metadata:
    -            name: ray-dashboard-{cluster_name}
    -            namespace: {namespace}
    -        spec:
    -            port:
    -                targetPort: {port_name}
    -            tls:
    -                termination: reencrypt
    -            to:
    -                kind: Service
    -                name: {service_name}
    -    """
    -    route_data = yaml.safe_load(route)
    -    v1_routes = _route_api_getter()
    -    try:
    -        existing_route = v1_routes.get(
    -            name=f"ray-dashboard-{cluster_name}", namespace=namespace
    -        )
    -        route_data["metadata"]["resourceVersion"] = existing_route["metadata"][
    -            "resourceVersion"
    -        ]
    -        v1_routes.replace(body=route_data)
    -    except dynamic.client.ApiException:
    -        v1_routes.create(body=route_data)
    -
    -
    -
    -
    -
    -
    -
    -

    Functions

    -
    -
    -def create_openshift_oauth_objects(cluster_name, namespace) -
    -
    -
    -
    - -Expand source code - -
    def create_openshift_oauth_objects(cluster_name, namespace):
    -    config_check()
    -    oauth_port = 8443
    -    oauth_sa_name = f"{cluster_name}-oauth-proxy"
    -    tls_secret_name = _gen_tls_secret_name(cluster_name)
    -    service_name = f"{cluster_name}-oauth"
    -    port_name = "oauth-proxy"
    -
    -    _create_or_replace_oauth_sa(namespace, oauth_sa_name, cluster_name)
    -    _create_or_replace_oauth_service_obj(
    -        cluster_name, namespace, oauth_port, tls_secret_name, service_name, port_name
    -    )
    -    _create_or_replace_oauth_route_object(
    -        cluster_name,
    -        namespace,
    -        service_name,
    -        port_name,
    -    )
    -    _create_or_replace_oauth_rb(cluster_name, namespace, oauth_sa_name)
    -
    -
    -
    -def delete_openshift_oauth_objects(cluster_name, namespace) -
    -
    -
    -
    - -Expand source code - -
    def delete_openshift_oauth_objects(cluster_name, namespace):
    -    # NOTE: it might be worth adding error handling here, but shouldn't be necessary because cluster.down(...) checks
    -    # for an existing cluster before calling this => the objects should never be deleted twice
    -    oauth_sa_name = f"{cluster_name}-oauth-proxy"
    -    service_name = f"{cluster_name}-oauth"
    -    v1_routes = _route_api_getter()
    -    client.CoreV1Api(api_config_handler()).delete_namespaced_service_account(
    -        name=oauth_sa_name, namespace=namespace
    -    )
    -    client.CoreV1Api(api_config_handler()).delete_namespaced_service(
    -        name=service_name, namespace=namespace
    -    )
    -    v1_routes.delete(name=f"ray-dashboard-{cluster_name}", namespace=namespace)
    -    client.RbacAuthorizationV1Api(api_config_handler()).delete_cluster_role_binding(
    -        name=f"{cluster_name}-rb"
    -    )
    -
    -
    -
    -
    -
    -
    -
    - -
diff --git a/docs/detailed-documentation/utils/pretty_print.html b/docs/detailed-documentation/utils/pretty_print.html
index 5ff38db1..e7c10825 100644
--- a/docs/detailed-documentation/utils/pretty_print.html
+++ b/docs/detailed-documentation/utils/pretty_print.html
@@ -87,6 +87,30 @@

    Module codeflare_sdk.utils.pretty_print

    console.print(Panel.fit(table))

+def print_ray_clusters_status(app_wrappers: List[AppWrapper], starting: bool = False):
+    if not app_wrappers:
+        print_no_resources_found()
+        return  # shortcircuit
+
+    console = Console()
+    table = Table(
+        box=box.ASCII_DOUBLE_HEAD,
+        title="[bold] :rocket: Cluster Queue Status :rocket:",
+    )
+    table.add_column("Name", style="cyan", no_wrap=True)
+    table.add_column("Status", style="magenta")
+
+    for app_wrapper in app_wrappers:
+        name = app_wrapper.name
+        status = app_wrapper.status.value
+        if starting:
+            status += " (starting)"
+        table.add_row(name, status)
+        table.add_row("")  # empty row for spacing
+
+    console.print(Panel.fit(table))
+
+
 def print_cluster_status(cluster: RayCluster):
     "Pretty prints the status of a passed-in cluster"
     if not cluster:
@@ -101,15 +125,11 @@

    Module codeflare_sdk.utils.pretty_print

    )
    name = cluster.name
    dashboard = cluster.dashboard
-    # owned = bool(cluster["userOwned"])
-    owned = True

    #'table0' to display the cluster name, status, url, and dashboard link
    table0 = Table(box=None, show_header=False)
-    if owned:
-        table0.add_row("[white on green][bold]Name")
-    else:
-        table0.add_row("")
+
+    table0.add_row("[white on green][bold]Name")
    table0.add_row("[bold underline]" + name, status)
    table0.add_row()
    # fixme harcded to default for now
@@ -150,15 +170,11 @@

    Module codeflare_sdk.utils.pretty_print

    memory = str(cluster.worker_mem_min) + "~" + str(cluster.worker_mem_max)
    cpu = str(cluster.worker_cpu)
    gpu = str(cluster.worker_gpu)
-    # owned = bool(cluster["userOwned"])
-    owned = True

    #'table0' to display the cluster name, status, url, and dashboard link
    table0 = Table(box=None, show_header=False)
-    if owned:
-        table0.add_row("[white on green][bold]Name")
-    else:
-        table0.add_row("")
+
+    table0.add_row("[white on green][bold]Name")
    table0.add_row("[bold underline]" + name, status)
    table0.add_row()
    # fixme harcded to default for now
@@ -278,15 +294,11 @@

    Functions

    )
    name = cluster.name
    dashboard = cluster.dashboard
-    # owned = bool(cluster["userOwned"])
-    owned = True

    #'table0' to display the cluster name, status, url, and dashboard link
    table0 = Table(box=None, show_header=False)
-    if owned:
-        table0.add_row("[white on green][bold]Name")
-    else:
-        table0.add_row("")
+
+    table0.add_row("[white on green][bold]Name")
    table0.add_row("[bold underline]" + name, status)
    table0.add_row()
    # fixme harcded to default for now
@@ -336,15 +348,11 @@

    Functions

    memory = str(cluster.worker_mem_min) + "~" + str(cluster.worker_mem_max)
    cpu = str(cluster.worker_cpu)
    gpu = str(cluster.worker_gpu)
-    # owned = bool(cluster["userOwned"])
-    owned = True

    #'table0' to display the cluster name, status, url, and dashboard link
    table0 = Table(box=None, show_header=False)
-    if owned:
-        table0.add_row("[white on green][bold]Name")
-    else:
-        table0.add_row("")
+
+    table0.add_row("[white on green][bold]Name")
    table0.add_row("[bold underline]" + name, status)
    table0.add_row()
    # fixme harcded to default for now
@@ -415,6 +423,39 @@

    Functions

    console.print(Panel("[red]No resources found, have you run cluster.up() yet?"))
    +
    +def print_ray_clusters_status(app_wrappers: List[AppWrapper], starting: bool = False) +
    +
    +
    +
    + +Expand source code + +
    def print_ray_clusters_status(app_wrappers: List[AppWrapper], starting: bool = False):
    +    if not app_wrappers:
    +        print_no_resources_found()
    +        return  # shortcircuit
    +
    +    console = Console()
    +    table = Table(
    +        box=box.ASCII_DOUBLE_HEAD,
    +        title="[bold] :rocket: Cluster Queue Status :rocket:",
    +    )
    +    table.add_column("Name", style="cyan", no_wrap=True)
    +    table.add_column("Status", style="magenta")
    +
    +    for app_wrapper in app_wrappers:
    +        name = app_wrapper.name
    +        status = app_wrapper.status.value
    +        if starting:
    +            status += " (starting)"
    +        table.add_row(name, status)
    +        table.add_row("")  # empty row for spacing
    +
    +    console.print(Panel.fit(table))
    +
    +
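A rough usage sketch for the new helper follows; the stand-in objects only mimic the `name` and `status.value` attributes the function reads (they are not the SDK's real `AppWrapper` model), and the status strings are invented for the example.

```
from types import SimpleNamespace
from codeflare_sdk.utils.pretty_print import print_ray_clusters_status

# Stand-ins exposing just the attributes print_ray_clusters_status uses.
queued = SimpleNamespace(name="mnist-raycluster", status=SimpleNamespace(value="suspended"))
admitted = SimpleNamespace(name="tuning-raycluster", status=SimpleNamespace(value="admitted"))

# Renders the "Cluster Queue Status" table, tagging each row with " (starting)".
print_ray_clusters_status([queued, admitted], starting=True)
```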
    @@ -437,6 +478,7 @@

    Index

  • print_cluster_status
  • print_clusters
  • print_no_resources_found
  • +
  • print_ray_clusters_status
  •

From 85da8d2ec1a528924d25e32d942606c94d1ab611 Mon Sep 17 00:00:00 2001
From: Christian Zaccaria <73656840+ChristianZaccaria@users.noreply.github.com>
Date: Thu, 11 Apr 2024 14:35:09 +0100
Subject: [PATCH 177/496] Bug-fix wait_ready functionality (#507)

---
 src/codeflare_sdk/cluster/cluster.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py
index 81f7a711..60f968d2 100644
--- a/src/codeflare_sdk/cluster/cluster.py
+++ b/src/codeflare_sdk/cluster/cluster.py
@@ -848,7 +848,7 @@ def _get_app_wrappers(

 def _map_to_ray_cluster(rc) -> Optional[RayCluster]:
-    if "state" in rc["status"]:
+    if "status" in rc and "state" in rc["status"]:
         status = RayClusterStatus(rc["status"]["state"].lower())
     else:
         status = RayClusterStatus.UNKNOWN

From 771d4df339d82465776b37123fe1aa5424035e6e Mon Sep 17 00:00:00 2001
From: Karel Suta
Date: Thu, 11 Apr 2024 16:38:40 +0200
Subject: [PATCH 178/496] Fix RBAC for e2e tests (#508)

---
 .github/workflows/e2e_tests.yaml | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml
index 0fee64b3..054fee76 100644
--- a/.github/workflows/e2e_tests.yaml
+++ b/.github/workflows/e2e_tests.yaml
@@ -103,12 +103,10 @@ jobs:
         run: |
           kubectl create clusterrole list-ingresses --verb=get,list --resource=ingresses
           kubectl create clusterrolebinding sdk-user-list-ingresses --clusterrole=list-ingresses --user=sdk-user
-          kubectl create clusterrole appwrapper-creator --verb=get,list,create,delete,patch --resource=appwrappers
-          kubectl create clusterrolebinding sdk-user-appwrapper-creator --clusterrole=appwrapper-creator --user=sdk-user
           kubectl create clusterrole namespace-creator --verb=get,list,create,delete,patch --resource=namespaces
           kubectl create clusterrolebinding sdk-user-namespace-creator --clusterrole=namespace-creator --user=sdk-user
-          kubectl create clusterrole list-rayclusters --verb=get,list --resource=rayclusters
-          kubectl create clusterrolebinding sdk-user-list-rayclusters --clusterrole=list-rayclusters --user=sdk-user
+          kubectl create clusterrole raycluster-creator --verb=get,list,create,delete,patch --resource=rayclusters
+          kubectl create clusterrolebinding sdk-user-raycluster-creator --clusterrole=raycluster-creator --user=sdk-user
           kubectl config use-context sdk-user

       - name: Run e2e tests

From 7ded58df0e81e4597d0aea025306472ad8b9ad14 Mon Sep 17 00:00:00 2001
From: codeflare-machine-account
Date: Fri, 12 Apr 2024 17:12:48 +0000
Subject: [PATCH 179/496] Changes in docs for release: v0.15.1

---
 docs/detailed-documentation/cluster/cluster.html | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html
index 6e220e18..c5b65132 100644
--- a/docs/detailed-documentation/cluster/cluster.html
+++ b/docs/detailed-documentation/cluster/cluster.html
@@ -880,7 +880,7 @@

    Module codeflare_sdk.cluster.cluster

 def _map_to_ray_cluster(rc) -> Optional[RayCluster]:
-    if "state" in rc["status"]:
+    if "status" in rc and "state" in rc["status"]:
         status = RayClusterStatus(rc["status"]["state"].lower())
     else:
         status = RayClusterStatus.UNKNOWN

From b553e92b2ebf3ad80cbdc683f658e8484494d56a Mon Sep 17 00:00:00 2001
From: Shilpa Chugh
Date: Mon, 15 Apr 2024 15:46:19 +0530
Subject: [PATCH 180/496] update e2e.test workflow` (#510)

Co-authored-by: Shilpa Chugh
---
 .github/workflows/e2e_tests.yaml | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml
index 054fee76..686047c6 100644
--- a/.github/workflows/e2e_tests.yaml
+++ b/.github/workflows/e2e_tests.yaml
@@ -82,16 +82,14 @@ jobs:
         id: deploy
         run: |
           cd codeflare-operator
+          echo Setting up CodeFlare stack
+          make setup-e2e
           echo Deploying CodeFlare operator
           IMG="${REGISTRY_ADDRESS}"/codeflare-operator
           sed -i 's/RayDashboardOAuthEnabled: pointer.Bool(true)/RayDashboardOAuthEnabled: pointer.Bool(false)/' main.go
           make image-push -e IMG="${IMG}"
           make deploy -e IMG="${IMG}" -e ENV="e2e"
           kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators codeflare-operator-manager
-
-          echo Setting up CodeFlare stack
-          make setup-e2e
-
           cd ..

       - name: Add user to KinD

From 979ff43377475663836b5daa4c2b02ed9bd25020 Mon Sep 17 00:00:00 2001
From: Shilpa Chugh
Date: Mon, 15 Apr 2024 20:39:55 +0530
Subject: [PATCH 181/496] Refactor e2e tests to use kueue (#505)

Co-authored-by: Shilpa Chugh
---
 docs/e2e.md                                   | 21 ++++--
 tests/e2e/kueue_resources_setup.sh            | 53 ++++++++++++++
 ...t.py => mnist_raycluster_sdk_kind_test.py} | 71 +++++--------------
 tests/e2e/mnist_raycluster_sdk_oauth_test.py  | 32 ++-------
 tests/e2e/support.py                          |  7 ++
 5 files changed, 100 insertions(+), 84 deletions(-)
 create mode 100755 tests/e2e/kueue_resources_setup.sh
 rename tests/e2e/{mnist_raycluster_sdk_test.py => mnist_raycluster_sdk_kind_test.py} (52%)

diff --git a/docs/e2e.md b/docs/e2e.md
index 469647ca..27324f70 100644
--- a/docs/e2e.md
+++ b/docs/e2e.md
@@ -10,8 +10,10 @@ Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127
    ```
    make kind-e2e
    export CLUSTER_HOSTNAME=kind
-   make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.1.0
    make setup-e2e
+   make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.3.0
+
+   For running tests locally on Kind cluster, we need to disable `rayDashboardOAuthEnabled` in `codeflare-operator-config` ConfigMap and then restart CodeFlare Operator
    ```

 - **(Optional)** - Create and add `sdk-user` with limited permissions to the cluster to run through the e2e tests:
    ```
@@ -53,9 +55,13 @@ Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127
    ```

+  - Install the latest development version of kueue
+    ```
+    kubectl apply --server-side -k "github.com/opendatahub-io/kueue/config/rhoai?ref=dev"
+    ```

 - Test Phase:
-   - Once we have the codeflare-operator and kuberay-operator running and ready, we can run the e2e test on the codeflare-sdk repository:
+   - Once we have the codeflare-operator, kuberay-operator and kueue running and ready, we can run the e2e test on the codeflare-sdk repository:
     ```
     poetry install --with test,docs
     poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py
@@ -67,11 +73,18 @@ Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127
 - Setup Phase:
   - Pull the [codeflare-operator
repo](https://github.com/project-codeflare/codeflare-operator) and run the following make targets:
    ```
-   make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.1.0
+   make setup-e2e
+   make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.3.0
    ```
+
+  - Install the latest development version of kueue
+    ```
+    kubectl apply --server-side -k "github.com/opendatahub-io/kueue/config/rhoai?ref=dev"
+    ```
+
 - Test Phase:
-   - Once we have the codeflare-operator and kuberay-operator running and ready, we can run the e2e test on the codeflare-sdk repository:
+   - Once we have the codeflare-operator, kuberay-operator and kueue running and ready, we can run the e2e test on the codeflare-sdk repository:
     ```
     poetry install --with test,docs
     poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py

diff --git a/tests/e2e/kueue_resources_setup.sh b/tests/e2e/kueue_resources_setup.sh
new file mode 100755
index 00000000..23df1abf
--- /dev/null
+++ b/tests/e2e/kueue_resources_setup.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+
+name=${name:-cluster-queue-mnist}
+flavor=${flavor:-default-flavor-mnist}
+local_queue_name=${local_queue_name:-local-queue-mnist}
+namespace=$1
+
+echo "Applying Cluster Queue"
+
+cat <

Date: Wed, 10 Apr 2024 16:12:31 +0100
Subject: [PATCH 182/496] Add enableIngress and set to false by default

---
 src/codeflare_sdk/cluster/cluster.py           | 14 ++++++++++++++
 src/codeflare_sdk/templates/base-template.yaml |  1 +
 tests/test-case-no-mcad.yamls                  |  1 +
 tests/test-case-prio.yaml                      |  1 +
 tests/test-case.yaml                           |  1 +
 tests/test-default-appwrapper.yaml             |  1 +
 6 files changed, 19 insertions(+)

diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py
index 60f968d2..4e1968e9 100644
--- a/src/codeflare_sdk/cluster/cluster.py
+++ b/src/codeflare_sdk/cluster/cluster.py
@@ -547,6 +547,20 @@ def _component_resources_up(
    ):
        if self.config.write_to_file:
            with open(self.app_wrapper_yaml) as f:
                yamls = yaml.load_all(f, Loader=yaml.FullLoader)
+                for resource in yamls:
+                    enable_ingress = (
+                        resource.get("spec", {})
+                        .get("headGroupSpec", {})
+                        .get("enableIngress")
+                    )
+                    if resource["kind"] == "RayCluster" and enable_ingress is not False:
+                        name = resource["metadata"]["name"]
+                        print(
+                            f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True' or is unset."
+ ) + return + f.seek(0) # Reset file pointer to the beginning + yamls = yaml.load_all(f, Loader=yaml.FullLoader) # Reload the YAMLs _create_resources(yamls, namespace, api_instance) else: yamls = yaml.load_all(self.app_wrapper_yaml, Loader=yaml.FullLoader) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 8b14e0cd..5f6036ac 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -87,6 +87,7 @@ spec: headGroupSpec: # Kubernetes Service Type, valid values are 'ClusterIP', 'NodePort' and 'LoadBalancer' serviceType: ClusterIP + enableIngress: false # logical group name, for this called head-group, also can be functional # pod type head or worker # rayNodeType: head # Not needed since it is under the headgroup diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index e6bbcdd2..e13752a4 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -23,6 +23,7 @@ spec: upscalingMode: Default enableInTreeAutoscaling: false headGroupSpec: + enableIngress: false rayStartParams: block: 'true' dashboard-host: 0.0.0.0 diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index fd83fc3a..10e161de 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -53,6 +53,7 @@ spec: upscalingMode: Default enableInTreeAutoscaling: false headGroupSpec: + enableIngress: false rayStartParams: block: 'true' dashboard-host: 0.0.0.0 diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 14d8e1a4..78d2e4a5 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -52,6 +52,7 @@ spec: upscalingMode: Default enableInTreeAutoscaling: false headGroupSpec: + enableIngress: false rayStartParams: block: 'true' dashboard-host: 0.0.0.0 diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index 321a5d51..ecab5eac 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -50,6 +50,7 @@ spec: upscalingMode: Default enableInTreeAutoscaling: false headGroupSpec: + enableIngress: false rayStartParams: block: 'true' dashboard-host: 0.0.0.0 From a53fc1e617130c3017de50b0ed4681777993eace Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Mon, 15 Apr 2024 11:12:44 +0100 Subject: [PATCH 183/496] Convert generator to a list for YAML processing --- src/codeflare_sdk/cluster/cluster.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 4e1968e9..d6cf12f5 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -546,7 +546,7 @@ def _component_resources_up( ): if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) + yamls = list(yaml.load_all(f, Loader=yaml.FullLoader)) for resource in yamls: enable_ingress = ( resource.get("spec", {}) @@ -559,8 +559,6 @@ def _component_resources_up( f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True' or is unset." 
) return - f.seek(0) # Reset file pointer to the beginning - yamls = yaml.load_all(f, Loader=yaml.FullLoader) # Reload the YAMLs _create_resources(yamls, namespace, api_instance) else: yamls = yaml.load_all(self.app_wrapper_yaml, Loader=yaml.FullLoader) From 74cc866940bd8ab10ba0187979bec3f602d9becf Mon Sep 17 00:00:00 2001 From: Kevin Date: Wed, 17 Apr 2024 10:55:13 -0400 Subject: [PATCH 184/496] remove print of auth header Signed-off-by: Kevin --- src/codeflare_sdk/cluster/cluster.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index d6cf12f5..24cbf9a7 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -89,7 +89,6 @@ def job_client(self): if self._job_submission_client: return self._job_submission_client if is_openshift_cluster(): - print(k8client.configuration.get_api_key_with_prefix("authorization")) self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), headers=self._client_headers, From e4f4a44d2cd0ddf4eda066c2a19c71b3efe44ac8 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 19 Apr 2024 09:36:48 +0100 Subject: [PATCH 185/496] Remove patch in e2e workflow --- .github/workflows/e2e_tests.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 686047c6..f3ecc477 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -86,7 +86,6 @@ jobs: make setup-e2e echo Deploying CodeFlare operator IMG="${REGISTRY_ADDRESS}"/codeflare-operator - sed -i 's/RayDashboardOAuthEnabled: pointer.Bool(true)/RayDashboardOAuthEnabled: pointer.Bool(false)/' main.go make image-push -e IMG="${IMG}" make deploy -e IMG="${IMG}" -e ENV="e2e" kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators codeflare-operator-manager From d9ef34dae2f8eed4052f150868d51a9ce1eab8f6 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 12 Apr 2024 11:21:36 +0100 Subject: [PATCH 186/496] Removed create-cert init containers & side car container --- pyproject.toml | 1 + src/codeflare_sdk.egg-info/PKG-INFO | 2 +- src/codeflare_sdk.egg-info/SOURCES.txt | 2 - .../templates/base-template.yaml | 75 +---------- src/codeflare_sdk/utils/generate_yaml.py | 127 +----------------- tests/test-case-no-mcad.yamls | 53 ++++---- tests/test-case-prio.yaml | 54 ++++---- tests/test-case.yaml | 54 ++++---- tests/test-default-appwrapper.yaml | 54 ++++---- tests/unit_test.py | 115 ++++++++++------ 10 files changed, 189 insertions(+), 348 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c47e466a..ee50a48c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,6 +48,7 @@ pytest-timeout = "2.2.0" [tool.pytest.ini_options] filterwarnings = [ "ignore::DeprecationWarning:pkg_resources", + "ignore::DeprecationWarning", "ignore:pkg_resources is deprecated as an API:DeprecationWarning", ] markers = [ diff --git a/src/codeflare_sdk.egg-info/PKG-INFO b/src/codeflare_sdk.egg-info/PKG-INFO index c4061c62..27ec5cbf 100644 --- a/src/codeflare_sdk.egg-info/PKG-INFO +++ b/src/codeflare_sdk.egg-info/PKG-INFO @@ -1,4 +1,4 @@ Metadata-Version: 2.1 -Name: codeflare-sdk +Name: codeflare_sdk Version: 0.0.0 License-File: LICENSE diff --git a/src/codeflare_sdk.egg-info/SOURCES.txt b/src/codeflare_sdk.egg-info/SOURCES.txt index d922d0db..42541f1d 100644 --- a/src/codeflare_sdk.egg-info/SOURCES.txt +++ b/src/codeflare_sdk.egg-info/SOURCES.txt @@ -13,11 +13,9 @@ 
src/codeflare_sdk/cluster/cluster.py src/codeflare_sdk/cluster/config.py src/codeflare_sdk/cluster/model.py src/codeflare_sdk/job/__init__.py -src/codeflare_sdk/job/jobs.py src/codeflare_sdk/job/ray_jobs.py src/codeflare_sdk/utils/__init__.py src/codeflare_sdk/utils/generate_cert.py src/codeflare_sdk/utils/generate_yaml.py src/codeflare_sdk/utils/kube_api_helpers.py -src/codeflare_sdk/utils/openshift_oauth.py src/codeflare_sdk/utils/pretty_print.py diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 5f6036ac..a93ec3a7 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -117,20 +117,7 @@ spec: - "aw-kuberay" containers: # The Ray head pod - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: "0" - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - name: ray-head + - name: ray-head image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always ports: @@ -172,30 +159,7 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - initContainers: - - command: - - sh - - -c - - cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf "authorityKeyIdentifier=keyid,issuer\nbasicConstraints=CA:FALSE\nsubjectAltName = @alt_names\n[alt_names]\nDNS.1 = 127.0.0.1\nDNS.2 = localhost\nDNS.3 = ${FQ_RAY_IP}\nDNS.4 = $(awk 'END{print $1}' /etc/hosts)\nDNS.5 = rayclient-deployment-name-$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).server-name">./domain.ext && cp /home/ray/workspace/ca/* . && openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext - image: quay.io/project-codeflare/ray:latest-py39-cu118 - name: create-cert - # securityContext: - # runAsUser: 1000 - # runAsGroup: 1000 - volumeMounts: - - name: ca-vol - mountPath: "/home/ray/workspace/ca" - readOnly: true - - name: server-cert - mountPath: "/home/ray/workspace/tls" - readOnly: false volumes: - - name: ca-vol - secret: - secretName: ca-secret-deployment-name - optional: false - - name: server-cert - emptyDir: {} - name: odh-trusted-ca-cert configMap: name: odh-trusted-ca-bundle @@ -250,40 +214,9 @@ spec: operator: In values: - "aw-kuberay" - initContainers: - # the env var $RAY_IP is set by the operator if missing, with the value of the head service name - - name: create-cert - image: quay.io/project-codeflare/ray:latest-py39-cu118 - command: - - sh - - -c - - cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf "authorityKeyIdentifier=keyid,issuer\nbasicConstraints=CA:FALSE\nsubjectAltName = @alt_names\n[alt_names]\nDNS.1 = 127.0.0.1\nDNS.2 = localhost\nDNS.3 = ${FQ_RAY_IP}\nDNS.4 = $(awk 'END{print $1}' /etc/hosts)">./domain.ext && cp /home/ray/workspace/ca/* . 
&& openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext - # securityContext: - # runAsUser: 1000 - # runAsGroup: 1000 - volumeMounts: - - name: ca-vol - mountPath: "/home/ray/workspace/ca" - readOnly: true - - name: server-cert - mountPath: "/home/ray/workspace/tls" - readOnly: false containers: - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' image: quay.io/project-codeflare/ray:latest-py39-cu118 - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: "0" - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt # environment variables to set in the container.Optional. # Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ lifecycle: @@ -319,12 +252,6 @@ spec: name: odh-ca-cert subPath: odh-ca-bundle.crt volumes: - - name: ca-vol - secret: - secretName: ca-secret-deployment-name - optional: false - - name: server-cert - emptyDir: {} - name: odh-trusted-ca-cert configMap: name: odh-trusted-ca-bundle diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 2088b910..91fd6811 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -85,20 +85,6 @@ def update_names(yaml, item, appwrapper_name, cluster_name, namespace): lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name lower_meta["name"] = cluster_name lower_meta["namespace"] = namespace - lower_spec = item.get("generictemplate", {}).get("spec") - if is_openshift_cluster(): - cookie_secret_env_var = { - "name": "COOKIE_SECRET", - "valueFrom": { - "secretKeyRef": { - "key": "cookie_secret", - "name": f"{cluster_name}-oauth-config", - } - }, - } - lower_spec["headGroupSpec"]["template"]["spec"]["containers"][0]["env"].append( - cookie_secret_env_var - ) def update_labels(yaml, instascale, instance_types): @@ -291,44 +277,13 @@ def update_ca_secret(ca_secret_item, cluster_name, namespace): data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) -def enable_local_interactive(resources, cluster_name, namespace): # pragma: no cover - from ..cluster.cluster import _get_ingress_domain - - ca_secret_item = resources["resources"].get("GenericItems")[1] +def enable_local_interactive(resources): # pragma: no cover item = resources["resources"].get("GenericItems")[0] - update_ca_secret(ca_secret_item, cluster_name, namespace) - # update_ca_secret_volumes - item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"]["volumes"][0][ - "secret" - ]["secretName"] = f"ca-secret-{cluster_name}" - item["generictemplate"]["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "volumes" - ][0]["secret"]["secretName"] = f"ca-secret-{cluster_name}" - # update_tls_env - item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ - 0 - ]["env"][1]["value"] = "1" - item["generictemplate"]["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["env"][1]["value"] = "1" - # update_init_container - command = item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ - "initContainers" - ][0].get("command")[2] - - command = 
command.replace("deployment-name", cluster_name) - - domain = "" ## FIX - We can't retrieve ingress domain - move init container to CFO - command = command.replace("server-name", domain) item["generictemplate"]["metadata"]["annotations"][ "sdk.codeflare.dev/local_interactive" ] = "True" - item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ - "initContainers" - ][0].get("command")[2] = command - def del_from_list_by_name(l: list, target: typing.List[str]) -> list: return [x for x in l if x["name"] not in target] @@ -392,75 +347,6 @@ def write_user_appwrapper(user_yaml, output_file_name): print(f"Written to: {output_file_name}") -def enable_openshift_oauth(user_yaml, cluster_name, namespace): - config_check() - k8_client = api_config_handler() or client.ApiClient() - tls_mount_location = "/etc/tls/private" - oauth_port = 8443 - oauth_sa_name = f"{cluster_name}-oauth-proxy" - tls_secret_name = f"{cluster_name}-proxy-tls-secret" - tls_volume_name = "proxy-tls-secret" - port_name = "oauth-proxy" - oauth_sidecar = _create_oauth_sidecar_object( - namespace, - tls_mount_location, - oauth_port, - oauth_sa_name, - tls_volume_name, - port_name, - ) - tls_secret_volume = client.V1Volume( - name=tls_volume_name, - secret=client.V1SecretVolumeSource(secret_name=tls_secret_name), - ) - # allows for setting value of Cluster object when initializing object from an existing AppWrapper on cluster - user_yaml["metadata"]["annotations"] = user_yaml["metadata"].get("annotations", {}) - ray_headgroup_pod = user_yaml["spec"]["resources"]["GenericItems"][0][ - "generictemplate" - ]["spec"]["headGroupSpec"]["template"]["spec"] - ray_headgroup_pod["serviceAccount"] = oauth_sa_name - ray_headgroup_pod["volumes"] = ray_headgroup_pod.get("volumes", []) - - # we use a generic api client here so that the serialization function doesn't need to be mocked for unit tests - ray_headgroup_pod["volumes"].append( - client.ApiClient().sanitize_for_serialization(tls_secret_volume) - ) - ray_headgroup_pod["containers"].append( - client.ApiClient().sanitize_for_serialization(oauth_sidecar) - ) - - -def _create_oauth_sidecar_object( - namespace: str, - tls_mount_location: str, - oauth_port: int, - oauth_sa_name: str, - tls_volume_name: str, - port_name: str, -) -> client.V1Container: - return client.V1Container( - args=[ - f"--https-address=:{oauth_port}", - "--provider=openshift", - f"--openshift-service-account={oauth_sa_name}", - "--upstream=http://localhost:8265", - f"--tls-cert={tls_mount_location}/tls.crt", - f"--tls-key={tls_mount_location}/tls.key", - "--cookie-secret=$(COOKIE_SECRET)", - f'--openshift-delegate-urls={{"/":{{"resource":"pods","namespace":"{namespace}","verb":"get"}}}}', - ], - image="registry.redhat.io/openshift4/ose-oauth-proxy@sha256:1ea6a01bf3e63cdcf125c6064cbd4a4a270deaf0f157b3eabb78f60556840366", - name="oauth-proxy", - ports=[client.V1ContainerPort(container_port=oauth_port, name=port_name)], - resources=client.V1ResourceRequirements(limits=None, requests=None), - volume_mounts=[ - client.V1VolumeMount( - mount_path=tls_mount_location, name=tls_volume_name, read_only=True - ) - ], - ) - - def get_default_kueue_name(namespace: str): # If the local queue is set, use it. Otherwise, try to use the default queue. 
try: @@ -620,12 +506,13 @@ def generate_appwrapper( ) if local_interactive: - enable_local_interactive(resources, cluster_name, namespace) - else: - disable_raycluster_tls(resources["resources"]) + enable_local_interactive(resources) - if is_openshift_cluster(): - enable_openshift_oauth(user_yaml, cluster_name, namespace) + # else: + # disable_raycluster_tls(resources["resources"]) + + ca_secret_item = resources["resources"].get("GenericItems")[1] + update_ca_secret(ca_secret_item, cluster_name, namespace) directory_path = os.path.expanduser("~/.codeflare/resources/") outfile = os.path.join(directory_path, appwrapper_name + ".yaml") diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index e13752a4..bde0af91 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -41,20 +41,7 @@ spec: values: - unit-test-cluster-ray containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -81,6 +68,12 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: + - mountPath: /home/ray/workspace/ca + name: ca-vol + readOnly: true + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -136,20 +129,7 @@ spec: values: - unit-test-cluster-ray containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: @@ -168,6 +148,12 @@ spec: memory: 5G nvidia.com/gpu: 7 volumeMounts: + - mountPath: /home/ray/workspace/ca + name: ca-vol + readOnly: true + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -197,3 +183,14 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert +--- +apiVersion: v1 +data: + ca.crt: ca-field + ca.key: ca-field +kind: Secret +metadata: + labels: + odh-ray-cluster-service: unit-test-cluster-ray-head-svc + name: ca-secret-unit-test-cluster-ray + namespace: ns diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 10e161de..b3df1270 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -71,20 +71,7 @@ spec: values: - prio-test-cluster containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: 
quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -111,6 +98,12 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: + - mountPath: /home/ray/workspace/ca + name: ca-vol + readOnly: true + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -167,20 +160,7 @@ spec: values: - prio-test-cluster containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: @@ -199,6 +179,12 @@ spec: memory: 5G nvidia.com/gpu: 7 volumeMounts: + - mountPath: /home/ray/workspace/ca + name: ca-vol + readOnly: true + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -230,4 +216,16 @@ spec: optional: true name: odh-ca-cert replicas: 1 + - generictemplate: + apiVersion: v1 + data: + ca.crt: ca-field + ca.key: ca-field + kind: Secret + metadata: + labels: + odh-ray-cluster-service: prio-test-cluster-head-svc + name: ca-secret-prio-test-cluster + namespace: ns + replicas: 1 Items: [] diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 78d2e4a5..5ff66644 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -70,20 +70,7 @@ spec: values: - unit-test-cluster containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -110,6 +97,12 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: + - mountPath: /home/ray/workspace/ca + name: ca-vol + readOnly: true + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -165,20 +158,7 @@ spec: values: - unit-test-cluster containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: @@ -197,6 +177,12 @@ spec: memory: 5G nvidia.com/gpu: 7 volumeMounts: + - mountPath: /home/ray/workspace/ca + name: ca-vol + readOnly: true + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -227,4 +213,16 @@ spec: 
optional: true name: odh-ca-cert replicas: 1 + - generictemplate: + apiVersion: v1 + data: + ca.crt: ca-field + ca.key: ca-field + kind: Secret + metadata: + labels: + odh-ray-cluster-service: unit-test-cluster-head-svc + name: ca-secret-unit-test-cluster + namespace: ns + replicas: 1 Items: [] diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index ecab5eac..a4d5648c 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -59,20 +59,7 @@ spec: template: spec: containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -99,6 +86,12 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: + - mountPath: /home/ray/workspace/ca + name: ca-vol + readOnly: true + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -144,20 +137,7 @@ spec: key: value spec: containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: preStop: exec: @@ -176,6 +156,12 @@ spec: memory: 2G nvidia.com/gpu: 0 volumeMounts: + - mountPath: /home/ray/workspace/ca + name: ca-vol + readOnly: true + - mountPath: /home/ray/workspace/tls + name: server-cert + readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -205,4 +191,16 @@ spec: optional: true name: odh-ca-cert replicas: 1 + - generictemplate: + apiVersion: v1 + data: + ca.crt: ca-field + ca.key: ca-field + kind: Secret + metadata: + labels: + odh-ray-cluster-service: unit-test-default-cluster-head-svc + name: ca-secret-unit-test-default-cluster + namespace: opendatahub + replicas: 1 Items: [] diff --git a/tests/unit_test.py b/tests/unit_test.py index 935cdd10..19a92081 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -75,6 +75,7 @@ is_openshift_cluster, read_template, enable_local_interactive, + write_components, ) import openshift @@ -260,9 +261,65 @@ def test_config_creation(): assert config.local_interactive == False +def ca_secret_support(path, mcad: bool): + # Given that the secret is always random we need to set it to a static value for the tests to pass + if mcad: + with open(path, "r") as file: + try: + yaml_file = yaml.safe_load(file) + except yaml.YAMLError as exc: + print(exc) + resources = yaml_file.get("spec", "resources") + ca_secret_item = resources["resources"].get("GenericItems")[1] + data = ca_secret_item.get("generictemplate", {}).get("data") + data["ca.key"] = "ca-field" + data["ca.crt"] = "ca-field" + with open(path, "w") as outfile: + yaml.dump(yaml_file, outfile, default_flow_style=False) + else: + # Load the YAML 
file + with open(path, "r") as f: + data = list(yaml.safe_load_all(f)) + + # Find the Secret entry and update the fields + for item in data: + if item.get("kind") == "Secret": + item["data"]["ca.crt"] = "ca-field" + item["data"]["ca.key"] = "ca-field" + break + with open(path, "w") as f: + for item in data: + f.write("---\n") + yaml.dump(item, f, default_flow_style=False) + + +def ca_secret_support_no_write(yaml_file, mcad: bool): + if mcad: + file = yaml.safe_load(yaml_file) + resources = file.get("spec", "resources") + + ca_secret_item = resources["resources"].get("GenericItems")[1] + data = ca_secret_item.get("generictemplate", {}).get("data") + data["ca.key"] = "ca-field" + data["ca.crt"] = "ca-field" + return file + + else: + data = list(yaml.safe_load_all(yaml_file)) + for item in data: + if item.get("kind") == "Secret": + item["data"]["ca.crt"] = "ca-field" + item["data"]["ca.key"] = "ca-field" + break + + resources = "---\n" + "---\n".join([yaml.dump(item) for item in data]) + return resources + + def test_cluster_creation(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") cluster = createClusterWithConfig(mocker) + ca_secret_support(f"{aw_dir}unit-test-cluster.yaml", True) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster.yaml" assert cluster.app_wrapper_name == "unit-test-cluster" assert filecmp.cmp( @@ -326,8 +383,10 @@ def test_cluster_creation_no_mcad(mocker): config.write_to_file = True config.mcad = False cluster = Cluster(config) + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" assert cluster.app_wrapper_name == "unit-test-cluster-ray" + ca_secret_support(cluster.app_wrapper_yaml, False) assert filecmp.cmp( f"{aw_dir}unit-test-cluster-ray.yaml", f"{parent}/tests/test-case-no-mcad.yamls", @@ -349,6 +408,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): config.write_to_file = True config.local_queue = "local-queue-default" cluster = Cluster(config) + ca_secret_support(cluster.app_wrapper_yaml, False) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" assert cluster.app_wrapper_name == "unit-test-cluster-ray" assert filecmp.cmp( @@ -377,7 +437,11 @@ def test_cluster_creation_no_mcad_local_queue(mocker): cluster = Cluster(config) test_resources = [] expected_resources = [] - test_aw = yaml.load_all(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) + + test_aw = yaml.load_all( + ca_secret_support_no_write(cluster.app_wrapper_yaml, False), + Loader=yaml.FullLoader, + ) for resource in test_aw: test_resources.append(resource) with open( @@ -404,6 +468,7 @@ def test_cluster_creation_priority(mocker): return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, ) cluster = Cluster(config) + ca_secret_support(cluster.app_wrapper_yaml, True) assert cluster.app_wrapper_yaml == f"{aw_dir}prio-test-cluster.yaml" assert cluster.app_wrapper_name == "prio-test-cluster" assert filecmp.cmp( @@ -425,7 +490,8 @@ def test_default_cluster_creation(mocker): mcad=True, ) cluster = Cluster(default_config) - test_aw = yaml.safe_load(cluster.app_wrapper_yaml) + test_aw = ca_secret_support_no_write(cluster.app_wrapper_yaml, True) + with open( f"{parent}/tests/test-default-appwrapper.yaml", ) as f: @@ -534,16 +600,12 @@ def test_cluster_up_down(mocker): def test_cluster_up_down_no_mcad(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", 
return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, - ) mocker.patch( "kubernetes.client.CustomObjectsApi.create_namespaced_custom_object", side_effect=arg_check_apply_effect, @@ -552,6 +614,12 @@ def test_cluster_up_down_no_mcad(mocker): "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object", side_effect=arg_check_del_effect, ) + mocker.patch( + "kubernetes.client.CoreV1Api.create_namespaced_secret", + ) + mocker.patch( + "kubernetes.client.CoreV1Api.delete_namespaced_secret", + ) mocker.patch( "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", return_value={"items": []}, @@ -3128,37 +3196,6 @@ def test_export_env(): # assert ca_secret["metadata"]["name"] == f"ca-secret-{cluster_name}" # assert ca_secret["metadata"]["namespace"] == namespace - -def test_gen_app_wrapper_with_oauth(mocker: MockerFixture): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "codeflare_sdk.cluster.cluster.get_current_namespace", - return_value="opendatahub", - ) - mocker.patch( - "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=True - ) - write_user_appwrapper = MagicMock() - mocker.patch( - "codeflare_sdk.utils.generate_yaml.write_user_appwrapper", write_user_appwrapper - ) - Cluster( - ClusterConfiguration( - "test_cluster", - image="quay.io/project-codeflare/ray:latest-py39-cu118", - write_to_file=True, - mcad=True, - ) - ) - user_yaml = write_user_appwrapper.call_args.args[0] - assert any( - container["name"] == "oauth-proxy" - for container in user_yaml["spec"]["resources"]["GenericItems"][0][ - "generictemplate" - ]["spec"]["headGroupSpec"]["template"]["spec"]["containers"] - ) - - """ Ray Jobs tests """ From 65e19053369f1f23010ad66f22ccc85dccc27781 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 18 Apr 2024 17:12:03 +0100 Subject: [PATCH 187/496] Review changes --- pyproject.toml | 1 - src/codeflare_sdk.egg-info/PKG-INFO | 2 +- src/codeflare_sdk/utils/generate_yaml.py | 3 --- 3 files changed, 1 insertion(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ee50a48c..c47e466a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,7 +48,6 @@ pytest-timeout = "2.2.0" [tool.pytest.ini_options] filterwarnings = [ "ignore::DeprecationWarning:pkg_resources", - "ignore::DeprecationWarning", "ignore:pkg_resources is deprecated as an API:DeprecationWarning", ] markers = [ diff --git a/src/codeflare_sdk.egg-info/PKG-INFO b/src/codeflare_sdk.egg-info/PKG-INFO index 27ec5cbf..c4061c62 100644 --- a/src/codeflare_sdk.egg-info/PKG-INFO +++ b/src/codeflare_sdk.egg-info/PKG-INFO @@ -1,4 +1,4 @@ Metadata-Version: 2.1 -Name: codeflare_sdk +Name: codeflare-sdk Version: 0.0.0 License-File: LICENSE diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 91fd6811..7c00defc 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -508,9 +508,6 @@ def generate_appwrapper( if local_interactive: enable_local_interactive(resources) - # else: - # disable_raycluster_tls(resources["resources"]) - ca_secret_item = resources["resources"].get("GenericItems")[1] update_ca_secret(ca_secret_item, cluster_name, namespace) From 
d994331dfe8de2b2c283e09f00f879fda4ba9197 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 18 Apr 2024 18:46:51 +0100 Subject: [PATCH 188/496] Remove local_interactive --- docs/cluster-configuration.md | 28 ---- src/codeflare_sdk/cluster/cluster.py | 17 +-- src/codeflare_sdk/cluster/config.py | 1 - .../templates/base-template.yaml | 2 - src/codeflare_sdk/utils/generate_yaml.py | 12 -- tests/test-case-bad.yaml | 2 - tests/test-case-no-mcad.yamls | 2 - tests/test-case-prio.yaml | 2 - tests/test-case.yaml | 2 - tests/test-default-appwrapper.yaml | 2 - tests/unit_test.py | 128 ------------------ 11 files changed, 2 insertions(+), 196 deletions(-) diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index bb058fa4..7684db2c 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -22,39 +22,11 @@ cluster = Cluster(ClusterConfiguration( image="quay.io/project-codeflare/ray:latest-py39-cu118", # Mandatory Field instascale=False, # Default False machine_types=["m5.xlarge", "g4dn.xlarge"], - ingress_domain="example.com" # Default None, Mandatory for Vanilla Kubernetes Clusters - ingress_domain is ignored on OpenShift Clusters as a route is created. - local_interactive=False, # Default False )) ``` -Note: On OpenShift, the `ingress_domain` is only required when `local_interactive` is enabled. - This may change soon. Upon creating a cluster configuration with `mcad=True` an appwrapper will be created featuring the Ray Cluster and any Routes, Ingresses or Secrets that are needed to be created along side it.
    From there a user can call `cluster.up()` and `cluster.down()` to create and remove the appwrapper, thus creating and removing the Ray Cluster. In cases where `mcad=False`, a YAML file will be created containing the individual Ray Cluster, Route/Ingress and Secret resources.
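    For illustration only, a minimal sketch of this lifecycle might look like the following (the cluster name and namespace are placeholders; the configuration fields mirror those documented above):

```
from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration

# Illustrative values only; with mcad=True an appwrapper is generated,
# with mcad=False the RayCluster and related resources are created directly.
cluster = Cluster(ClusterConfiguration(
    name="example-cluster",      # placeholder name
    namespace="default",         # placeholder namespace
    num_workers=2,
    min_cpus=1,
    max_cpus=1,
    min_memory=2,
    max_memory=2,
    num_gpus=0,
    image="quay.io/project-codeflare/ray:latest-py39-cu118",
    mcad=True,
    write_to_file=True,          # persists the generated YAML under ~/.codeflare/resources/
))

cluster.up()    # submit the appwrapper, creating the Ray Cluster
cluster.down()  # delete the appwrapper, removing the Ray Cluster
```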
    The Ray Cluster and service will be created by KubeRay directly and the other components will be individually created. - -## Ray Cluster Configuration in a Vanilla Kubernetes environment (Non-OpenShift) -To create a Ray Cluster using the CodeFlare SDK in a Vanilla Kubernetes environment an `ingress_domain` must be passed in the Cluster Configuration. -This is used for the creation of the Ray Dashboard and Client ingresses. - -`ingress_options` can be passed to create a custom Ray Dashboard ingress, `ingress_domain` is still a required variable for the Client route/ingress. -An example of `ingress_options` would look like this. - -``` -ingress_options = { - "ingresses": [ - { - "ingressName": "", - "port": , - "pathType": "", - "path": "", - "host":"", - "annotations": { - "foo": "bar", - "foo": "bar", - } - } - ] -} -``` diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 24cbf9a7..68474eea 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -179,7 +179,6 @@ def create_app_wrapper(self): mcad = self.config.mcad instance_types = self.config.machine_types env = self.config.envs - local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority write_to_file = self.config.write_to_file @@ -203,7 +202,6 @@ def create_app_wrapper(self): mcad=mcad, instance_types=instance_types, env=env, - local_interactive=local_interactive, image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, @@ -479,13 +477,6 @@ def from_k8_cluster_object( verify_tls=True, ): config_check() - if ( - rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"] - == "True" - ): - local_interactive = True - else: - local_interactive = False machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -526,7 +517,6 @@ def from_k8_cluster_object( image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], - local_interactive=local_interactive, mcad=mcad, write_to_file=write_to_file, verify_tls=verify_tls, @@ -534,11 +524,8 @@ def from_k8_cluster_object( return Cluster(cluster_config) def local_client_url(self): - if self.config.local_interactive == True: - ingress_domain = _get_ingress_domain(self) - return f"ray://{ingress_domain}" - else: - return "None" + ingress_domain = _get_ingress_domain(self) + return f"ray://{ingress_domain}" def _component_resources_up( self, namespace: str, api_instance: client.CustomObjectsApi diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index f6bcac89..e4d046f9 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -49,7 +49,6 @@ class ClusterConfiguration: mcad: bool = False envs: dict = field(default_factory=dict) image: str = "" - local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None write_to_file: bool = False diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index a93ec3a7..d034ab3b 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -40,8 +40,6 @@ spec: apiVersion: ray.io/v1 kind: RayCluster metadata: - annotations: - sdk.codeflare.dev/local_interactive: "False" labels: workload.codeflare.dev/appwrapper: 
"aw-kuberay" controller-tools.k8s.io: "1.0" diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 7c00defc..6d59196b 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -277,14 +277,6 @@ def update_ca_secret(ca_secret_item, cluster_name, namespace): data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) -def enable_local_interactive(resources): # pragma: no cover - item = resources["resources"].get("GenericItems")[0] - - item["generictemplate"]["metadata"]["annotations"][ - "sdk.codeflare.dev/local_interactive" - ] = "True" - - def del_from_list_by_name(l: list, target: typing.List[str]) -> list: return [x for x in l if x["name"] not in target] @@ -454,7 +446,6 @@ def generate_appwrapper( mcad: bool, instance_types: list, env, - local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int, @@ -505,9 +496,6 @@ def generate_appwrapper( head_gpus, ) - if local_interactive: - enable_local_interactive(resources) - ca_secret_item = resources["resources"].get("GenericItems")[1] update_ca_secret(ca_secret_item, cluster_name, namespace) diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index aeccf519..6e969e01 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -32,8 +32,6 @@ spec: apiVersion: ray.io/v1 kind: RayCluster metadata: - annotations: - sdk.codeflare.dev/local_interactive: 'False' labels: workload.codeflare.dev/appwrapper: unit-test-cluster controller-tools.k8s.io: '1.0' diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index bde0af91..5ce787b2 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -2,8 +2,6 @@ apiVersion: ray.io/v1 kind: RayCluster metadata: - annotations: - sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' kueue.x-k8s.io/queue-name: local-queue-default diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index b3df1270..4c715fe0 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -32,8 +32,6 @@ spec: apiVersion: ray.io/v1 kind: RayCluster metadata: - annotations: - sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: prio-test-cluster diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 5ff66644..120f1907 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -31,8 +31,6 @@ spec: apiVersion: ray.io/v1 kind: RayCluster metadata: - annotations: - sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: unit-test-cluster diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index a4d5648c..9a7c581c 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -29,8 +29,6 @@ spec: apiVersion: ray.io/v1 kind: RayCluster metadata: - annotations: - sdk.codeflare.dev/local_interactive: 'False' labels: controller-tools.k8s.io: '1.0' workload.codeflare.dev/appwrapper: unit-test-default-cluster diff --git a/tests/unit_test.py b/tests/unit_test.py index 19a92081..74133cc3 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -74,7 +74,6 @@ gen_names, is_openshift_cluster, read_template, - enable_local_interactive, write_components, ) @@ -258,7 +257,6 @@ def test_config_creation(): assert config.image_pull_secrets == ["unit-test-pull-secret"] assert config.dispatch_priority 
== None assert config.mcad == True - assert config.local_interactive == False def ca_secret_support(path, mcad: bool): @@ -746,7 +744,6 @@ def test_local_client_url(mocker): cluster_config = ClusterConfiguration( name="unit-test-cluster-localinter", namespace="ns", - local_interactive=True, write_to_file=True, ) cluster = Cluster(cluster_config) @@ -1062,9 +1059,6 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "metadata": { "creationTimestamp": "2024-03-05T09:55:37Z", "generation": 1, - "annotations": { - "sdk.codeflare.dev/local_interactive": "True", - }, "labels": { "appwrapper.mcad.ibm.com": "quicktest", "controller-tools.k8s.io": "1.0", @@ -1874,9 +1868,6 @@ def get_aw_obj(group, version, namespace, plural): "apiVersion": "ray.io/v1", "kind": "RayCluster", "metadata": { - "annotations": { - "sdk.codeflare.dev/local_interactive": "False" - }, "labels": { "workload.codeflare.dev/appwrapper": "quicktest1", "controller-tools.k8s.io": "1.0", @@ -2204,9 +2195,6 @@ def get_aw_obj(group, version, namespace, plural): "apiVersion": "ray.io/v1", "kind": "RayCluster", "metadata": { - "annotations": { - "sdk.codeflare.dev/local_interactive": "False" - }, "labels": { "workload.codeflare.dev/appwrapper": "quicktest2", "controller-tools.k8s.io": "1.0", @@ -2518,7 +2506,6 @@ def custom_side_effect(group, version, namespace, plural, **kwargs): assert cluster_config.min_cpus == 1 and cluster_config.max_cpus == 1 assert cluster_config.min_memory == 2 and cluster_config.max_memory == 2 assert cluster_config.num_gpus == 0 - assert cluster_config.local_interactive == True assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -2552,7 +2539,6 @@ def test_get_cluster(mocker): assert cluster_config.min_memory == 2 and cluster_config.max_memory == 2 assert cluster_config.num_gpus == 0 assert cluster_config.instascale - assert cluster_config.local_interactive assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -3082,120 +3068,6 @@ def test_export_env(): ) -# def test_enable_local_interactive(mocker): -# template = f"{parent}/src/codeflare_sdk/templates/base-template.yaml" -# user_yaml = read_template(template) -# aw_spec = user_yaml.get("spec", None) -# cluster_name = "test-enable-local" -# namespace = "default" -# ingress_domain = "mytest.domain" -# mocker.patch("kubernetes.client.ApisApi.get_api_versions") -# mocker.patch( -# "codeflare_sdk.utils.generate_yaml.is_openshift_cluster", return_value=False -# ) -# volume_mounts = [ -# {"name": "ca-vol", "mountPath": "/home/ray/workspace/ca", "readOnly": True}, -# { -# "name": "server-cert", -# "mountPath": "/home/ray/workspace/tls", -# "readOnly": False, -# }, -# ] -# volumes = [ -# { -# "name": "ca-vol", -# "secret": {"secretName": "ca-secret-test-enable-local"}, -# "optional": False, -# }, -# {"name": "server-cert", "emptyDir": {}}, -# { -# "name": "odh-trusted-ca-cert", -# "configMap": { -# "name": "odh-trusted-ca-bundle", -# "items": [ -# {"key": "ca-bundle.crt", "path": "odh-trusted-ca-bundle.crt"} -# ], -# "optional": True, -# }, -# }, -# { -# "name": "odh-ca-cert", -# "configMap": { -# "name": "odh-trusted-ca-bundle", -# "items": [{"key": "odh-ca-bundle.crt", "path": "odh-ca-bundle.crt"}], -# "optional": True, -# }, -# }, -# ] -# tls_env = [ -# {"name": "RAY_USE_TLS", "value": "1"}, -# {"name": "RAY_TLS_SERVER_CERT", "value": "/home/ray/workspace/tls/server.crt"}, -# {"name": "RAY_TLS_SERVER_KEY", 
"value": "/home/ray/workspace/tls/server.key"}, -# {"name": "RAY_TLS_CA_CERT", "value": "/home/ray/workspace/tls/ca.crt"}, -# ] -# assert aw_spec != None -# enable_local_interactive(aw_spec, cluster_name, namespace, ingress_domain) -# head_group_spec = aw_spec["resources"]["GenericItems"][0]["generictemplate"][ -# "spec" -# ]["headGroupSpec"] -# worker_group_spec = aw_spec["resources"]["GenericItems"][0]["generictemplate"][ -# "spec" -# ]["workerGroupSpecs"] -# ca_secret = aw_spec["resources"]["GenericItems"][1]["generictemplate"] -# # At a minimal, make sure the following items are presented in the appwrapper spec.resources. -# # 1. headgroup has the initContainers command to generated TLS cert from the mounted CA cert. -# # Note: In this particular command, the DNS.5 in [alt_name] must match the exposed local_client_url: rayclient-{cluster_name}.{namespace}.{ingress_domain} -# assert ( -# head_group_spec["template"]["spec"]["initContainers"][0]["command"][2] -# == f"cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf \"authorityKeyIdentifier=keyid,issuer\\nbasicConstraints=CA:FALSE\\nsubjectAltName = @alt_names\\n[alt_names]\\nDNS.1 = 127.0.0.1\\nDNS.2 = localhost\\nDNS.3 = ${{FQ_RAY_IP}}\\nDNS.4 = $(awk 'END{{print $1}}' /etc/hosts)\\nDNS.5 = rayclient-{cluster_name}-$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace).{ingress_domain}\">./domain.ext && cp /home/ray/workspace/ca/* . && openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext" -# ) -# assert ( -# head_group_spec["template"]["spec"]["initContainers"][0]["volumeMounts"] -# == volume_mounts -# ) -# assert head_group_spec["template"]["spec"]["volumes"] == volumes - -# # 2. workerGroupSpec has the initContainers command to generated TLS cert from the mounted CA cert. -# assert ( -# worker_group_spec[0]["template"]["spec"]["initContainers"][0]["command"][2] -# == "cd /home/ray/workspace/tls && openssl req -nodes -newkey rsa:2048 -keyout server.key -out server.csr -subj '/CN=ray-head' && printf \"authorityKeyIdentifier=keyid,issuer\\nbasicConstraints=CA:FALSE\\nsubjectAltName = @alt_names\\n[alt_names]\\nDNS.1 = 127.0.0.1\\nDNS.2 = localhost\\nDNS.3 = ${FQ_RAY_IP}\\nDNS.4 = $(awk 'END{print $1}' /etc/hosts)\">./domain.ext && cp /home/ray/workspace/ca/* . && openssl x509 -req -CA ca.crt -CAkey ca.key -in server.csr -out server.crt -days 365 -CAcreateserial -extfile domain.ext" -# ) -# assert ( -# worker_group_spec[0]["template"]["spec"]["initContainers"][0]["volumeMounts"] -# == volume_mounts -# ) -# assert worker_group_spec[0]["template"]["spec"]["volumes"] == volumes - -# # 3. Required Envs to enable TLS encryption between head and workers -# for i in range(len(tls_env)): -# assert ( -# head_group_spec["template"]["spec"]["containers"][0]["env"][i + 1]["name"] -# == tls_env[i]["name"] -# ) -# assert ( -# head_group_spec["template"]["spec"]["containers"][0]["env"][i + 1]["value"] -# == tls_env[i]["value"] -# ) -# assert ( -# worker_group_spec[0]["template"]["spec"]["containers"][0]["env"][i + 1][ -# "name" -# ] -# == tls_env[i]["name"] -# ) -# assert ( -# worker_group_spec[0]["template"]["spec"]["containers"][0]["env"][i + 1][ -# "value" -# ] -# == tls_env[i]["value"] -# ) - -# # 4. 
Secret with ca.crt and ca.key -# assert ca_secret["kind"] == "Secret" -# assert ca_secret["data"]["ca.crt"] != None -# assert ca_secret["data"]["ca.key"] != None -# assert ca_secret["metadata"]["name"] == f"ca-secret-{cluster_name}" -# assert ca_secret["metadata"]["namespace"] == namespace - """ Ray Jobs tests """ From d2693d1f5ac2653dbc1d9738a5b9246ad2c6084c Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 19 Apr 2024 13:52:54 +0100 Subject: [PATCH 189/496] Removed ca generation from SDK --- src/codeflare_sdk/cluster/cluster.py | 13 --- .../templates/base-template.yaml | 12 --- src/codeflare_sdk/utils/generate_yaml.py | 60 ------------- tests/test-case-no-mcad.yamls | 11 --- tests/test-case-prio.yaml | 12 --- tests/test-case.yaml | 12 --- tests/test-default-appwrapper.yaml | 12 --- tests/unit_test.py | 84 ++----------------- 8 files changed, 8 insertions(+), 208 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 68474eea..295332ae 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -665,13 +665,6 @@ def _delete_resources( plural="rayclusters", name=name, ) - elif resource["kind"] == "Secret": - name = resource["metadata"]["name"] - secret_instance = client.CoreV1Api(api_config_handler()) - secret_instance.delete_namespaced_secret( - namespace=namespace, - name=name, - ) def _create_resources(yamls, namespace: str, api_instance: client.CustomObjectsApi): @@ -684,12 +677,6 @@ def _create_resources(yamls, namespace: str, api_instance: client.CustomObjectsA plural="rayclusters", body=resource, ) - elif resource["kind"] == "Secret": - secret_instance = client.CoreV1Api(api_config_handler()) - secret_instance.create_namespaced_secret( - namespace=namespace, - body=resource, - ) def _check_aw_exists(name: str, namespace: str) -> bool: diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index d034ab3b..2b83a8a5 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -264,15 +264,3 @@ spec: - key: odh-ca-bundle.crt path: odh-ca-bundle.crt optional: true - - replicas: 1 - generictemplate: - apiVersion: v1 - data: - ca.crt: generated_crt - ca.key: generated_key - kind: Secret - metadata: - name: ca-secret-deployment-name - labels: - # allows me to return name of service that Ray operator creates - odh-ray-cluster-service: deployment-name-head-svc diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 6d59196b..95c3d04f 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -266,67 +266,10 @@ def update_nodes( update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) -def update_ca_secret(ca_secret_item, cluster_name, namespace): - from . 
import generate_cert - - metadata = ca_secret_item.get("generictemplate", {}).get("metadata") - metadata["name"] = f"ca-secret-{cluster_name}" - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - data = ca_secret_item.get("generictemplate", {}).get("data") - data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) - - def del_from_list_by_name(l: list, target: typing.List[str]) -> list: return [x for x in l if x["name"] not in target] -def disable_raycluster_tls(resources): - generic_template_spec = resources["GenericItems"][0]["generictemplate"]["spec"] - - headGroupTemplateSpec = generic_template_spec["headGroupSpec"]["template"]["spec"] - headGroupTemplateSpec["volumes"] = del_from_list_by_name( - headGroupTemplateSpec.get("volumes", []), - ["ca-vol", "server-cert"], - ) - - c: dict - for c in generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"]: - c["volumeMounts"] = del_from_list_by_name( - c.get("volumeMounts", []), ["ca-vol", "server-cert"] - ) - - if "initContainers" in generic_template_spec["headGroupSpec"]["template"]["spec"]: - del generic_template_spec["headGroupSpec"]["template"]["spec"]["initContainers"] - - for workerGroup in generic_template_spec.get("workerGroupSpecs", []): - workerGroupSpec = workerGroup["template"]["spec"] - workerGroupSpec["volumes"] = del_from_list_by_name( - workerGroupSpec.get("volumes", []), - ["ca-vol", "server-cert"], - ) - for c in workerGroup["template"]["spec"].get("containers", []): - c["volumeMounts"] = del_from_list_by_name( - c.get("volumeMounts", []), ["ca-vol", "server-cert"] - ) - - del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ] - - updated_items = [] - for i in resources["GenericItems"][:]: - if "rayclient-deployment-ingress" in i["generictemplate"]["metadata"]["name"]: - continue - if "rayclient-deployment-route" in i["generictemplate"]["metadata"]["name"]: - continue - if "ca-secret-deployment-name" in i["generictemplate"]["metadata"]["name"]: - continue - updated_items.append(i) - - resources["GenericItems"] = updated_items - - def write_user_appwrapper(user_yaml, output_file_name): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -496,9 +439,6 @@ def generate_appwrapper( head_gpus, ) - ca_secret_item = resources["resources"].get("GenericItems")[1] - update_ca_secret(ca_secret_item, cluster_name, namespace) - directory_path = os.path.expanduser("~/.codeflare/resources/") outfile = os.path.join(directory_path, appwrapper_name + ".yaml") diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 5ce787b2..1883e9c5 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -181,14 +181,3 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert ---- -apiVersion: v1 -data: - ca.crt: ca-field - ca.key: ca-field -kind: Secret -metadata: - labels: - odh-ray-cluster-service: unit-test-cluster-ray-head-svc - name: ca-secret-unit-test-cluster-ray - namespace: ns diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 4c715fe0..13485ed0 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -214,16 +214,4 @@ spec: optional: true name: odh-ca-cert replicas: 1 - - generictemplate: - apiVersion: v1 - data: - ca.crt: ca-field - ca.key: ca-field - kind: Secret - metadata: - labels: - odh-ray-cluster-service: prio-test-cluster-head-svc - name: ca-secret-prio-test-cluster - 
namespace: ns - replicas: 1 Items: [] diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 120f1907..060c354c 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -211,16 +211,4 @@ spec: optional: true name: odh-ca-cert replicas: 1 - - generictemplate: - apiVersion: v1 - data: - ca.crt: ca-field - ca.key: ca-field - kind: Secret - metadata: - labels: - odh-ray-cluster-service: unit-test-cluster-head-svc - name: ca-secret-unit-test-cluster - namespace: ns - replicas: 1 Items: [] diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index 9a7c581c..14261d0c 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -189,16 +189,4 @@ spec: optional: true name: odh-ca-cert replicas: 1 - - generictemplate: - apiVersion: v1 - data: - ca.crt: ca-field - ca.key: ca-field - kind: Secret - metadata: - labels: - odh-ray-cluster-service: unit-test-default-cluster-head-svc - name: ca-secret-unit-test-default-cluster - namespace: opendatahub - replicas: 1 Items: [] diff --git a/tests/unit_test.py b/tests/unit_test.py index 74133cc3..3892f8e5 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -259,65 +259,9 @@ def test_config_creation(): assert config.mcad == True -def ca_secret_support(path, mcad: bool): - # Given that the secret is always random we need to set it to a static value for the tests to pass - if mcad: - with open(path, "r") as file: - try: - yaml_file = yaml.safe_load(file) - except yaml.YAMLError as exc: - print(exc) - resources = yaml_file.get("spec", "resources") - ca_secret_item = resources["resources"].get("GenericItems")[1] - data = ca_secret_item.get("generictemplate", {}).get("data") - data["ca.key"] = "ca-field" - data["ca.crt"] = "ca-field" - with open(path, "w") as outfile: - yaml.dump(yaml_file, outfile, default_flow_style=False) - else: - # Load the YAML file - with open(path, "r") as f: - data = list(yaml.safe_load_all(f)) - - # Find the Secret entry and update the fields - for item in data: - if item.get("kind") == "Secret": - item["data"]["ca.crt"] = "ca-field" - item["data"]["ca.key"] = "ca-field" - break - with open(path, "w") as f: - for item in data: - f.write("---\n") - yaml.dump(item, f, default_flow_style=False) - - -def ca_secret_support_no_write(yaml_file, mcad: bool): - if mcad: - file = yaml.safe_load(yaml_file) - resources = file.get("spec", "resources") - - ca_secret_item = resources["resources"].get("GenericItems")[1] - data = ca_secret_item.get("generictemplate", {}).get("data") - data["ca.key"] = "ca-field" - data["ca.crt"] = "ca-field" - return file - - else: - data = list(yaml.safe_load_all(yaml_file)) - for item in data: - if item.get("kind") == "Secret": - item["data"]["ca.crt"] = "ca-field" - item["data"]["ca.key"] = "ca-field" - break - - resources = "---\n" + "---\n".join([yaml.dump(item) for item in data]) - return resources - - def test_cluster_creation(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") cluster = createClusterWithConfig(mocker) - ca_secret_support(f"{aw_dir}unit-test-cluster.yaml", True) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster.yaml" assert cluster.app_wrapper_name == "unit-test-cluster" assert filecmp.cmp( @@ -384,7 +328,6 @@ def test_cluster_creation_no_mcad(mocker): assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" assert cluster.app_wrapper_name == "unit-test-cluster-ray" - ca_secret_support(cluster.app_wrapper_yaml, False) assert filecmp.cmp( 
f"{aw_dir}unit-test-cluster-ray.yaml", f"{parent}/tests/test-case-no-mcad.yamls", @@ -406,7 +349,6 @@ def test_cluster_creation_no_mcad_local_queue(mocker): config.write_to_file = True config.local_queue = "local-queue-default" cluster = Cluster(config) - ca_secret_support(cluster.app_wrapper_yaml, False) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" assert cluster.app_wrapper_name == "unit-test-cluster-ray" assert filecmp.cmp( @@ -428,27 +370,18 @@ def test_cluster_creation_no_mcad_local_queue(mocker): machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], image="quay.io/project-codeflare/ray:latest-py39-cu118", - write_to_file=False, + write_to_file=True, mcad=False, local_queue="local-queue-default", ) cluster = Cluster(config) - test_resources = [] - expected_resources = [] - - test_aw = yaml.load_all( - ca_secret_support_no_write(cluster.app_wrapper_yaml, False), - Loader=yaml.FullLoader, - ) - for resource in test_aw: - test_resources.append(resource) - with open( + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" + assert cluster.app_wrapper_name == "unit-test-cluster-ray" + assert filecmp.cmp( + f"{aw_dir}unit-test-cluster-ray.yaml", f"{parent}/tests/test-case-no-mcad.yamls", - ) as f: - default_aw = yaml.load_all(f, Loader=yaml.FullLoader) - for resource in default_aw: - expected_resources.append(resource) - assert test_resources == expected_resources + shallow=True, + ) def test_cluster_creation_priority(mocker): @@ -466,7 +399,6 @@ def test_cluster_creation_priority(mocker): return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, ) cluster = Cluster(config) - ca_secret_support(cluster.app_wrapper_yaml, True) assert cluster.app_wrapper_yaml == f"{aw_dir}prio-test-cluster.yaml" assert cluster.app_wrapper_name == "prio-test-cluster" assert filecmp.cmp( @@ -488,7 +420,7 @@ def test_default_cluster_creation(mocker): mcad=True, ) cluster = Cluster(default_config) - test_aw = ca_secret_support_no_write(cluster.app_wrapper_yaml, True) + test_aw = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) with open( f"{parent}/tests/test-default-appwrapper.yaml", From 24a0b6e2e73e2e9e4a51e7524709f80ae51ec022 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 19 Apr 2024 15:23:31 +0100 Subject: [PATCH 190/496] Removed ca volume mounts --- src/codeflare_sdk/templates/base-template.yaml | 12 ------------ tests/test-case-no-mcad.yamls | 12 ------------ tests/test-case-prio.yaml | 12 ------------ tests/test-case.yaml | 12 ------------ tests/test-default-appwrapper.yaml | 12 ------------ 5 files changed, 60 deletions(-) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 2b83a8a5..356e3494 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -139,12 +139,6 @@ spec: memory: "8G" nvidia.com/gpu: 0 volumeMounts: - - name: ca-vol - mountPath: "/home/ray/workspace/ca" - readOnly: true - - name: server-cert - mountPath: "/home/ray/workspace/tls" - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -231,12 +225,6 @@ spec: memory: "12G" nvidia.com/gpu: "1" volumeMounts: - - name: ca-vol - mountPath: "/home/ray/workspace/ca" - readOnly: true - - name: server-cert - mountPath: "/home/ray/workspace/tls" - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert 
subPath: odh-trusted-ca-bundle.crt diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 1883e9c5..aaf9324e 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -66,12 +66,6 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: - - mountPath: /home/ray/workspace/ca - name: ca-vol - readOnly: true - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -146,12 +140,6 @@ spec: memory: 5G nvidia.com/gpu: 7 volumeMounts: - - mountPath: /home/ray/workspace/ca - name: ca-vol - readOnly: true - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml index 13485ed0..a4d6e68f 100644 --- a/tests/test-case-prio.yaml +++ b/tests/test-case-prio.yaml @@ -96,12 +96,6 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: - - mountPath: /home/ray/workspace/ca - name: ca-vol - readOnly: true - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -177,12 +171,6 @@ spec: memory: 5G nvidia.com/gpu: 7 volumeMounts: - - mountPath: /home/ray/workspace/ca - name: ca-vol - readOnly: true - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 060c354c..b97d12a4 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -95,12 +95,6 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: - - mountPath: /home/ray/workspace/ca - name: ca-vol - readOnly: true - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -175,12 +169,6 @@ spec: memory: 5G nvidia.com/gpu: 7 volumeMounts: - - mountPath: /home/ray/workspace/ca - name: ca-vol - readOnly: true - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index 14261d0c..c390f619 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -84,12 +84,6 @@ spec: memory: 8G nvidia.com/gpu: 0 volumeMounts: - - mountPath: /home/ray/workspace/ca - name: ca-vol - readOnly: true - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -154,12 +148,6 @@ spec: memory: 2G nvidia.com/gpu: 0 volumeMounts: - - mountPath: /home/ray/workspace/ca - name: ca-vol - readOnly: true - - mountPath: /home/ray/workspace/tls - name: server-cert - readOnly: true - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt From 3e7b39e2f03f900c75b13a8a36767e6c2c392414 Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Fri, 19 Apr 2024 16:27:51 +0200 Subject: [PATCH 191/496] Fix Notebook 
sync release workflow trigger --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index b3f374d0..d5ad965c 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -106,7 +106,7 @@ jobs: gh workflow run odh-notebooks-sync.yml \ --repo ${{ github.event.inputs.codeflare-repository-organization }}/codeflare-sdk \ --ref ${{ github.ref }} \ - --field upstream-repository-organization=opendatahub-io + --field upstream-repository-organization=opendatahub-io \ --field codeflare-repository-organization=${{ github.event.inputs.codeflare-repository-organization }} \ --field codeflare_sdk_release_version=${{ github.event.inputs.release-version }} env: From c59884cd8a62f4e8d5d8ab18be30b3b87459a94a Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Fri, 19 Apr 2024 15:58:13 +0000 Subject: [PATCH 192/496] Changes in docs for release: v0.16.0 --- .../cluster/cluster.html | 95 ++--- .../cluster/config.html | 9 +- .../utils/generate_yaml.html | 389 +----------------- 3 files changed, 35 insertions(+), 458 deletions(-) diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html index c5b65132..eed1e154 100644 --- a/docs/detailed-documentation/cluster/cluster.html +++ b/docs/detailed-documentation/cluster/cluster.html @@ -121,7 +121,6 @@

    Module codeflare_sdk.cluster.cluster

    if self._job_submission_client: return self._job_submission_client if is_openshift_cluster(): - print(k8client.configuration.get_api_key_with_prefix("authorization")) self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), headers=self._client_headers, @@ -212,7 +211,6 @@

    Module codeflare_sdk.cluster.cluster

    mcad = self.config.mcad instance_types = self.config.machine_types env = self.config.envs - local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority write_to_file = self.config.write_to_file @@ -236,7 +234,6 @@

    Module codeflare_sdk.cluster.cluster

    mcad=mcad, instance_types=instance_types, env=env, - local_interactive=local_interactive, image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, @@ -512,13 +509,6 @@

    Module codeflare_sdk.cluster.cluster

    verify_tls=True, ): config_check() - if ( - rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"] - == "True" - ): - local_interactive = True - else: - local_interactive = False machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -559,7 +549,6 @@

    Module codeflare_sdk.cluster.cluster

    image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], - local_interactive=local_interactive, mcad=mcad, write_to_file=write_to_file, verify_tls=verify_tls, @@ -567,18 +556,27 @@

    Module codeflare_sdk.cluster.cluster

    return Cluster(cluster_config) def local_client_url(self): - if self.config.local_interactive == True: - ingress_domain = _get_ingress_domain(self) - return f"ray://{ingress_domain}" - else: - return "None" + ingress_domain = _get_ingress_domain(self) + return f"ray://{ingress_domain}" def _component_resources_up( self, namespace: str, api_instance: client.CustomObjectsApi ): if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) + yamls = list(yaml.load_all(f, Loader=yaml.FullLoader)) + for resource in yamls: + enable_ingress = ( + resource.get("spec", {}) + .get("headGroupSpec", {}) + .get("enableIngress") + ) + if resource["kind"] == "RayCluster" and enable_ingress is not False: + name = resource["metadata"]["name"] + print( + f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True' or is unset." + ) + return _create_resources(yamls, namespace, api_instance) else: yamls = yaml.load_all(self.app_wrapper_yaml, Loader=yaml.FullLoader) @@ -699,13 +697,6 @@

    Module codeflare_sdk.cluster.cluster

    plural="rayclusters", name=name, ) - elif resource["kind"] == "Secret": - name = resource["metadata"]["name"] - secret_instance = client.CoreV1Api(api_config_handler()) - secret_instance.delete_namespaced_secret( - namespace=namespace, - name=name, - ) def _create_resources(yamls, namespace: str, api_instance: client.CustomObjectsApi): @@ -718,12 +709,6 @@

    Module codeflare_sdk.cluster.cluster

    plural="rayclusters", body=resource, ) - elif resource["kind"] == "Secret": - secret_instance = client.CoreV1Api(api_config_handler()) - secret_instance.create_namespaced_secret( - namespace=namespace, - body=resource, - ) def _check_aw_exists(name: str, namespace: str) -> bool: @@ -1185,7 +1170,6 @@

    Classes

    if self._job_submission_client: return self._job_submission_client if is_openshift_cluster(): - print(k8client.configuration.get_api_key_with_prefix("authorization")) self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), headers=self._client_headers, @@ -1276,7 +1260,6 @@

    Classes

    mcad = self.config.mcad instance_types = self.config.machine_types env = self.config.envs - local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority write_to_file = self.config.write_to_file @@ -1300,7 +1283,6 @@

    Classes

    mcad=mcad, instance_types=instance_types, env=env, - local_interactive=local_interactive, image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, @@ -1576,13 +1558,6 @@

    Classes

    verify_tls=True, ): config_check() - if ( - rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"] - == "True" - ): - local_interactive = True - else: - local_interactive = False machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -1623,7 +1598,6 @@

    Classes

    image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], - local_interactive=local_interactive, mcad=mcad, write_to_file=write_to_file, verify_tls=verify_tls, @@ -1631,18 +1605,27 @@

    Classes

    return Cluster(cluster_config) def local_client_url(self): - if self.config.local_interactive == True: - ingress_domain = _get_ingress_domain(self) - return f"ray://{ingress_domain}" - else: - return "None" + ingress_domain = _get_ingress_domain(self) + return f"ray://{ingress_domain}" def _component_resources_up( self, namespace: str, api_instance: client.CustomObjectsApi ): if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) + yamls = list(yaml.load_all(f, Loader=yaml.FullLoader)) + for resource in yamls: + enable_ingress = ( + resource.get("spec", {}) + .get("headGroupSpec", {}) + .get("enableIngress") + ) + if resource["kind"] == "RayCluster" and enable_ingress is not False: + name = resource["metadata"]["name"] + print( + f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True' or is unset." + ) + return _create_resources(yamls, namespace, api_instance) else: yamls = yaml.load_all(self.app_wrapper_yaml, Loader=yaml.FullLoader) @@ -1675,7 +1658,6 @@

    Instance variables

    if self._job_submission_client: return self._job_submission_client if is_openshift_cluster(): - print(k8client.configuration.get_api_key_with_prefix("authorization")) self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), headers=self._client_headers, @@ -1824,7 +1806,6 @@

    Methods

    mcad = self.config.mcad instance_types = self.config.machine_types env = self.config.envs - local_interactive = self.config.local_interactive image_pull_secrets = self.config.image_pull_secrets dispatch_priority = self.config.dispatch_priority write_to_file = self.config.write_to_file @@ -1848,7 +1829,6 @@

    Methods

    mcad=mcad, instance_types=instance_types, env=env, - local_interactive=local_interactive, image_pull_secrets=image_pull_secrets, dispatch_priority=dispatch_priority, priority_val=priority_val, @@ -1953,13 +1933,6 @@

    Methods

    verify_tls=True, ): config_check() - if ( - rc["metadata"]["annotations"]["sdk.codeflare.dev/local_interactive"] - == "True" - ): - local_interactive = True - else: - local_interactive = False machine_types = ( rc["metadata"]["labels"]["orderedinstance"].split("_") if "orderedinstance" in rc["metadata"]["labels"] @@ -2000,7 +1973,6 @@

    Methods

    image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], - local_interactive=local_interactive, mcad=mcad, write_to_file=write_to_file, verify_tls=verify_tls, @@ -2092,11 +2064,8 @@

    Methods

    Expand source code
    def local_client_url(self):
    -    if self.config.local_interactive == True:
    -        ingress_domain = _get_ingress_domain(self)
    -        return f"ray://{ingress_domain}"
    -    else:
    -        return "None"
    +    ingress_domain = _get_ingress_domain(self)
    +    return f"ray://{ingress_domain}"
    diff --git a/docs/detailed-documentation/cluster/config.html b/docs/detailed-documentation/cluster/config.html index 72f0e3da..c7b9ccfc 100644 --- a/docs/detailed-documentation/cluster/config.html +++ b/docs/detailed-documentation/cluster/config.html @@ -81,7 +81,6 @@

    Module codeflare_sdk.cluster.config

    mcad: bool = False envs: dict = field(default_factory=dict) image: str = "" - local_interactive: bool = False image_pull_secrets: list = field(default_factory=list) dispatch_priority: str = None write_to_file: bool = False @@ -107,7 +106,7 @@

    Classes

    class ClusterConfiguration -(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = False, envs: dict = <factory>, image: str = '', local_interactive: bool = False, image_pull_secrets: list = <factory>, dispatch_priority: str = None, write_to_file: bool = False, verify_tls: bool = True, local_queue: str = None) +(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = False, envs: dict = <factory>, image: str = '', image_pull_secrets: list = <factory>, dispatch_priority: str = None, write_to_file: bool = False, verify_tls: bool = True, local_queue: str = None)

    This dataclass is used to specify resource requirements and other details, and @@ -140,7 +139,6 @@

    Classes

    mcad: bool = False
    envs: dict = field(default_factory=dict)
    image: str = ""
-    local_interactive: bool = False
    image_pull_secrets: list = field(default_factory=list)
    dispatch_priority: str = None
    write_to_file: bool = False
@@ -192,10 +190,6 @@

    Class variables

    -
    var local_interactive : bool
    -
    -
    -
    var local_queue : str
    @@ -282,7 +276,6 @@

    image
  • image_pull_secrets
  • instascale
  • -
  • local_interactive
  • local_queue
  • machine_types
  • max_cpus
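The config.html changes above drop local_interactive from ClusterConfiguration. A minimal construction against the updated signature could look like the sketch below; every value is a placeholder and the image reference is an assumption, not something this patch prescribes.

# Minimal ClusterConfiguration sketch against the updated signature; all values are placeholders.
from codeflare_sdk.cluster.config import ClusterConfiguration

config = ClusterConfiguration(
    name="demo",
    namespace="default",
    num_workers=1,
    min_cpus=1,
    max_cpus=1,
    min_memory=2,
    max_memory=2,
    num_gpus=0,
    image="quay.io/project-codeflare/ray:latest-py39-cu118",  # assumed image reference
    mcad=False,
    write_to_file=False,
    verify_tls=True,
)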
  • diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index 452bea97..fc714780 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -116,20 +116,6 @@

    Module codeflare_sdk.utils.generate_yaml

    lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name
    lower_meta["name"] = cluster_name
    lower_meta["namespace"] = namespace
-    lower_spec = item.get("generictemplate", {}).get("spec")
-    if is_openshift_cluster():
-        cookie_secret_env_var = {
-            "name": "COOKIE_SECRET",
-            "valueFrom": {
-                "secretKeyRef": {
-                    "key": "cookie_secret",
-                    "name": f"{cluster_name}-oauth-config",
-                }
-            },
-        }
-        lower_spec["headGroupSpec"]["template"]["spec"]["containers"][0]["env"].append(
-            cookie_secret_env_var
-        )


def update_labels(yaml, instascale, instance_types):
@@ -311,106 +297,10 @@

    Module codeflare_sdk.utils.generate_yaml

    update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) -def update_ca_secret(ca_secret_item, cluster_name, namespace): - from . import generate_cert - - metadata = ca_secret_item.get("generictemplate", {}).get("metadata") - metadata["name"] = f"ca-secret-{cluster_name}" - metadata["namespace"] = namespace - metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc" - data = ca_secret_item.get("generictemplate", {}).get("data") - data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365) - - -def enable_local_interactive(resources, cluster_name, namespace): # pragma: no cover - from ..cluster.cluster import _get_ingress_domain - - ca_secret_item = resources["resources"].get("GenericItems")[1] - item = resources["resources"].get("GenericItems")[0] - update_ca_secret(ca_secret_item, cluster_name, namespace) - # update_ca_secret_volumes - item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"]["volumes"][0][ - "secret" - ]["secretName"] = f"ca-secret-{cluster_name}" - item["generictemplate"]["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "volumes" - ][0]["secret"]["secretName"] = f"ca-secret-{cluster_name}" - # update_tls_env - item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ - 0 - ]["env"][1]["value"] = "1" - item["generictemplate"]["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["env"][1]["value"] = "1" - # update_init_container - command = item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ - "initContainers" - ][0].get("command")[2] - - command = command.replace("deployment-name", cluster_name) - - domain = "" ## FIX - We can't retrieve ingress domain - move init container to CFO - - command = command.replace("server-name", domain) - item["generictemplate"]["metadata"]["annotations"][ - "sdk.codeflare.dev/local_interactive" - ] = "True" - - item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][ - "initContainers" - ][0].get("command")[2] = command - - def del_from_list_by_name(l: list, target: typing.List[str]) -> list: return [x for x in l if x["name"] not in target] -def disable_raycluster_tls(resources): - generic_template_spec = resources["GenericItems"][0]["generictemplate"]["spec"] - - headGroupTemplateSpec = generic_template_spec["headGroupSpec"]["template"]["spec"] - headGroupTemplateSpec["volumes"] = del_from_list_by_name( - headGroupTemplateSpec.get("volumes", []), - ["ca-vol", "server-cert"], - ) - - c: dict - for c in generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"]: - c["volumeMounts"] = del_from_list_by_name( - c.get("volumeMounts", []), ["ca-vol", "server-cert"] - ) - - if "initContainers" in generic_template_spec["headGroupSpec"]["template"]["spec"]: - del generic_template_spec["headGroupSpec"]["template"]["spec"]["initContainers"] - - for workerGroup in generic_template_spec.get("workerGroupSpecs", []): - workerGroupSpec = workerGroup["template"]["spec"] - workerGroupSpec["volumes"] = del_from_list_by_name( - workerGroupSpec.get("volumes", []), - ["ca-vol", "server-cert"], - ) - for c in workerGroup["template"]["spec"].get("containers", []): - c["volumeMounts"] = del_from_list_by_name( - c.get("volumeMounts", []), ["ca-vol", "server-cert"] - ) - - del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][ - "initContainers" - ] - - updated_items = [] - for i in resources["GenericItems"][:]: - if "rayclient-deployment-ingress" in i["generictemplate"]["metadata"]["name"]: - continue - 
if "rayclient-deployment-route" in i["generictemplate"]["metadata"]["name"]: - continue - if "ca-secret-deployment-name" in i["generictemplate"]["metadata"]["name"]: - continue - updated_items.append(i) - - resources["GenericItems"] = updated_items - - def write_user_appwrapper(user_yaml, output_file_name): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -423,75 +313,6 @@

    Module codeflare_sdk.utils.generate_yaml

    print(f"Written to: {output_file_name}") -def enable_openshift_oauth(user_yaml, cluster_name, namespace): - config_check() - k8_client = api_config_handler() or client.ApiClient() - tls_mount_location = "/etc/tls/private" - oauth_port = 8443 - oauth_sa_name = f"{cluster_name}-oauth-proxy" - tls_secret_name = f"{cluster_name}-proxy-tls-secret" - tls_volume_name = "proxy-tls-secret" - port_name = "oauth-proxy" - oauth_sidecar = _create_oauth_sidecar_object( - namespace, - tls_mount_location, - oauth_port, - oauth_sa_name, - tls_volume_name, - port_name, - ) - tls_secret_volume = client.V1Volume( - name=tls_volume_name, - secret=client.V1SecretVolumeSource(secret_name=tls_secret_name), - ) - # allows for setting value of Cluster object when initializing object from an existing AppWrapper on cluster - user_yaml["metadata"]["annotations"] = user_yaml["metadata"].get("annotations", {}) - ray_headgroup_pod = user_yaml["spec"]["resources"]["GenericItems"][0][ - "generictemplate" - ]["spec"]["headGroupSpec"]["template"]["spec"] - ray_headgroup_pod["serviceAccount"] = oauth_sa_name - ray_headgroup_pod["volumes"] = ray_headgroup_pod.get("volumes", []) - - # we use a generic api client here so that the serialization function doesn't need to be mocked for unit tests - ray_headgroup_pod["volumes"].append( - client.ApiClient().sanitize_for_serialization(tls_secret_volume) - ) - ray_headgroup_pod["containers"].append( - client.ApiClient().sanitize_for_serialization(oauth_sidecar) - ) - - -def _create_oauth_sidecar_object( - namespace: str, - tls_mount_location: str, - oauth_port: int, - oauth_sa_name: str, - tls_volume_name: str, - port_name: str, -) -> client.V1Container: - return client.V1Container( - args=[ - f"--https-address=:{oauth_port}", - "--provider=openshift", - f"--openshift-service-account={oauth_sa_name}", - "--upstream=http://localhost:8265", - f"--tls-cert={tls_mount_location}/tls.crt", - f"--tls-key={tls_mount_location}/tls.key", - "--cookie-secret=$(COOKIE_SECRET)", - f'--openshift-delegate-urls={{"/":{{"resource":"pods","namespace":"{namespace}","verb":"get"}}}}', - ], - image="registry.redhat.io/openshift4/ose-oauth-proxy@sha256:1ea6a01bf3e63cdcf125c6064cbd4a4a270deaf0f157b3eabb78f60556840366", - name="oauth-proxy", - ports=[client.V1ContainerPort(container_port=oauth_port, name=port_name)], - resources=client.V1ResourceRequirements(limits=None, requests=None), - volume_mounts=[ - client.V1VolumeMount( - mount_path=tls_mount_location, name=tls_volume_name, read_only=True - ) - ], - ) - - def get_default_kueue_name(namespace: str): # If the local queue is set, use it. Otherwise, try to use the default queue. try: @@ -599,7 +420,6 @@

    Module codeflare_sdk.utils.generate_yaml

    mcad: bool,
    instance_types: list,
    env,
-    local_interactive: bool,
    image_pull_secrets: list,
    dispatch_priority: str,
    priority_val: int,
@@ -650,14 +470,6 @@

    Module codeflare_sdk.utils.generate_yaml

        head_gpus,
    )

-    if local_interactive:
-        enable_local_interactive(resources, cluster_name, namespace)
-    else:
-        disable_raycluster_tls(resources["resources"])
-
-    if is_openshift_cluster():
-        enable_openshift_oauth(user_yaml, cluster_name, namespace)
-
    directory_path = os.path.expanduser("~/.codeflare/resources/")
    outfile = os.path.join(directory_path, appwrapper_name + ".yaml")
@@ -695,156 +507,6 @@

    Functions

    return [x for x in l if x["name"] not in target]
    -
    -def disable_raycluster_tls(resources) -
    -
    -
    -
    - -Expand source code - -
    def disable_raycluster_tls(resources):
    -    generic_template_spec = resources["GenericItems"][0]["generictemplate"]["spec"]
    -
    -    headGroupTemplateSpec = generic_template_spec["headGroupSpec"]["template"]["spec"]
    -    headGroupTemplateSpec["volumes"] = del_from_list_by_name(
    -        headGroupTemplateSpec.get("volumes", []),
    -        ["ca-vol", "server-cert"],
    -    )
    -
    -    c: dict
    -    for c in generic_template_spec["headGroupSpec"]["template"]["spec"]["containers"]:
    -        c["volumeMounts"] = del_from_list_by_name(
    -            c.get("volumeMounts", []), ["ca-vol", "server-cert"]
    -        )
    -
    -    if "initContainers" in generic_template_spec["headGroupSpec"]["template"]["spec"]:
    -        del generic_template_spec["headGroupSpec"]["template"]["spec"]["initContainers"]
    -
    -    for workerGroup in generic_template_spec.get("workerGroupSpecs", []):
    -        workerGroupSpec = workerGroup["template"]["spec"]
    -        workerGroupSpec["volumes"] = del_from_list_by_name(
    -            workerGroupSpec.get("volumes", []),
    -            ["ca-vol", "server-cert"],
    -        )
    -        for c in workerGroup["template"]["spec"].get("containers", []):
    -            c["volumeMounts"] = del_from_list_by_name(
    -                c.get("volumeMounts", []), ["ca-vol", "server-cert"]
    -            )
    -
    -    del generic_template_spec["workerGroupSpecs"][0]["template"]["spec"][
    -        "initContainers"
    -    ]
    -
    -    updated_items = []
    -    for i in resources["GenericItems"][:]:
    -        if "rayclient-deployment-ingress" in i["generictemplate"]["metadata"]["name"]:
    -            continue
    -        if "rayclient-deployment-route" in i["generictemplate"]["metadata"]["name"]:
    -            continue
    -        if "ca-secret-deployment-name" in i["generictemplate"]["metadata"]["name"]:
    -            continue
    -        updated_items.append(i)
    -
    -    resources["GenericItems"] = updated_items
    -
    -
    -
    -def enable_local_interactive(resources, cluster_name, namespace) -
    -
    -
    -
    - -Expand source code - -
    def enable_local_interactive(resources, cluster_name, namespace):  # pragma: no cover
    -    from ..cluster.cluster import _get_ingress_domain
    -
    -    ca_secret_item = resources["resources"].get("GenericItems")[1]
    -    item = resources["resources"].get("GenericItems")[0]
    -    update_ca_secret(ca_secret_item, cluster_name, namespace)
    -    # update_ca_secret_volumes
    -    item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"]["volumes"][0][
    -        "secret"
    -    ]["secretName"] = f"ca-secret-{cluster_name}"
    -    item["generictemplate"]["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -        "volumes"
    -    ][0]["secret"]["secretName"] = f"ca-secret-{cluster_name}"
    -    # update_tls_env
    -    item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"]["containers"][
    -        0
    -    ]["env"][1]["value"] = "1"
    -    item["generictemplate"]["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -        "containers"
    -    ][0]["env"][1]["value"] = "1"
    -    # update_init_container
    -    command = item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][
    -        "initContainers"
    -    ][0].get("command")[2]
    -
    -    command = command.replace("deployment-name", cluster_name)
    -
    -    domain = ""  ## FIX - We can't retrieve ingress domain - move init container to CFO
    -
    -    command = command.replace("server-name", domain)
    -    item["generictemplate"]["metadata"]["annotations"][
    -        "sdk.codeflare.dev/local_interactive"
    -    ] = "True"
    -
    -    item["generictemplate"]["spec"]["headGroupSpec"]["template"]["spec"][
    -        "initContainers"
    -    ][0].get("command")[2] = command
    -
    -
    -
    -def enable_openshift_oauth(user_yaml, cluster_name, namespace) -
    -
    -
    -
    - -Expand source code - -
    def enable_openshift_oauth(user_yaml, cluster_name, namespace):
    -    config_check()
    -    k8_client = api_config_handler() or client.ApiClient()
    -    tls_mount_location = "/etc/tls/private"
    -    oauth_port = 8443
    -    oauth_sa_name = f"{cluster_name}-oauth-proxy"
    -    tls_secret_name = f"{cluster_name}-proxy-tls-secret"
    -    tls_volume_name = "proxy-tls-secret"
    -    port_name = "oauth-proxy"
    -    oauth_sidecar = _create_oauth_sidecar_object(
    -        namespace,
    -        tls_mount_location,
    -        oauth_port,
    -        oauth_sa_name,
    -        tls_volume_name,
    -        port_name,
    -    )
    -    tls_secret_volume = client.V1Volume(
    -        name=tls_volume_name,
    -        secret=client.V1SecretVolumeSource(secret_name=tls_secret_name),
    -    )
    -    # allows for setting value of Cluster object when initializing object from an existing AppWrapper on cluster
    -    user_yaml["metadata"]["annotations"] = user_yaml["metadata"].get("annotations", {})
    -    ray_headgroup_pod = user_yaml["spec"]["resources"]["GenericItems"][0][
    -        "generictemplate"
    -    ]["spec"]["headGroupSpec"]["template"]["spec"]
    -    ray_headgroup_pod["serviceAccount"] = oauth_sa_name
    -    ray_headgroup_pod["volumes"] = ray_headgroup_pod.get("volumes", [])
    -
    -    # we use a generic api client here so that the serialization function doesn't need to be mocked for unit tests
    -    ray_headgroup_pod["volumes"].append(
    -        client.ApiClient().sanitize_for_serialization(tls_secret_volume)
    -    )
    -    ray_headgroup_pod["containers"].append(
    -        client.ApiClient().sanitize_for_serialization(oauth_sidecar)
    -    )
    -
    -
    def gen_names(name)
    @@ -865,7 +527,7 @@

    Functions

-def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, mcad: bool, instance_types: list, env, local_interactive: bool, image_pull_secrets: list, dispatch_priority: str, priority_val: int, write_to_file: bool, verify_tls: bool, local_queue: Optional[str])
+def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, mcad: bool, instance_types: list, env, image_pull_secrets: list, dispatch_priority: str, priority_val: int, write_to_file: bool, verify_tls: bool, local_queue: Optional[str])
    @@ -891,7 +553,6 @@

    Functions

    mcad: bool,
    instance_types: list,
    env,
-    local_interactive: bool,
    image_pull_secrets: list,
    dispatch_priority: str,
    priority_val: int,
@@ -942,14 +603,6 @@

    Functions

        head_gpus,
    )

-    if local_interactive:
-        enable_local_interactive(resources, cluster_name, namespace)
-    else:
-        disable_raycluster_tls(resources["resources"])
-
-    if is_openshift_cluster():
-        enable_openshift_oauth(user_yaml, cluster_name, namespace)
-
    directory_path = os.path.expanduser("~/.codeflare/resources/")
    outfile = os.path.join(directory_path, appwrapper_name + ".yaml")
@@ -1137,26 +790,6 @@

    Functions

    spec.pop("affinity")
    -
    -def update_ca_secret(ca_secret_item, cluster_name, namespace) -
    -
    -
    -
    - -Expand source code - -
    def update_ca_secret(ca_secret_item, cluster_name, namespace):
    -    from . import generate_cert
    -
    -    metadata = ca_secret_item.get("generictemplate", {}).get("metadata")
    -    metadata["name"] = f"ca-secret-{cluster_name}"
    -    metadata["namespace"] = namespace
    -    metadata["labels"]["odh-ray-cluster-service"] = f"{cluster_name}-head-svc"
    -    data = ca_secret_item.get("generictemplate", {}).get("data")
    -    data["ca.key"], data["ca.crt"] = generate_cert.generate_ca_cert(365)
    -
    -
    def update_custompodresources(item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, head_cpus, head_memory, head_gpus)
    @@ -1307,21 +940,7 @@

    Functions

    lower_meta = item.get("generictemplate", {}).get("metadata")
    lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name
    lower_meta["name"] = cluster_name
-    lower_meta["namespace"] = namespace
-    lower_spec = item.get("generictemplate", {}).get("spec")
-    if is_openshift_cluster():
-        cookie_secret_env_var = {
-            "name": "COOKIE_SECRET",
-            "valueFrom": {
-                "secretKeyRef": {
-                    "key": "cookie_secret",
-                    "name": f"{cluster_name}-oauth-config",
-                }
-            },
-        }
-        lower_spec["headGroupSpec"]["template"]["spec"]["containers"][0]["env"].append(
-            cookie_secret_env_var
-        )
+    lower_meta["namespace"] = namespace
    @@ -1506,9 +1125,6 @@

    Index

  • Functions

    • del_from_list_by_name
    • -
    • disable_raycluster_tls
    • -
    • enable_local_interactive
    • -
    • enable_openshift_oauth
    • gen_names
    • generate_appwrapper
    • get_default_kueue_name
    • @@ -1518,7 +1134,6 @@

      Index

    • load_components
    • read_template
    • update_affinity
    • -
    • update_ca_secret
    • update_custompodresources
    • update_env
    • update_image
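Of the TLS/OAuth helpers touched by the generate_yaml.html changes above, only del_from_list_by_name survives the cleanup. A small sketch of what it does, using volume names borrowed from the removed code as example data (the import path mirrors the module documented above):

# Illustrative call; the volume names are example data taken from the removed TLS handling.
from codeflare_sdk.utils.generate_yaml import del_from_list_by_name

volumes = [{"name": "ca-vol"}, {"name": "server-cert"}, {"name": "proxy-tls-secret"}]
kept = del_from_list_by_name(volumes, ["ca-vol", "server-cert"])
# kept == [{"name": "proxy-tls-secret"}]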
    • From 736b1594ad4ac4fa06bdc33dd002887dd193b73f Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Fri, 19 Apr 2024 19:10:16 +0200 Subject: [PATCH 193/496] Use stronger runner for Notebook sync workflow (#519) --- .github/workflows/odh-notebooks-sync.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index 0248ed92..14a743a6 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -28,7 +28,7 @@ env: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-20.04-4core steps: - name: Clone repository and Sync run: | From 4e28c9bcfbfc7910dac562c4e214d3aff2ae5807 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 19 Apr 2024 09:48:30 +0000 Subject: [PATCH 194/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index a8c7e72a..607d3de4 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 92% - 92% + 91% + 91% From 596b5e19ac9b46b1a9c417256d69158ca474de57 Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Fri, 12 Apr 2024 16:40:47 +0530 Subject: [PATCH 195/496] Convert SDK's Go upgrade test to Python test in accordance with kueue (#494) --- go.mod | 98 --- go.sum | 870 ------------------- tests/__init__.py | 0 tests/e2e/support.go | 157 ---- tests/e2e/support.py | 8 + tests/unit_test.py | 2 +- tests/upgrade/__init__.py | 0 tests/upgrade/raycluster_sdk_upgrade_test.go | 396 --------- tests/upgrade/raycluster_sdk_upgrade_test.py | 164 ++++ 9 files changed, 173 insertions(+), 1522 deletions(-) delete mode 100644 go.mod delete mode 100644 go.sum create mode 100644 tests/__init__.py delete mode 100644 tests/e2e/support.go create mode 100644 tests/upgrade/__init__.py delete mode 100644 tests/upgrade/raycluster_sdk_upgrade_test.go create mode 100644 tests/upgrade/raycluster_sdk_upgrade_test.py diff --git a/go.mod b/go.mod deleted file mode 100644 index 436ed65c..00000000 --- a/go.mod +++ /dev/null @@ -1,98 +0,0 @@ -module github.com/project-codeflare/codeflare-sdk - -go 1.20 - -require ( - github.com/onsi/gomega v1.27.10 - github.com/project-codeflare/codeflare-common v0.0.0-20231129165224-988ba1da9069 - github.com/project-codeflare/multi-cluster-app-dispatcher v1.37.0 - github.com/ray-project/kuberay/ray-operator v1.0.0 - k8s.io/api v0.26.3 - k8s.io/apimachinery v0.26.3 - k8s.io/cli-runtime v0.26.3 - k8s.io/client-go v0.26.3 - k8s.io/kubectl v0.26.3 -) - -require ( - github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect - github.com/MakeNowJust/heredoc v1.0.0 // indirect - github.com/aymerick/douceur v0.2.0 // indirect - github.com/beorn7/perks v1.0.1 // indirect - github.com/cenkalti/backoff/v4 v4.1.3 // indirect - github.com/cespare/xxhash/v2 v2.1.2 // indirect - github.com/chai2010/gettext-go v1.0.2 // indirect - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/emicklei/go-restful/v3 v3.10.1 // indirect - github.com/evanphx/json-patch v4.12.0+incompatible // indirect - github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d // indirect - github.com/fatih/camelcase v1.0.0 // indirect - github.com/fvbommel/sortorder v1.0.1 // 
indirect - github.com/go-errors/errors v1.0.1 // indirect - github.com/go-logr/logr v1.2.4 // indirect - github.com/go-openapi/jsonpointer v0.19.6 // indirect - github.com/go-openapi/jsonreference v0.20.1 // indirect - github.com/go-openapi/swag v0.22.3 // indirect - github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang-jwt/jwt/v4 v4.4.1 // indirect - github.com/golang/glog v1.0.0 // indirect - github.com/golang/protobuf v1.5.3 // indirect - github.com/google/btree v1.0.1 // indirect - github.com/google/gnostic v0.6.9 // indirect - github.com/google/go-cmp v0.5.9 // indirect - github.com/google/gofuzz v1.2.0 // indirect - github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect - github.com/google/uuid v1.3.0 // indirect - github.com/gorilla/css v1.0.0 // indirect - github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 // indirect - github.com/imdario/mergo v0.3.12 // indirect - github.com/inconshreveable/mousetrap v1.0.1 // indirect - github.com/josharian/intern v1.0.0 // indirect - github.com/json-iterator/go v1.1.12 // indirect - github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect - github.com/mailru/easyjson v0.7.7 // indirect - github.com/matttproud/golang_protobuf_extensions v1.0.2 // indirect - github.com/microcosm-cc/bluemonday v1.0.18 // indirect - github.com/mitchellh/go-wordwrap v1.0.0 // indirect - github.com/moby/spdystream v0.2.0 // indirect - github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae // indirect - github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect - github.com/modern-go/reflect2 v1.0.2 // indirect - github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 // indirect - github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect - github.com/openshift-online/ocm-sdk-go v0.1.368 // indirect - github.com/openshift/api v0.0.0-20230213134911-7ba313770556 // indirect - github.com/openshift/client-go v0.0.0-20221019143426-16aed247da5c // indirect - github.com/peterbourgon/diskv v2.0.1+incompatible // indirect - github.com/pkg/errors v0.9.1 // indirect - github.com/prometheus/client_golang v1.14.0 // indirect - github.com/prometheus/client_model v0.3.0 // indirect - github.com/prometheus/common v0.37.0 // indirect - github.com/prometheus/procfs v0.8.0 // indirect - github.com/russross/blackfriday/v2 v2.1.0 // indirect - github.com/spf13/cobra v1.6.0 // indirect - github.com/spf13/pflag v1.0.5 // indirect - github.com/xlab/treeprint v1.1.0 // indirect - go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 // indirect - golang.org/x/net v0.17.0 // indirect - golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect - golang.org/x/sys v0.13.0 // indirect - golang.org/x/term v0.13.0 // indirect - golang.org/x/text v0.13.0 // indirect - golang.org/x/time v0.3.0 // indirect - google.golang.org/appengine v1.6.7 // indirect - 
google.golang.org/protobuf v1.28.1 // indirect - gopkg.in/inf.v0 v0.9.1 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect - k8s.io/component-base v0.26.3 // indirect - k8s.io/klog/v2 v2.100.1 // indirect - k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f // indirect - k8s.io/utils v0.0.0-20230505201702-9f6742963106 // indirect - sigs.k8s.io/controller-runtime v0.14.6 // indirect - sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect - sigs.k8s.io/kustomize/api v0.12.1 // indirect - sigs.k8s.io/kustomize/kyaml v0.13.9 // indirect - sigs.k8s.io/structured-merge-diff/v4 v4.2.3 // indirect - sigs.k8s.io/yaml v1.3.0 // indirect -) diff --git a/go.sum b/go.sum deleted file mode 100644 index 0d6034b8..00000000 --- a/go.sum +++ /dev/null @@ -1,870 +0,0 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= 
-cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= -github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/MakeNowJust/heredoc v1.0.0 h1:cXCdzVdstXyiTqTvfqk9SDHpKNjxuom+DOlyEeQ4pzQ= -github.com/MakeNowJust/heredoc v1.0.0/go.mod h1:mG5amYoWBHf8vpLOuehzbGGw0EHxpZZ6lCpQ4fNJ8LE= -github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= -github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= -github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= -github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= -github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= -github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= -github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= -github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= -github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= -github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= 
-github.com/chai2010/gettext-go v1.0.2 h1:1Lwwip6Q2QGsAdl/ZKPCwTe9fe0CjlUbqj5bFNSjIRk= -github.com/chai2010/gettext-go v1.0.2/go.mod h1:y+wnP2cHYaVj19NZhYKAwEMH2CI1gNHeQQ+5AjwawxA= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= -github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= -github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= -github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153 h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc= -github.com/emicklei/go-restful/v3 v3.10.1 h1:rc42Y5YTp7Am7CS630D7JmhRjq4UlEUuEKfrDac4bSQ= -github.com/emicklei/go-restful/v3 v3.10.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/evanphx/json-patch 
v4.12.0+incompatible h1:4onqiflcdA9EOZ4RxV643DvftH5pOlLGNtQ5lPWQu84= -github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= -github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww= -github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4= -github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d h1:105gxyaGwCFad8crR9dcMQWvV9Hvulu6hwUh4tWPJnM= -github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d/go.mod h1:ZZMPRZwes7CROmyNKgQzC3XPs6L/G2EJLHddWejkmf4= -github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8= -github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= -github.com/flowstack/go-jsonschema v0.1.1/go.mod h1:yL7fNggx1o8rm9RlgXv7hTBWxdBM0rVwpMwimd3F3N0= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fvbommel/sortorder v1.0.1 h1:dSnXLt4mJYH25uDDGa3biZNQsozaUWDSWeKJ0qqFfzE= -github.com/fvbommel/sortorder v1.0.1/go.mod h1:uk88iVf1ovNn1iLfgUVU2F9o5eO30ui720w+kxuqRs0= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w= -github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= -github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= -github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= -github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= -github.com/go-openapi/jsonreference v0.20.1 h1:FBLnyygC4/IZZr893oiomc9XaghoveYTrLC1F86HID8= 
-github.com/go-openapi/jsonreference v0.20.1/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= -github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= -github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= -github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= -github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= -github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/golang-jwt/jwt/v4 v4.4.1 h1:pC5DB52sCeK48Wlb9oPcdhnjkz1TKt1D/P7WKJ0kUcQ= -github.com/golang-jwt/jwt/v4 v4.4.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/glog v1.0.0 h1:nfP3RFugxnNRyKgeWd4oI1nYvXpxrx8ck8ZrcizshdQ= -github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod 
h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4= -github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= -github.com/google/gnostic v0.6.9 h1:ZK/5VhkoX835RikCHpSUJV9a+S3e1zLh59YnyWeBW+0= -github.com/google/gnostic v0.6.9/go.mod h1:Nm8234We1lq6iB9OmlgNv3nH91XLLVZHCDayfA3xq+E= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= -github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof 
v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1 h1:K6RDEckDVWvDI9JAJYCmNdQXq6neHJOYx3V6jnqNEec= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= -github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= -github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= -github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= -github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 h1:pdN6V1QBWetyv/0+wjACpqVH+eVULgEjkurDLq3goeM= -github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= -github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU= -github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= -github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= -github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= 
-github.com/itchyny/gojq v0.12.7 h1:hYPTpeWfrJ1OT+2j6cvBScbhl0TkdwGM4bc66onUSOQ= -github.com/itchyny/gojq v0.12.7/go.mod h1:ZdvNHVlzPgUf8pgjnuDTmGfHA/21KoutQUJ3An/xNuw= -github.com/itchyny/timefmt-go v0.1.3 h1:7M3LGVDsqcd0VZH2U+x393obrzZisp7C0uEe921iRkU= -github.com/itchyny/timefmt-go v0.1.3/go.mod h1:0osSSCQSASBJMsIZnhAaF1C2fCBTJZXrnj37mG8/c+A= -github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0= -github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo= -github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= -github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8= -github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk= -github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA= -github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE= -github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s= -github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= -github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= -github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= -github.com/jackc/pgconn v1.12.0 h1:/RvQ24k3TnNdfBSW0ou9EOi5jx2cX7zfE8n2nLKuiP0= -github.com/jackc/pgconn v1.12.0/go.mod h1:ZkhRC59Llhrq3oSfrikvwQ5NaxYExr6twkdkMLaKono= -github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= -github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= -github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= -github.com/jackc/pgmock v0.0.0-20201204152224-4fe30f7445fd/go.mod h1:hrBW0Enj2AZTNpt/7Y5rr2xe/9Mn757Wtb2xeBzPv2c= -github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak= -github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= -github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= -github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A= -github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78= -github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA= -github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg= -github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= -github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= -github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgproto3/v2 v2.1.1/go.mod 
h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgproto3/v2 v2.3.0 h1:brH0pCGBDkBW07HWlN/oSBXrmo3WB0UvZd1pIuDcL8Y= -github.com/jackc/pgproto3/v2 v2.3.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= -github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg= -github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= -github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= -github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= -github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= -github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= -github.com/jackc/pgtype v1.11.0 h1:u4uiGPz/1hryuXzyaBhSk6dnIyyG2683olG2OV+UUgs= -github.com/jackc/pgtype v1.11.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= -github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= -github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= -github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= -github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= -github.com/jackc/pgx/v4 v4.16.0 h1:4k1tROTJctHotannFYzu77dY3bgtMRymQP7tXQjqpPk= -github.com/jackc/pgx/v4 v4.16.0/go.mod h1:N0A9sFdWzkw/Jy1lwoiB64F2+ugFZi987zRxcPez/wI= -github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jackc/puddle v1.2.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= -github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= -github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report 
v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.5/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0= -github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE= -github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= -github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= -github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= 
-github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/matttproud/golang_protobuf_extensions v1.0.2 h1:hAHbPm5IJGijwng3PWk09JkG9WeqChjprR5s9bBZ+OM= -github.com/matttproud/golang_protobuf_extensions v1.0.2/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= -github.com/microcosm-cc/bluemonday v1.0.18 h1:6HcxvXDAi3ARt3slx6nTesbvorIc3QeTzBNRvWktHBo= -github.com/microcosm-cc/bluemonday v1.0.18/go.mod h1:Z0r70sCuXHig8YpBzCc5eGHAap2K7e/u082ZUpDRRqM= -github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4= -github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= -github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= -github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= -github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae h1:O4SWKdcHVCvYqyDV+9CJA1fcDN2L11Bule0iFy3YlAI= -github.com/moby/term v0.0.0-20220808134915-39b0c02b01ae/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= -github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 h1:n6/2gBQ3RWajuToeY6ZtZTIKv2v7ThUy5KKusIT0yc0= -github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00/go.mod h1:Pm3mSP3c5uWn86xMLZ5Sa7JB9GsEZySvHYXCTK4E9q4= -github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= -github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= -github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= -github.com/onsi/ginkgo/v2 
v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= -github.com/onsi/ginkgo/v2 v2.1.4/go.mod h1:um6tUpWM/cxCK3/FK8BXqEiUMUwRgSM4JXG47RKZmLU= -github.com/onsi/ginkgo/v2 v2.11.0 h1:WgqUCUt/lT6yXoQ8Wef0fsNn5cAuMK7+KT9UFRz2tcU= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= -github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= -github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= -github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M= -github.com/openshift-online/ocm-sdk-go v0.1.368 h1:qP+gkChV8WDwwpkUw1xUyjTXKdvrwyd70Gff2GMUSeU= -github.com/openshift-online/ocm-sdk-go v0.1.368/go.mod h1:KYOw8kAKAHyPrJcQoVR82CneQ4ofC02Na4cXXaTq4Nw= -github.com/openshift/api v0.0.0-20230213134911-7ba313770556 h1:7W2fOhJicyEff24VaF7ASNzPtYvr+iSCVft4SIBAzaE= -github.com/openshift/api v0.0.0-20230213134911-7ba313770556/go.mod h1:aQ6LDasvHMvHZXqLHnX2GRmnfTWCF/iIwz8EMTTIE9A= -github.com/openshift/client-go v0.0.0-20221019143426-16aed247da5c h1:CV76yFOTXmq9VciBR3Bve5ZWzSxdft7gaMVB3kS0rwg= -github.com/openshift/client-go v0.0.0-20221019143426-16aed247da5c/go.mod h1:lFMO8mLHXWFzSdYvGNo8ivF9SfF6zInA8ZGw4phRnUE= -github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= -github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/project-codeflare/codeflare-common v0.0.0-20231129165224-988ba1da9069 h1:81+ma1mchF/LtAGsf+poAt50kJ/fLYjoTAcZOxci1Yc= -github.com/project-codeflare/codeflare-common v0.0.0-20231129165224-988ba1da9069/go.mod h1:zdi2GCYJX+QyxFWyCLMoTme3NMz/aucWDJWMqKfigxk= -github.com/project-codeflare/multi-cluster-app-dispatcher v1.37.0 h1:oyhdLdc4BgA4zcH1zlRrSrYpzuVxV5QLDbyIXrwnQqs= -github.com/project-codeflare/multi-cluster-app-dispatcher v1.37.0/go.mod h1:Yge6GRNpO9YIDfeL+XOcCE9xbmfCTD5C1h5dlW87mxQ= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.12.1/go.mod 
h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= -github.com/prometheus/client_golang v1.14.0 h1:nJdhIvne2eSX/XRAFV9PcvFFRbrjbcTUj0VP62TMhnw= -github.com/prometheus/client_golang v1.14.0/go.mod h1:8vpkKitgIVNcqrRBWh1C4TIUQgYNtG/XQE4E/Zae36Y= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4= -github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= -github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= -github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= -github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= -github.com/prometheus/common v0.37.0 h1:ccBbHCgIiT9uSoFY0vX8H3zsNR5eLt17/RQLUvn8pXE= -github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo= -github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= -github.com/ray-project/kuberay/ray-operator v1.0.0 h1:i69nvbV7az2FG41VHQgxrmhD+SUl8ca+ek4RPbSE2Q0= -github.com/ray-project/kuberay/ray-operator v1.0.0/go.mod h1:7C7ebIkxtkmOX8w1iiLrKM1j4hkZs/Guzm3WdePk/yg= -github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= -github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= -github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= -github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= -github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= -github.com/sergi/go-diff v1.1.0 
h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= -github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= -github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= -github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/cobra v1.6.0 h1:42a0n6jwCot1pUmomAp4T7DeMD+20LFv4Q54pxLf2LI= -github.com/spf13/cobra v1.6.0/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= -github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= -github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= -github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= -github.com/xlab/treeprint v1.1.0 h1:G/1DjNkPpfZCFt9CSh6b5/nY4VimlbHF3Rh4obvtzDk= -github.com/xlab/treeprint v1.1.0/go.mod 
h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= -go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 h1:+FNtrFTmVw0YZGpBGX56XDee331t6JAXeK2bcyhLOOc= -go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5/go.mod h1:nmDLcffg48OtT/PSW0Hg7FvpRQsQh5OSqIylirxKC7o= -go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= -go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= -go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= -go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= -go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto 
v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220427172511-eb4f295cb31f/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod 
h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net 
v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= -golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= -golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 h1:OSnWWcOd/CtWQC2cYSBgbTSJv3ciqd8r54ySIW2y3RE= -golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys 
v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190403152447-81d4e9dc473e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191002063906-3421d5a6bb1c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
-golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= -golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek= -golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text 
v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= -golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools 
v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= -golang.org/x/tools v0.9.3 h1:Gn1I8+64MsuTb/HpH+LmQtNas23LhUVr3rYZ0eKuaMM= -golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= 
-google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc 
v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= -google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= -google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= -gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= -gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= 
-gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= -gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -k8s.io/api v0.26.3 h1:emf74GIQMTik01Aum9dPP0gAypL8JTLl/lHa4V9RFSU= -k8s.io/api v0.26.3/go.mod h1:PXsqwPMXBSBcL1lJ9CYDKy7kIReUydukS5JiRlxC3qE= -k8s.io/apimachinery v0.26.3 h1:dQx6PNETJ7nODU3XPtrwkfuubs6w7sX0M8n61zHIV/k= -k8s.io/apimachinery v0.26.3/go.mod h1:ats7nN1LExKHvJ9TmwootT00Yz05MuYqPXEXaVeOy5I= -k8s.io/cli-runtime v0.26.3 h1:3ULe0oI28xmgeLMVXIstB+ZL5CTGvWSMVMLeHxitIuc= -k8s.io/cli-runtime v0.26.3/go.mod h1:5YEhXLV4kLt/OSy9yQwtSSNZU2Z7aTEYta1A+Jg4VC4= -k8s.io/client-go v0.26.3 h1:k1UY+KXfkxV2ScEL3gilKcF7761xkYsSD6BC9szIu8s= -k8s.io/client-go v0.26.3/go.mod h1:ZPNu9lm8/dbRIPAgteN30RSXea6vrCpFvq+MateTUuQ= -k8s.io/component-base v0.26.3 h1:oC0WMK/ggcbGDTkdcqefI4wIZRYdK3JySx9/HADpV0g= -k8s.io/component-base v0.26.3/go.mod h1:5kj1kZYwSC6ZstHJN7oHBqcJC6yyn41eR+Sqa/mQc8E= -k8s.io/klog/v2 v2.100.1 h1:7WCHKK6K8fNhTqfBhISHQ97KrnJNFZMcQvKp7gP/tmg= -k8s.io/klog/v2 v2.100.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0= -k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f h1:2kWPakN3i/k81b0gvD5C5FJ2kxm1WrQFanWchyKuqGg= -k8s.io/kube-openapi v0.0.0-20230501164219-8b0f38b5fd1f/go.mod h1:byini6yhqGC14c3ebc/QwanvYwhuMWF6yz2F8uwW8eg= -k8s.io/kubectl v0.26.3 h1:bZ5SgFyeEXw6XTc1Qji0iNdtqAC76lmeIIQULg2wNXM= -k8s.io/kubectl v0.26.3/go.mod h1:02+gv7Qn4dupzN3fi/9OvqqdW+uG/4Zi56vc4Zmsp1g= -k8s.io/utils v0.0.0-20230505201702-9f6742963106 h1:EObNQ3TW2D+WptiYXlApGNLVy0zm/JIBVY9i+M4wpAU= -k8s.io/utils v0.0.0-20230505201702-9f6742963106/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -sigs.k8s.io/controller-runtime v0.14.6 h1:oxstGVvXGNnMvY7TAESYk+lzr6S3V5VFxQ6d92KcwQA= -sigs.k8s.io/controller-runtime v0.14.6/go.mod h1:WqIdsAY6JBsjfc/CqO0CORmNtoCtE4S6qbPc9s68h+0= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= -sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod 
h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= -sigs.k8s.io/kustomize/api v0.12.1 h1:7YM7gW3kYBwtKvoY216ZzY+8hM+lV53LUayghNRJ0vM= -sigs.k8s.io/kustomize/api v0.12.1/go.mod h1:y3JUhimkZkR6sbLNwfJHxvo1TCLwuwm14sCYnkH6S1s= -sigs.k8s.io/kustomize/kyaml v0.13.9 h1:Qz53EAaFFANyNgyOEJbT/yoIHygK40/ZcvU3rgry2Tk= -sigs.k8s.io/kustomize/kyaml v0.13.9/go.mod h1:QsRbD0/KcU+wdk0/L0fIp2KLnohkVzs6fQ85/nOXac4= -sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kFxnAMREiWFE= -sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E= -sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= -sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/e2e/support.go b/tests/e2e/support.go deleted file mode 100644 index c3940fe2..00000000 --- a/tests/e2e/support.go +++ /dev/null @@ -1,157 +0,0 @@ -/* -Copyright 2023. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package e2e - -import ( - "embed" - "os" - "path/filepath" - "strings" - "time" - - "github.com/onsi/gomega" - "github.com/project-codeflare/codeflare-common/support" - corev1 "k8s.io/api/core/v1" - "k8s.io/apimachinery/pkg/api/meta" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/runtime/schema" - "k8s.io/apimachinery/pkg/runtime/serializer" - "k8s.io/cli-runtime/pkg/genericclioptions" - "k8s.io/client-go/discovery" - "k8s.io/client-go/rest" - "k8s.io/client-go/tools/clientcmd" - clientcmdapi "k8s.io/client-go/tools/clientcmd/api" - "k8s.io/kubectl/pkg/cmd/cp" - "k8s.io/kubectl/pkg/cmd/util" - "k8s.io/kubectl/pkg/scheme" -) - -//go:embed *.py *.txt *.sh -var files embed.FS - -func ReadFile(t support.Test, fileName string) []byte { - t.T().Helper() - file, err := files.ReadFile(fileName) - t.Expect(err).NotTo(gomega.HaveOccurred()) - return file -} - -func GetRestConfig(t support.Test) (*rest.Config, error) { - const GroupName = "" - var SchemeGroupVersion = schema.GroupVersion{Group: GroupName, Version: "v1"} - kubeconfig := filepath.Join(os.Getenv("HOME"), ".kube", "config") - restConfig, err := clientcmd.BuildConfigFromFlags("", kubeconfig) - if err != nil { - t.T().Errorf("Error building kubeconfig: %v", err) - return restConfig, err - } - restConfig.APIPath = "/api" - restConfig.GroupVersion = &SchemeGroupVersion - restConfig.NegotiatedSerializer = serializer.WithoutConversionCodecFactory{CodecFactory: scheme.Codecs} - return restConfig, nil -} - -func CopyToPod(t support.Test, namespace string, podName string, restConfig *rest.Config, srcDir string, dstDir string) error { - ioStreams, _, _, _ := genericclioptions.NewTestIOStreams() - copyOptions := cp.NewCopyOptions(ioStreams) - factory := util.NewFactory(newRestClientGetter(namespace, restConfig)) - if err := copyOptions.Complete(factory, cp.NewCmdCp(factory, ioStreams), []string{srcDir, podName + ":" + dstDir}); err != nil { - 
t.T().Errorf("error when completing all the required options: %v", err) - return err - } - if err := copyOptions.Validate(); err != nil { - t.T().Errorf("error when validating the provided values for CopyOptions: %v", err) - return err - } - if err := copyOptions.Run(); err != nil { - t.T().Errorf("could not run copy operation: %v", err) - return err - } - return nil -} - -// restClientGetter interface is used to get a rest client from a kubeconfig -type restClientGetter struct { - ClientConfig *rest.Config - ConfigLoader clientcmd.ClientConfig -} - -func newRestClientGetter(namespace string, clientConfig *rest.Config) restClientGetter { - return restClientGetter{ - ClientConfig: clientConfig, - ConfigLoader: clientcmd.NewDefaultClientConfig(clientcmdapi.Config{}, &clientcmd.ConfigOverrides{Context: clientcmdapi.Context{Namespace: namespace}}), - } -} - -func (r restClientGetter) ToRESTConfig() (*rest.Config, error) { - return r.ClientConfig, nil -} - -func (r restClientGetter) ToRawKubeConfigLoader() clientcmd.ClientConfig { - return r.ConfigLoader -} - -func (r restClientGetter) ToDiscoveryClient() (discovery.CachedDiscoveryInterface, error) { - return nil, nil -} - -func (r restClientGetter) ToRESTMapper() (meta.RESTMapper, error) { - return nil, nil -} - -func SetupCodeflareSDKInsidePod(test support.Test, namespace *corev1.Namespace, labelName string) { - - // Get pod name - podName := GetPodName(test, namespace, labelName) - - // Get rest config - restConfig, err := GetRestConfig(test) - if err != nil { - test.T().Errorf("Error getting rest config: %v", err) - } - - // Copy codeflare-sdk to the pod - srcDir := "../.././" - dstDir := "/codeflare-sdk" - if err := CopyToPod(test, namespace.Name, podName, restConfig, srcDir, dstDir); err != nil { - test.T().Errorf("Error copying codeflare-sdk to pod: %v", err) - } -} - -func GetPodName(test support.Test, namespace *corev1.Namespace, labelName string) string { - podName := "" - foundPod := false - for !foundPod { - pods, _ := test.Client().Core().CoreV1().Pods(namespace.Name).List(test.Ctx(), metav1.ListOptions{ - LabelSelector: "job-name=" + labelName, - }) - for _, pod := range pods.Items { - - if strings.HasPrefix(pod.Name, labelName+"-") && pod.Status.Phase == corev1.PodRunning { - podName = pod.Name - foundPod = true - test.T().Logf("Pod is running!") - break - } - } - if !foundPod { - test.T().Logf("Waiting for pod to start...") - time.Sleep(5 * time.Second) - } - } - return podName -} diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 8171c879..7657cf21 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -25,6 +25,14 @@ def create_namespace(self): self.api_instance.create_namespace(namespace_body) +def create_namespace_with_name(self, namespace_name): + self.namespace = namespace_name + namespace_body = client.V1Namespace( + metadata=client.V1ObjectMeta(name=self.namespace) + ) + self.api_instance.create_namespace(namespace_body) + + def delete_namespace(self): if hasattr(self, "namespace"): self.api_instance.delete_namespace(self.namespace) diff --git a/tests/unit_test.py b/tests/unit_test.py index 3892f8e5..322449fb 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -64,7 +64,7 @@ export_env, ) -from unit_test_support import ( +from tests.unit_test_support import ( createClusterWithConfig, createClusterConfig, ) diff --git a/tests/upgrade/__init__.py b/tests/upgrade/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.go 
b/tests/upgrade/raycluster_sdk_upgrade_test.go deleted file mode 100644 index 6b3c93cf..00000000 --- a/tests/upgrade/raycluster_sdk_upgrade_test.go +++ /dev/null @@ -1,396 +0,0 @@ -/* -Copyright 2023. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -package upgrade - -import ( - "fmt" - "testing" - - . "github.com/onsi/gomega" - . "github.com/project-codeflare/codeflare-common/support" - mcadv1beta1 "github.com/project-codeflare/multi-cluster-app-dispatcher/pkg/apis/controller/v1beta1" - rayv1 "github.com/ray-project/kuberay/ray-operator/apis/ray/v1" - - batchv1 "k8s.io/api/batch/v1" - corev1 "k8s.io/api/core/v1" - rbacv1 "k8s.io/api/rbac/v1" - metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" - - . "github.com/project-codeflare/codeflare-sdk/tests/e2e" -) - -var ( - nsName = "test-ns-rayclusterupgrade" -) - -// Creates a Ray cluster -func TestMNISTRayClusterUp(t *testing.T) { - - test := With(t) - - // Create a namespace - namespace := CreateTestNamespaceWithName(test, nsName) - test.T().Logf("Created namespace %s successfully", namespace.Name) - - // Delete namespace only if test failed - defer func() { - if t.Failed() { - DeleteTestNamespace(test, namespace) - } else { - StoreNamespaceLogs(test, namespace) - } - }() - - // Test configuration - config := CreateConfigMap(test, namespace.Name, map[string][]byte{ - // SDK script - "start_ray_cluster.py": ReadFile(test, "start_ray_cluster.py"), - // codeflare-sdk installation script - "install-codeflare-sdk.sh": ReadFile(test, "install-codeflare-sdk.sh"), - }) - - // Create RBAC, retrieve token for user with limited rights - policyRules := []rbacv1.PolicyRule{ - { - Verbs: []string{"get", "create", "delete", "list", "patch", "update"}, - APIGroups: []string{mcadv1beta1.GroupName}, - Resources: []string{"appwrappers"}, - }, - { - Verbs: []string{"get", "list"}, - APIGroups: []string{rayv1.GroupVersion.Group}, - Resources: []string{"rayclusters", "rayclusters/status"}, - }, - { - Verbs: []string{"get", "list"}, - APIGroups: []string{"route.openshift.io"}, - Resources: []string{"routes"}, - }, - { - Verbs: []string{"get", "list"}, - APIGroups: []string{"networking.k8s.io"}, - Resources: []string{"ingresses"}, - }, - } - - sa := CreateServiceAccount(test, namespace.Name) - role := CreateRole(test, namespace.Name, policyRules) - CreateRoleBinding(test, namespace.Name, sa, role) - - job := &batchv1.Job{ - TypeMeta: metav1.TypeMeta{ - APIVersion: batchv1.SchemeGroupVersion.String(), - Kind: "Job", - }, - ObjectMeta: metav1.ObjectMeta{ - Name: "sdk", - Namespace: namespace.Name, - }, - Spec: batchv1.JobSpec{ - Completions: Ptr(int32(1)), - Parallelism: Ptr(int32(1)), - BackoffLimit: Ptr(int32(0)), - Template: corev1.PodTemplateSpec{ - Spec: corev1.PodSpec{ - Volumes: []corev1.Volume{ - { - Name: "test", - VolumeSource: corev1.VolumeSource{ - ConfigMap: &corev1.ConfigMapVolumeSource{ - LocalObjectReference: corev1.LocalObjectReference{ - Name: config.Name, - }, - }, - }, - }, - { - Name: 
"codeflare-sdk", - VolumeSource: corev1.VolumeSource{ - EmptyDir: &corev1.EmptyDirVolumeSource{}, - }, - }, - { - Name: "workdir", - VolumeSource: corev1.VolumeSource{ - EmptyDir: &corev1.EmptyDirVolumeSource{}, - }, - }, - }, - Containers: []corev1.Container{ - { - Name: "test", - // FIXME: switch to base Python image once the dependency on OpenShift CLI is removed - // See https://github.com/project-codeflare/codeflare-sdk/pull/146 - Image: "quay.io/opendatahub/notebooks:jupyter-minimal-ubi8-python-3.8-4c8f26e", - Env: []corev1.EnvVar{ - {Name: "PYTHONUSERBASE", Value: "/workdir"}, - {Name: "RAY_IMAGE", Value: GetRayImage()}, - }, - Command: []string{ - "/bin/sh", "-c", - "while [ ! -f /codeflare-sdk/pyproject.toml ]; do sleep 1; done; " + - "cp /test/* . && chmod +x install-codeflare-sdk.sh && ./install-codeflare-sdk.sh && python start_ray_cluster.py " + namespace.Name, - }, - VolumeMounts: []corev1.VolumeMount{ - { - Name: "test", - MountPath: "/test", - }, - { - Name: "codeflare-sdk", - MountPath: "/codeflare-sdk", - }, - { - Name: "workdir", - MountPath: "/workdir", - }, - }, - WorkingDir: "/workdir", - SecurityContext: &corev1.SecurityContext{ - AllowPrivilegeEscalation: Ptr(false), - SeccompProfile: &corev1.SeccompProfile{ - Type: "RuntimeDefault", - }, - Capabilities: &corev1.Capabilities{ - Drop: []corev1.Capability{"ALL"}, - }, - RunAsNonRoot: Ptr(true), - }, - }, - }, - RestartPolicy: corev1.RestartPolicyNever, - ServiceAccountName: sa.Name, - }, - }, - }, - } - if GetClusterType(test) == KindCluster { - // Take first KinD node and redirect pod hostname requests there - node := GetNodes(test)[0] - hostname := GetClusterHostname(test) - IP := GetNodeInternalIP(test, node) - - test.T().Logf("Setting KinD cluster hostname '%s' to node IP '%s' for SDK pod", hostname, IP) - job.Spec.Template.Spec.HostAliases = []corev1.HostAlias{ - { - IP: IP, - Hostnames: []string{hostname}, - }, - } - - // Propagate hostname into Python code as env variable - hostnameEnvVar := corev1.EnvVar{Name: "CLUSTER_HOSTNAME", Value: hostname} - job.Spec.Template.Spec.Containers[0].Env = append(job.Spec.Template.Spec.Containers[0].Env, hostnameEnvVar) - fmt.Printf("CLUSTER_HOSTNAME environment variable value: %s\n", hostname) - test.T().Logf("CLUSTER_HOSTNAME environment variable value: %s", hostname) - } - - job, err := test.Client().Core().BatchV1().Jobs(namespace.Name).Create(test.Ctx(), job, metav1.CreateOptions{}) - test.Expect(err).NotTo(HaveOccurred()) - test.T().Logf("Created Job %s/%s successfully", job.Namespace, job.Name) - - // Setup the codeflare-sdk inside the pod associated to the created job - SetupCodeflareSDKInsidePod(test, namespace, job.Name) - - test.T().Logf("Waiting for Job %s/%s to complete", job.Namespace, job.Name) - test.Eventually(Job(test, job.Namespace, job.Name), TestTimeoutLong).Should( - Or( - WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue)), - WithTransform(ConditionStatus(batchv1.JobFailed), Equal(corev1.ConditionTrue)), - )) - - // Assert the job has completed successfully - test.Expect(GetJob(test, job.Namespace, job.Name)). - To(WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue))) -} - -// Submit a Job to the Ray cluster and trains the MNIST dataset using the CodeFlare SDK. 
-func TestMnistJobSubmit(t *testing.T) { - - test := With(t) - - namespace := GetNamespaceWithName(test, nsName) - - //delete the namespace after test complete - defer DeleteTestNamespace(test, namespace) - - // Test configuration - config := CreateConfigMap(test, namespace.Name, map[string][]byte{ - // SDK script - "mnist_rayjob.py": ReadFile(test, "mnist_rayjob.py"), - // pip requirements - "requirements.txt": ReadFile(test, "mnist_pip_requirements.txt"), - // MNIST training script - "mnist.py": ReadFile(test, "mnist.py"), - // codeflare-sdk installation script - "install-codeflare-sdk.sh": ReadFile(test, "install-codeflare-sdk.sh"), - }) - - // Create RBAC, retrieve token for user with limited rights - policyRules := []rbacv1.PolicyRule{ - { - Verbs: []string{"get", "create", "delete", "list", "patch", "update"}, - APIGroups: []string{mcadv1beta1.GroupName}, - Resources: []string{"appwrappers"}, - }, - { - Verbs: []string{"get", "list"}, - APIGroups: []string{rayv1.GroupVersion.Group}, - Resources: []string{"rayclusters", "rayclusters/status"}, - }, - { - Verbs: []string{"get", "list"}, - APIGroups: []string{"route.openshift.io"}, - Resources: []string{"routes"}, - }, - { - Verbs: []string{"get", "list"}, - APIGroups: []string{"networking.k8s.io"}, - Resources: []string{"ingresses"}, - }, - } - - serviceAccount := CreateServiceAccount(test, namespace.Name) - role := CreateRole(test, namespace.Name, policyRules) - CreateRoleBinding(test, namespace.Name, serviceAccount, role) - - job := &batchv1.Job{ - TypeMeta: metav1.TypeMeta{ - APIVersion: batchv1.SchemeGroupVersion.String(), - Kind: "Job", - }, - ObjectMeta: metav1.ObjectMeta{ - Name: "rayjob", - Namespace: namespace.Name, - }, - Spec: batchv1.JobSpec{ - Completions: Ptr(int32(1)), - Parallelism: Ptr(int32(1)), - BackoffLimit: Ptr(int32(0)), - Template: corev1.PodTemplateSpec{ - Spec: corev1.PodSpec{ - Volumes: []corev1.Volume{ - { - Name: "test", - VolumeSource: corev1.VolumeSource{ - ConfigMap: &corev1.ConfigMapVolumeSource{ - LocalObjectReference: corev1.LocalObjectReference{ - Name: config.Name, - }, - }, - }, - }, - { - Name: "codeflare-sdk", - VolumeSource: corev1.VolumeSource{ - EmptyDir: &corev1.EmptyDirVolumeSource{}, - }, - }, - { - Name: "workdir", - VolumeSource: corev1.VolumeSource{ - EmptyDir: &corev1.EmptyDirVolumeSource{}, - }, - }, - }, - Containers: []corev1.Container{ - { - Name: "test", - // FIXME: switch to base Python image once the dependency on OpenShift CLI is removed - // See https://github.com/project-codeflare/codeflare-sdk/pull/146 - Image: "quay.io/opendatahub/notebooks:jupyter-minimal-ubi8-python-3.8-4c8f26e", - Env: []corev1.EnvVar{ - {Name: "PYTHONUSERBASE", Value: "/workdir"}, - {Name: "RAY_IMAGE", Value: GetRayImage()}, - }, - Command: []string{ - "/bin/sh", "-c", - "while [ ! -f /codeflare-sdk/pyproject.toml ]; do sleep 1; done; " + - "cp /test/* . 
&& chmod +x install-codeflare-sdk.sh && ./install-codeflare-sdk.sh && python mnist_rayjob.py " + namespace.Name, - }, - VolumeMounts: []corev1.VolumeMount{ - { - Name: "test", - MountPath: "/test", - }, - { - Name: "codeflare-sdk", - MountPath: "/codeflare-sdk", - }, - { - Name: "workdir", - MountPath: "/workdir", - }, - }, - WorkingDir: "/workdir", - SecurityContext: &corev1.SecurityContext{ - AllowPrivilegeEscalation: Ptr(false), - SeccompProfile: &corev1.SeccompProfile{ - Type: "RuntimeDefault", - }, - Capabilities: &corev1.Capabilities{ - Drop: []corev1.Capability{"ALL"}, - }, - RunAsNonRoot: Ptr(true), - }, - }, - }, - RestartPolicy: corev1.RestartPolicyNever, - ServiceAccountName: serviceAccount.Name, - }, - }, - }, - } - - if GetClusterType(test) == KindCluster { - // Take first KinD node and redirect pod hostname requests there - node := GetNodes(test)[0] - hostname := GetClusterHostname(test) - IP := GetNodeInternalIP(test, node) - - test.T().Logf("Setting KinD cluster hostname '%s' to node IP '%s' for SDK pod", hostname, IP) - job.Spec.Template.Spec.HostAliases = []corev1.HostAlias{ - { - IP: IP, - Hostnames: []string{hostname}, - }, - } - - // Propagate hostname into Python code as env variable - hostnameEnvVar := corev1.EnvVar{Name: "CLUSTER_HOSTNAME", Value: hostname} - job.Spec.Template.Spec.Containers[0].Env = append(job.Spec.Template.Spec.Containers[0].Env, hostnameEnvVar) - } - - job, err := test.Client().Core().BatchV1().Jobs(nsName).Create(test.Ctx(), job, metav1.CreateOptions{}) - test.Expect(err).NotTo(HaveOccurred()) - test.T().Logf("Created Job %s/%s successfully", job.Namespace, job.Name) - - // Setup the codeflare-sdk inside the pod associated to the created job - SetupCodeflareSDKInsidePod(test, namespace, job.Name) - - test.T().Logf("Waiting for Job %s/%s to complete", job.Namespace, job.Name) - test.Eventually(Job(test, job.Namespace, job.Name), TestTimeoutLong).Should( - Or( - WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue)), - WithTransform(ConditionStatus(batchv1.JobFailed), Equal(corev1.ConditionTrue)), - )) - - // Assert the job has completed successfully - test.Expect(GetJob(test, job.Namespace, job.Name)). 
- To(WithTransform(ConditionStatus(batchv1.JobComplete), Equal(corev1.ConditionTrue))) - -} diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py new file mode 100644 index 00000000..2455cae4 --- /dev/null +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -0,0 +1,164 @@ +import requests +from time import sleep + +from torchx.specs.api import AppState, is_terminal + +from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication +from codeflare_sdk.job import RayJobClient + +import pytest + +from tests.e2e.support import * +from codeflare_sdk.cluster.cluster import get_cluster + +namespace = "test-ns-rayupgrade" + + +# Creates a Ray cluster +class TestMNISTRayClusterUp: + def setup_method(self): + initialize_kubernetes_client(self) + create_namespace_with_name(self, namespace) + cluster_queue = "cluster-queue" # add cluster name here + create_local_queue(self, cluster_queue) + + def test_mnist_ray_cluster_sdk_auth(self): + self.run_mnist_raycluster_sdk_oauth() + + def run_mnist_raycluster_sdk_oauth(self): + ray_image = get_ray_image() + + auth = TokenAuthentication( + token=run_oc_command(["whoami", "--show-token=true"]), + server=run_oc_command(["whoami", "--show-server=true"]), + skip_tls=True, + ) + auth.login() + + cluster = Cluster( + ClusterConfiguration( + name="mnist", + namespace=self.namespace, + num_workers=1, + head_cpus="1", + head_memory=2, + min_cpus="1", + max_cpus=1, + min_memory=1, + max_memory=2, + num_gpus=0, + instascale=False, + image=ray_image, + write_to_file=True, + mcad=False, + ) + ) + + try: + cluster.up() + cluster.status() + # wait for raycluster to be Ready + cluster.wait_ready() + cluster.status() + # Check cluster details + cluster.details() + # Assert the cluster status is READY + _, ready = cluster.status() + assert ready, "Cluster is not ready!" + + except Exception as e: + print(f"An unexpected error occurred. Error: ", e) + delete_namespace(self) + + +class TestMnistJobSubmit: + def setup_method(self): + initialize_kubernetes_client(self) + self.namespace = namespace + self.cluster = get_cluster("mnist", self.namespace) + if not self.cluster: + raise RuntimeError("TestRayClusterUp needs to be run before this test") + + def teardown_method(self): + delete_namespace(self) + + def test_mnist_job_submission(self): + auth = TokenAuthentication( + token=run_oc_command(["whoami", "--show-token=true"]), + server=run_oc_command(["whoami", "--show-server=true"]), + skip_tls=True, + ) + auth.login() + + self.assert_jobsubmit_withoutLogin(self.cluster) + self.assert_jobsubmit_withlogin(self.cluster) + self.cluster.down() + + # Assertions + def assert_jobsubmit_withoutLogin(self, cluster): + dashboard_url = cluster.cluster_dashboard_uri() + jobdata = { + "entrypoint": "python mnist.py", + "runtime_env": { + "working_dir": "./tests/e2e/", + "pip": "./tests/e2e/mnist_pip_requirements.txt", + }, + } + try: + response = requests.post( + dashboard_url + "/api/jobs/", verify=False, json=jobdata + ) + if response.status_code == 403: + assert True + else: + response.raise_for_status() + assert False + + except Exception as e: + print(f"An unexpected error occurred. 
Error: {e}") + assert False + + def assert_jobsubmit_withlogin(self, cluster): + auth_token = run_oc_command(["whoami", "--show-token=true"]) + ray_dashboard = cluster.cluster_dashboard_uri() + header = {"Authorization": f"Bearer {auth_token}"} + client = RayJobClient(address=ray_dashboard, headers=header, verify=False) + + # Submit the job + submission_id = client.submit_job( + entrypoint="python mnist.py", + runtime_env={ + "working_dir": "./tests/e2e/", + "pip": "./tests/e2e/mnist_pip_requirements.txt", + }, + ) + print(f"Submitted job with ID: {submission_id}") + done = False + time = 0 + timeout = 900 + while not done: + status = client.get_job_status(submission_id) + if status.is_terminal(): + break + if not done: + print(status) + if timeout and time >= timeout: + raise TimeoutError(f"job has timed out after waiting {timeout}s") + sleep(5) + time += 5 + + logs = client.get_job_logs(submission_id) + print(logs) + + self.assert_job_completion(status) + + client.delete_job(submission_id) + cluster.down() + + def assert_job_completion(self, status): + if status == "SUCCEEDED": + print(f"Job has completed: '{status}'") + assert True + else: + print(f"Job has completed: '{status}'") + assert False From 0cbf3a21746334adffae69190fe1dafd32f1b4fe Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Fri, 12 Apr 2024 16:43:22 +0530 Subject: [PATCH 196/496] Adjust support methods to use exceptions and remove explicit use of mcad parameter in cluster-configuration function (#494) --- tests/e2e/support.py | 12 +++++++----- tests/upgrade/raycluster_sdk_upgrade_test.py | 9 +++++---- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 7657cf21..24256341 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -4,7 +4,6 @@ import subprocess from kubernetes import client, config import kubernetes.client -import subprocess def get_ray_image(): @@ -27,10 +26,13 @@ def create_namespace(self): def create_namespace_with_name(self, namespace_name): self.namespace = namespace_name - namespace_body = client.V1Namespace( - metadata=client.V1ObjectMeta(name=self.namespace) - ) - self.api_instance.create_namespace(namespace_body) + try: + namespace_body = client.V1Namespace( + metadata=client.V1ObjectMeta(name=self.namespace) + ) + self.api_instance.create_namespace(namespace_body) + except Exception as e: + return _kube_api_error_handling(e) def delete_namespace(self): diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index 2455cae4..a3630522 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -19,7 +19,7 @@ class TestMNISTRayClusterUp: def setup_method(self): initialize_kubernetes_client(self) create_namespace_with_name(self, namespace) - cluster_queue = "cluster-queue" # add cluster name here + cluster_queue = "cluster-queue" # add cluster-queue name here create_local_queue(self, cluster_queue) def test_mnist_ray_cluster_sdk_auth(self): @@ -50,7 +50,6 @@ def run_mnist_raycluster_sdk_oauth(self): instascale=False, image=ray_image, write_to_file=True, - mcad=False, ) ) @@ -58,17 +57,19 @@ def run_mnist_raycluster_sdk_oauth(self): cluster.up() cluster.status() # wait for raycluster to be Ready - cluster.wait_ready() + # cluster.wait_ready() # temporarily broken + sleep(60) cluster.status() # Check cluster details cluster.details() # Assert the cluster status is READY _, ready = cluster.status() - assert ready, "Cluster is not ready!" 
+ assert ready except Exception as e: print(f"An unexpected error occurred. Error: ", e) delete_namespace(self) + assert False, "Cluster is not ready!" class TestMnistJobSubmit: From 134e07279cc8cda46e32c7af2aac373a4442d981 Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Fri, 12 Apr 2024 17:01:15 +0530 Subject: [PATCH 197/496] Adjust assertion of job-submission_without_login to use RayjobClient instead of REST post request (#494) --- tests/upgrade/raycluster_sdk_upgrade_test.py | 23 +++++--------------- 1 file changed, 6 insertions(+), 17 deletions(-) diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index a3630522..6157dc43 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -57,7 +57,7 @@ def run_mnist_raycluster_sdk_oauth(self): cluster.up() cluster.status() # wait for raycluster to be Ready - # cluster.wait_ready() # temporarily broken + # cluster.wait_ready() #temporarily broken sleep(60) cluster.status() # Check cluster details @@ -98,27 +98,16 @@ def test_mnist_job_submission(self): # Assertions def assert_jobsubmit_withoutLogin(self, cluster): dashboard_url = cluster.cluster_dashboard_uri() - jobdata = { - "entrypoint": "python mnist.py", - "runtime_env": { - "working_dir": "./tests/e2e/", - "pip": "./tests/e2e/mnist_pip_requirements.txt", - }, - } try: - response = requests.post( - dashboard_url + "/api/jobs/", verify=False, json=jobdata - ) - if response.status_code == 403: + RayJobClient(address=dashboard_url, verify=False) + assert False + except Exception as e: + if e.response.status_code == 403: assert True else: - response.raise_for_status() + print(f"An unexpected error occurred. Error: {e}") assert False - except Exception as e: - print(f"An unexpected error occurred. 
Error: {e}") - assert False - def assert_jobsubmit_withlogin(self, cluster): auth_token = run_oc_command(["whoami", "--show-token=true"]) ray_dashboard = cluster.cluster_dashboard_uri() From 6ca9440aa0d70e20818a1bc1850992817ba6945d Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Wed, 17 Apr 2024 12:43:59 +0530 Subject: [PATCH 198/496] Adjusted support method to use custom_api client to create kueue resources --- tests/e2e/kueue_resources_setup.sh | 53 --------- tests/e2e/support.py | 109 ++++++++++++++++++- tests/upgrade/raycluster_sdk_upgrade_test.py | 16 +-- 3 files changed, 113 insertions(+), 65 deletions(-) delete mode 100755 tests/e2e/kueue_resources_setup.sh diff --git a/tests/e2e/kueue_resources_setup.sh b/tests/e2e/kueue_resources_setup.sh deleted file mode 100755 index 23df1abf..00000000 --- a/tests/e2e/kueue_resources_setup.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/bash - -name=${name:-cluster-queue-mnist} -flavor=${flavor:-default-flavor-mnist} -local_queue_name=${local_queue_name:-local-queue-mnist} -namespace=$1 - -echo "Applying Cluster Queue" - -cat < Date: Thu, 18 Apr 2024 12:09:54 +0530 Subject: [PATCH 199/496] Add kueue resources and cluster roles to create/get/delete Kueue resources --- .github/workflows/e2e_tests.yaml | 9 +++++++++ tests/upgrade/raycluster_sdk_upgrade_test.py | 6 +++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index f3ecc477..83ed3988 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -96,6 +96,9 @@ jobs: with: user-name: sdk-user + - name: Add kueue resources + run: kubectl apply --server-side -f "https://github.com/kubernetes-sigs/kueue/releases/download/v0.6.2/manifests.yaml" + - name: Configure RBAC for sdk user with limited permissions run: | kubectl create clusterrole list-ingresses --verb=get,list --resource=ingresses @@ -104,6 +107,12 @@ jobs: kubectl create clusterrolebinding sdk-user-namespace-creator --clusterrole=namespace-creator --user=sdk-user kubectl create clusterrole raycluster-creator --verb=get,list,create,delete,patch --resource=rayclusters kubectl create clusterrolebinding sdk-user-raycluster-creator --clusterrole=raycluster-creator --user=sdk-user + kubectl create clusterrole resourceflavor-creator --verb=get,list,create,delete --resource=resourceflavors + kubectl create clusterrolebinding sdk-user-resourceflavor-creator --clusterrole=resourceflavor-creator --user=sdk-user + kubectl create clusterrole clusterqueue-creator --verb=get,list,create,delete,patch --resource=clusterqueues + kubectl create clusterrolebinding sdk-user-clusterqueue-creator --clusterrole=clusterqueue-creator --user=sdk-user + kubectl create clusterrole localqueue-creator --verb=get,list,create,delete,patch --resource=localqueues + kubectl create clusterrolebinding sdk-user-localqueue-creator --clusterrole=localqueue-creator --user=sdk-user kubectl config use-context sdk-user - name: Run e2e tests diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index 08f4ebb4..c92d1963 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -41,16 +41,16 @@ def run_mnist_raycluster_sdk_oauth(self): name="mnist", namespace=self.namespace, num_workers=1, - head_cpus="1", + head_cpus=1, head_memory=2, - min_cpus="1", + min_cpus=1, max_cpus=1, min_memory=1, max_memory=2, num_gpus=0, - instascale=False, image=ray_image, 
write_to_file=True, + verify_tls=False, ) ) From 84b43c29188e5ce365baafbd87f2fb8991c0474c Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 21 Mar 2024 14:51:53 +0000 Subject: [PATCH 200/496] Added demo notebook for Kueue by default Updated existing notebooks in favour of Kueue specific nb Updated wording --- .../additional-demos/hf_interactive.ipynb | 14 +- .../additional-demos/local_interactive.ipynb | 158 +++--------------- demo-notebooks/guided-demos/0_basic_ray.ipynb | 13 +- .../guided-demos/1_basic_instascale.ipynb | 9 +- .../guided-demos/2_job_client.ipynb | 77 +++++++++ .../guided-demos/3_basic_interactive.ipynb | 13 +- .../notebook-ex-outputs/0_basic_ray.ipynb | 13 +- .../1_basic_instascale.ipynb | 9 +- .../3_basic_interactive.ipynb | 13 +- .../preview_nbs/0_basic_ray.ipynb | 13 +- .../preview_nbs/1_basic_instascale.ipynb | 9 +- .../preview_nbs/3_basic_interactive.ipynb | 13 +- 12 files changed, 174 insertions(+), 180 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 37216b5d..28441037 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -68,10 +68,12 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper).\n", + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding Ray Cluster).\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "The example here is a community image.\n", + "\n", + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { @@ -89,7 +91,8 @@ } ], "source": [ - "# Create our cluster and submit appwrapper\n", + "# Create our cluster and submit\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "cluster = Cluster(ClusterConfiguration(name='hfgputest', \n", " namespace=\"default\",\n", " num_workers=1,\n", @@ -99,7 +102,8 @@ " max_memory=16, \n", " num_gpus=4,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=True, machine_types=[\"m5.xlarge\", \"p3.8xlarge\"]))" + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", + " ))" ] }, { @@ -107,7 +111,7 @@ "id": "12eef53c", "metadata": {}, "source": [ - "Next, we want to bring our cluster up, so we call the `up()` function below to submit our cluster AppWrapper yaml onto the MCAD queue, and begin the process of obtaining our resource cluster." + "Next, we want to bring our cluster up, so we call the `up()` function below to submit our Ray Cluster onto the queue, and begin the process of obtaining our resource cluster." 
] }, { diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 674a655e..bd259756 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "9a44568b-61ef-41c7-8ad1-9a3b128f03a7", "metadata": { "tags": [] @@ -36,7 +36,9 @@ "source": [ "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "The example here is a community image.\n", + "\n", + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { @@ -48,7 +50,8 @@ }, "outputs": [], "source": [ - "# Create our cluster and submit appwrapper\n", + "# Create our cluster and submit\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "namespace = \"default\"\n", "cluster_name = \"hfgputest-1\"\n", "local_interactive = True\n", @@ -63,13 +66,13 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=False,\n", - " machine_types=[\"m5.xlarge\", \"p3.8xlarge\"]))" + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", + " ))" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "69968140-15e6-482f-9529-82b0cd19524b", "metadata": { "tags": [] @@ -81,21 +84,12 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "e20f9982-f671-460b-8c22-3d62e101fed9", "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Waiting for requested resources to be set up...\n", - "Requested cluster up and running!\n" - ] - } - ], + "outputs": [], "source": [ "cluster.wait_ready()" ] @@ -124,82 +118,13 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "id": "9483bb98-33b3-4beb-9b15-163d7e76c1d7", "metadata": { "scrolled": true, "tags": [] }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2023-06-27 19:14:16,088\tINFO client_builder.py:251 -- Passing the following kwargs to ray.init() on the server: logging_level\n", - "2023-06-27 19:14:16,100\tDEBUG worker.py:378 -- client gRPC channel state change: ChannelConnectivity.IDLE\n", - "2023-06-27 19:14:16,308\tDEBUG worker.py:378 -- client gRPC channel state change: ChannelConnectivity.CONNECTING\n", - "2023-06-27 19:14:16,434\tDEBUG worker.py:378 -- client gRPC channel state change: ChannelConnectivity.READY\n", - "2023-06-27 19:14:16,436\tDEBUG worker.py:807 -- Pinging server.\n", - "2023-06-27 19:14:18,634\tDEBUG worker.py:640 -- Retaining 00ffffffffffffffffffffffffffffffffffffff0100000001000000\n", - "2023-06-27 19:14:18,635\tDEBUG worker.py:564 -- Scheduling task get_dashboard_url 0 b'\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x01\\x00\\x00\\x00\\x01\\x00\\x00\\x00'\n", - "2023-06-27 19:14:18,645\tDEBUG worker.py:640 -- Retaining c8ef45ccd0112571ffffffffffffffffffffffff0100000001000000\n", - "2023-06-27 19:14:19,454\tDEBUG worker.py:636 -- Releasing c8ef45ccd0112571ffffffffffffffffffffffff0100000001000000\n" - ] - }, - { - "data": { - 
"text/html": [ - "
[deleted notebook HTML output: Ray client context table (Python 3.8.13, Ray 2.1.0, Dashboard at http://10.254.20.41:8265); HTML markup lost in extraction, see the text/plain ClientContext repr below]
      \n", - "
      \n" - ], - "text/plain": [ - "ClientContext(dashboard_url='10.254.20.41:8265', python_version='3.8.13', ray_version='2.1.0', ray_commit='23f34d948dae8de9b168667ab27e6cf940b3ae85', protocol_version='2022-10-05', _num_clients=1, _context_to_restore=)" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "import ray\n", "\n", @@ -209,7 +134,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "3436eb4a-217c-4109-a3c3-309fda7e2442", "metadata": {}, "outputs": [], @@ -233,72 +158,33 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "5cca1874-2be3-4631-ae48-9adfa45e3af3", "metadata": { "scrolled": true, "tags": [] }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2023-06-27 19:14:28,222\tDEBUG worker.py:640 -- Retaining 00ffffffffffffffffffffffffffffffffffffff0100000002000000\n", - "2023-06-27 19:14:28,222\tDEBUG worker.py:564 -- Scheduling task heavy_calculation 0 b'\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00'\n" - ] - } - ], + "outputs": [], "source": [ "ref = heavy_calculation.remote(3000)" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "id": "01172c29-e8bf-41ef-8db5-eccb07906111", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2023-06-27 19:14:29,202\tDEBUG worker.py:640 -- Retaining 16310a0f0a45af5cffffffffffffffffffffffff0100000001000000\n", - "2023-06-27 19:14:31,224\tDEBUG worker.py:439 -- Internal retry for get [ClientObjectRef(16310a0f0a45af5cffffffffffffffffffffffff0100000001000000)]\n" - ] - }, - { - "data": { - "text/plain": [ - "1789.4644387076714" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "ray.get(ref)" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "id": "9e79b547-a457-4232-b77d-19147067b972", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2023-06-27 19:14:33,161\tDEBUG dataclient.py:287 -- Got unawaited response connection_cleanup {\n", - "}\n", - "\n", - "2023-06-27 19:14:34,460\tDEBUG dataclient.py:278 -- Shutting down data channel.\n" - ] - } - ], + "outputs": [], "source": [ "ray.cancel(ref)\n", "ray.shutdown()" @@ -306,7 +192,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "id": "2c198f1f-68bf-43ff-a148-02b5cb000ff2", "metadata": {}, "outputs": [], @@ -339,7 +225,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.17" + "version": "3.9.18" }, "vscode": { "interpreter": { diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 205f0217..9b6aace7 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -45,10 +45,12 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper).\n", + "Here, we want to define our cluster by specifying the resources we require for our batch workload. 
Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "The example here is a community image.\n", + "\n", + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { @@ -58,7 +60,8 @@ "metadata": {}, "outputs": [], "source": [ - "# Create and configure our cluster object (and appwrapper)\n", + "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", " namespace='default',\n", @@ -69,7 +72,7 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=False\n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, @@ -78,7 +81,7 @@ "id": "12eef53c", "metadata": {}, "source": [ - "Next, we want to bring our cluster up, so we call the `up()` function below to submit our cluster AppWrapper yaml onto the MCAD queue, and begin the process of obtaining our resource cluster." + "Next, we want to bring our cluster up, so we call the `up()` function below to submit our Ray Cluster onto the queue, and begin the process of obtaining our resource cluster." ] }, { diff --git a/demo-notebooks/guided-demos/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/1_basic_instascale.ipynb index 418737eb..07983c6e 100644 --- a/demo-notebooks/guided-demos/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/1_basic_instascale.ipynb @@ -5,7 +5,9 @@ "id": "9865ee8c", "metadata": {}, "source": [ - "In this second notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments)." + "In this second notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", + "\n", + "NOTE: The InstaScale and MCAD components are in Tech Preview" ] }, { @@ -45,7 +47,9 @@ "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." 
+ "The example here is a community image.\n", + "\n", + "NOTE: This specific demo requires MCAD and InstaScale to be enabled on the Cluster" ] }, { @@ -66,6 +70,7 @@ " max_memory=8,\n", " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " mcad=True, # Enable MCAD\n", " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", "))" diff --git a/demo-notebooks/guided-demos/2_job_client.ipynb b/demo-notebooks/guided-demos/2_job_client.ipynb index 7b3d619b..73bd5dd9 100644 --- a/demo-notebooks/guided-demos/2_job_client.ipynb +++ b/demo-notebooks/guided-demos/2_job_client.ipynb @@ -52,7 +52,13 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", +<<<<<<< HEAD:demo-notebooks/guided-demos/2_job_client.ipynb " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", +======= + " mcad=True,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " instascale=False\n", +>>>>>>> 583747f (Added demo notebook for Kueue by default):demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb "))" ] }, @@ -252,6 +258,77 @@ { "cell_type": "code", "execution_count": null, +<<<<<<< HEAD:demo-notebooks/guided-demos/2_job_client.ipynb +======= + "id": "496139cc", + "metadata": {}, + "outputs": [], + "source": [ + "jobdef = DDPJobDefinition(\n", + " name=\"mnistjob\",\n", + " script=\"mnist.py\",\n", + " scheduler_args={\"namespace\": \"default\"},\n", + " j=\"1x1\",\n", + " gpu=0,\n", + " cpu=1,\n", + " memMB=8000,\n", + " mcad=True,\n", + " image=\"quay.io/project-codeflare/mnist-job-test:v0.0.1\"\n", + ")\n", + "job = jobdef.submit()" + ] + }, + { + "cell_type": "markdown", + "id": "0837e43b", + "metadata": {}, + "source": [ + "Once again, we can look at job status and logs:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3d18d42c", + "metadata": {}, + "outputs": [], + "source": [ + "job.status()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36d7ea97", + "metadata": {}, + "outputs": [], + "source": [ + "job.logs()" + ] + }, + { + "cell_type": "markdown", + "id": "aebf376a", + "metadata": {}, + "source": [ + "This time, once the pods complete, we can clean them up alongside any other associated resources. The following command can also be used to delete jobs early for both Ray and MCAD submission:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ebbb0674", + "metadata": {}, + "outputs": [], + "source": [ + "job.cancel()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0d41b90e", +>>>>>>> 583747f (Added demo notebook for Kueue by default):demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb "metadata": {}, "outputs": [], "source": [ diff --git a/demo-notebooks/guided-demos/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/3_basic_interactive.ipynb index 090a4a30..346d9189 100644 --- a/demo-notebooks/guided-demos/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/3_basic_interactive.ipynb @@ -5,7 +5,7 @@ "id": "bbc21043", "metadata": {}, "source": [ - "In this fourth and final notebook, we will go over how to leverage the SDK to directly work interactively with a Ray cluster during development." + "In this fourth notebook, we will go over how to leverage the SDK to directly work interactively with a Ray Cluster during development." 
] }, { @@ -45,7 +45,9 @@ "Once again, let's start by running through the same cluster setup as before:\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "The example here is a community image.\n", + "\n", + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { @@ -55,7 +57,8 @@ "metadata": {}, "outputs": [], "source": [ - "# Create and configure our cluster object (and appwrapper)\n", + "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name='interactivetest',\n", " namespace='default',\n", @@ -66,9 +69,7 @@ " max_memory=8,\n", " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=True, #<---instascale enabled\n", - " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"]\n", - " \n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index d8f6c34c..ccc59cb0 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -45,10 +45,12 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper).\n", + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." 
+ "The example here is a community image.\n", + "\n", + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { @@ -66,7 +68,8 @@ } ], "source": [ - "# Create and configure our cluster object (and appwrapper)\n", + "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", " namespace='default',\n", @@ -77,7 +80,7 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=False\n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, @@ -354,7 +357,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.17" + "version": "3.9.undefined" }, "vscode": { "interpreter": { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb index 4b28b205..ba8426dc 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb @@ -5,7 +5,9 @@ "id": "9865ee8c", "metadata": {}, "source": [ - "In this second notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments)." + "In this second notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", + "\n", + "NOTE: The InstaScale and MCAD components are in Tech Preview" ] }, { @@ -45,7 +47,9 @@ "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "The example here is a community image.\n", + "\n", + "NOTE: This specific demo requires MCAD and InstaScale to be enabled on the Cluster" ] }, { @@ -74,6 +78,7 @@ " max_memory=8,\n", " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " mcad=True, # Enable MCAD\n", " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", "))" diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb index 7ac00470..de673ed4 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb @@ -5,7 +5,7 @@ "id": "bbc21043", "metadata": {}, "source": [ - "In this fourth and final notebook, we will go over how to leverage the SDK to directly work interactively with a Ray cluster during development." 
+ "In this notebook, we will go over how to leverage the SDK to directly work interactively with a Ray cluster during development." ] }, { @@ -45,7 +45,9 @@ "Once again, let's start by running through the same cluster setup as before:\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "The example here is a community image.\n", + "\n", + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { @@ -63,7 +65,8 @@ } ], "source": [ - "# Create and configure our cluster object (and appwrapper)\n", + "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name='interactivetest',\n", " namespace='default',\n", @@ -74,9 +77,7 @@ " max_memory=8,\n", " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=True, #<---instascale enabled\n", - " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"]\n", - " \n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index b0f12d4b..7ea8e9e4 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -45,10 +45,12 @@ "id": "bc27f84c", "metadata": {}, "source": [ - "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding AppWrapper).\n", + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "The example here is a community image.\n", + "\n", + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { @@ -58,7 +60,8 @@ "metadata": {}, "outputs": [], "source": [ - "# Create and configure our cluster object (and appwrapper)\n", + "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", " namespace='default',\n", @@ -69,7 +72,7 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=False\n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, @@ -78,7 +81,7 @@ "id": "12eef53c", "metadata": {}, "source": [ - "Next, we want to bring our cluster up, so we call the `up()` function below to submit our cluster AppWrapper yaml onto the MCAD queue, and begin the process of obtaining our resource cluster." + "Next, we want to bring our cluster up, so we call the `up()` function below to submit our Ray Cluster onto the queue, and begin the process of obtaining our resource cluster." 
] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb index 418737eb..07983c6e 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb @@ -5,7 +5,9 @@ "id": "9865ee8c", "metadata": {}, "source": [ - "In this second notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments)." + "In this second notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", + "\n", + "NOTE: The InstaScale and MCAD components are in Tech Preview" ] }, { @@ -45,7 +47,9 @@ "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "The example here is a community image.\n", + "\n", + "NOTE: This specific demo requires MCAD and InstaScale to be enabled on the Cluster" ] }, { @@ -66,6 +70,7 @@ " max_memory=8,\n", " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " mcad=True, # Enable MCAD\n", " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", "))" diff --git a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb index 090a4a30..30665bba 100644 --- a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb @@ -5,7 +5,7 @@ "id": "bbc21043", "metadata": {}, "source": [ - "In this fourth and final notebook, we will go over how to leverage the SDK to directly work interactively with a Ray cluster during development." + "In this notebook, we will go over how to leverage the SDK to directly work interactively with a Ray Cluster during development." ] }, { @@ -45,7 +45,9 @@ "Once again, let's start by running through the same cluster setup as before:\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." 
+ "The example here is a community image.\n", + "\n", + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { @@ -55,7 +57,8 @@ "metadata": {}, "outputs": [], "source": [ - "# Create and configure our cluster object (and appwrapper)\n", + "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name='interactivetest',\n", " namespace='default',\n", @@ -66,9 +69,7 @@ " max_memory=8,\n", " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=True, #<---instascale enabled\n", - " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"]\n", - " \n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, From cd130352a78bd353f5291fd348ecc36178f6cefe Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 5 Apr 2024 17:58:50 +0100 Subject: [PATCH 201/496] Fixed nb --- .../guided-demos/2_job_client.ipynb | 77 ------------------- 1 file changed, 77 deletions(-) diff --git a/demo-notebooks/guided-demos/2_job_client.ipynb b/demo-notebooks/guided-demos/2_job_client.ipynb index 73bd5dd9..7b3d619b 100644 --- a/demo-notebooks/guided-demos/2_job_client.ipynb +++ b/demo-notebooks/guided-demos/2_job_client.ipynb @@ -52,13 +52,7 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", -<<<<<<< HEAD:demo-notebooks/guided-demos/2_job_client.ipynb " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", -======= - " mcad=True,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " instascale=False\n", ->>>>>>> 583747f (Added demo notebook for Kueue by default):demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb "))" ] }, @@ -258,77 +252,6 @@ { "cell_type": "code", "execution_count": null, -<<<<<<< HEAD:demo-notebooks/guided-demos/2_job_client.ipynb -======= - "id": "496139cc", - "metadata": {}, - "outputs": [], - "source": [ - "jobdef = DDPJobDefinition(\n", - " name=\"mnistjob\",\n", - " script=\"mnist.py\",\n", - " scheduler_args={\"namespace\": \"default\"},\n", - " j=\"1x1\",\n", - " gpu=0,\n", - " cpu=1,\n", - " memMB=8000,\n", - " mcad=True,\n", - " image=\"quay.io/project-codeflare/mnist-job-test:v0.0.1\"\n", - ")\n", - "job = jobdef.submit()" - ] - }, - { - "cell_type": "markdown", - "id": "0837e43b", - "metadata": {}, - "source": [ - "Once again, we can look at job status and logs:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3d18d42c", - "metadata": {}, - "outputs": [], - "source": [ - "job.status()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "36d7ea97", - "metadata": {}, - "outputs": [], - "source": [ - "job.logs()" - ] - }, - { - "cell_type": "markdown", - "id": "aebf376a", - "metadata": {}, - "source": [ - "This time, once the pods complete, we can clean them up alongside any other associated resources. 
The following command can also be used to delete jobs early for both Ray and MCAD submission:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ebbb0674", - "metadata": {}, - "outputs": [], - "source": [ - "job.cancel()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0d41b90e", ->>>>>>> 583747f (Added demo notebook for Kueue by default):demo-notebooks/guided-demos/preview_nbs/2_basic_jobs.ipynb "metadata": {}, "outputs": [], "source": [ From 05b758664694acb0695be6ec0ae909ede7735a9b Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 22 Apr 2024 12:04:03 +0100 Subject: [PATCH 202/496] Ray mTLS for basic Interactive --- .../additional-demos/local_interactive.ipynb | 167 +++++++++++++++--- .../guided-demos/3_basic_interactive.ipynb | 19 +- .../3_basic_interactive.ipynb | 19 +- .../preview_nbs/3_basic_interactive.ipynb | 21 ++- 4 files changed, 190 insertions(+), 36 deletions(-) diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index bd259756..31f1ad17 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "9a44568b-61ef-41c7-8ad1-9a3b128f03a7", "metadata": { "tags": [] @@ -36,9 +36,7 @@ "source": [ "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image.\n", - "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" + "The example here is a community image." 
] }, { @@ -50,14 +48,11 @@ }, "outputs": [], "source": [ - "# Create our cluster and submit\n", - "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "# Create and submit our Ray Cluster\n", "namespace = \"default\"\n", "cluster_name = \"hfgputest-1\"\n", - "local_interactive = True\n", "\n", - "cluster = Cluster(ClusterConfiguration(local_interactive=local_interactive,\n", - " namespace=namespace,\n", + "cluster = Cluster(ClusterConfiguration(namespace=namespace,\n", " name=cluster_name,\n", " num_workers=1,\n", " min_cpus=1,\n", @@ -65,14 +60,12 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", - " ))" + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",))" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "id": "69968140-15e6-482f-9529-82b0cd19524b", "metadata": { "tags": [] @@ -84,12 +77,21 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "id": "e20f9982-f671-460b-8c22-3d62e101fed9", "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Waiting for requested resources to be set up...\n", + "Requested cluster up and running!\n" + ] + } + ], "source": [ "cluster.wait_ready()" ] @@ -110,21 +112,89 @@ "outputs": [], "source": [ "from codeflare_sdk import generate_cert\n", - "\n", - "if local_interactive:\n", - " generate_cert.generate_tls_cert(cluster_name, namespace)\n", - " generate_cert.export_env(cluster_name, namespace)" + "# Export the environment variables to enable TLS\n", + "generate_cert.generate_tls_cert(cluster_name, namespace)\n", + "generate_cert.export_env(cluster_name, namespace)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "id": "9483bb98-33b3-4beb-9b15-163d7e76c1d7", "metadata": { "scrolled": true, "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-06-27 19:14:16,088\tINFO client_builder.py:251 -- Passing the following kwargs to ray.init() on the server: logging_level\n", + "2023-06-27 19:14:16,100\tDEBUG worker.py:378 -- client gRPC channel state change: ChannelConnectivity.IDLE\n", + "2023-06-27 19:14:16,308\tDEBUG worker.py:378 -- client gRPC channel state change: ChannelConnectivity.CONNECTING\n", + "2023-06-27 19:14:16,434\tDEBUG worker.py:378 -- client gRPC channel state change: ChannelConnectivity.READY\n", + "2023-06-27 19:14:16,436\tDEBUG worker.py:807 -- Pinging server.\n", + "2023-06-27 19:14:18,634\tDEBUG worker.py:640 -- Retaining 00ffffffffffffffffffffffffffffffffffffff0100000001000000\n", + "2023-06-27 19:14:18,635\tDEBUG worker.py:564 -- Scheduling task get_dashboard_url 0 b'\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x01\\x00\\x00\\x00\\x01\\x00\\x00\\x00'\n", + "2023-06-27 19:14:18,645\tDEBUG worker.py:640 -- Retaining c8ef45ccd0112571ffffffffffffffffffffffff0100000001000000\n", + "2023-06-27 19:14:19,454\tDEBUG worker.py:636 -- Releasing c8ef45ccd0112571ffffffffffffffffffffffff0100000001000000\n" + ] + }, + { + "data": { + "text/html": [ + "
      \n", + "
      \n", + "

      Ray

      \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "\n", + "\n", + "
      Python version:3.8.13
      Ray version: 2.1.0
      Dashboard:http://10.254.20.41:8265
      \n", + "
      \n", + "
      \n" + ], + "text/plain": [ + "ClientContext(dashboard_url='10.254.20.41:8265', python_version='3.8.13', ray_version='2.1.0', ray_commit='23f34d948dae8de9b168667ab27e6cf940b3ae85', protocol_version='2022-10-05', _num_clients=1, _context_to_restore=)" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "import ray\n", "\n", @@ -134,7 +204,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "id": "3436eb4a-217c-4109-a3c3-309fda7e2442", "metadata": {}, "outputs": [], @@ -158,33 +228,72 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "id": "5cca1874-2be3-4631-ae48-9adfa45e3af3", "metadata": { "scrolled": true, "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-06-27 19:14:28,222\tDEBUG worker.py:640 -- Retaining 00ffffffffffffffffffffffffffffffffffffff0100000002000000\n", + "2023-06-27 19:14:28,222\tDEBUG worker.py:564 -- Scheduling task heavy_calculation 0 b'\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00'\n" + ] + } + ], "source": [ "ref = heavy_calculation.remote(3000)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "id": "01172c29-e8bf-41ef-8db5-eccb07906111", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-06-27 19:14:29,202\tDEBUG worker.py:640 -- Retaining 16310a0f0a45af5cffffffffffffffffffffffff0100000001000000\n", + "2023-06-27 19:14:31,224\tDEBUG worker.py:439 -- Internal retry for get [ClientObjectRef(16310a0f0a45af5cffffffffffffffffffffffff0100000001000000)]\n" + ] + }, + { + "data": { + "text/plain": [ + "1789.4644387076714" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "ray.get(ref)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "id": "9e79b547-a457-4232-b77d-19147067b972", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-06-27 19:14:33,161\tDEBUG dataclient.py:287 -- Got unawaited response connection_cleanup {\n", + "}\n", + "\n", + "2023-06-27 19:14:34,460\tDEBUG dataclient.py:278 -- Shutting down data channel.\n" + ] + } + ], "source": [ "ray.cancel(ref)\n", "ray.shutdown()" @@ -192,7 +301,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "id": "2c198f1f-68bf-43ff-a148-02b5cb000ff2", "metadata": {}, "outputs": [], diff --git a/demo-notebooks/guided-demos/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/3_basic_interactive.ipynb index 346d9189..b0aa44ba 100644 --- a/demo-notebooks/guided-demos/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/3_basic_interactive.ipynb @@ -59,9 +59,11 @@ "source": [ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "namespace = \"default\"\n", + "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", - " name='interactivetest',\n", - " namespace='default',\n", + " name=cluster_name,\n", + " namespace=namespace,\n", " num_workers=2,\n", " min_cpus=2,\n", " max_cpus=2,\n", @@ -126,6 +128,19 @@ "Now we can connect directly to our Ray cluster via the Ray python client:" ] }, + { + "cell_type": "code", + 
"execution_count": null, + "id": "c9436436", + "metadata": {}, + "outputs": [], + "source": [ + "from codeflare_sdk import generate_cert\n", + "# Export the environment variables to enable TLS\n", + "generate_cert.generate_tls_cert(cluster_name, namespace)\n", + "generate_cert.export_env(cluster_name, namespace)" + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb index de673ed4..33d6af10 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb @@ -67,9 +67,11 @@ "source": [ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "namespace = \"default\"\n", + "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", - " name='interactivetest',\n", - " namespace='default',\n", + " name=cluster_name,\n", + " namespace=namespace,\n", " num_workers=2,\n", " min_cpus=2,\n", " max_cpus=2,\n", @@ -210,6 +212,19 @@ "Now we can connect directly to our Ray cluster via the Ray python client:" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "13eb52f6", + "metadata": {}, + "outputs": [], + "source": [ + "from codeflare_sdk import generate_cert\n", + "# Export the environment variables to enable TLS\n", + "generate_cert.generate_tls_cert(cluster_name, namespace)\n", + "generate_cert.export_env(cluster_name, namespace)" + ] + }, { "cell_type": "code", "execution_count": 6, diff --git a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb index 30665bba..fb071611 100644 --- a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb @@ -59,9 +59,11 @@ "source": [ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "namespace = \"default\"\n", + "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", - " name='interactivetest',\n", - " namespace='default',\n", + " name=cluster_name,\n", + " namespace=namespace,\n", " num_workers=2,\n", " min_cpus=2,\n", " max_cpus=2,\n", @@ -126,6 +128,19 @@ "Now we can connect directly to our Ray cluster via the Ray python client:" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5308271", + "metadata": {}, + "outputs": [], + "source": [ + "from codeflare_sdk import generate_cert\n", + "# Export the environment variables to enable TLS\n", + "generate_cert.generate_tls_cert(cluster_name, namespace)\n", + "generate_cert.export_env(cluster_name, namespace)" + ] + }, { "cell_type": "code", "execution_count": null, @@ -296,7 +311,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.13" + "version": "3.9.18" }, "vscode": { "interpreter": { From 1ac5a877bfbb10d4f52629c831400ffe3f70101c Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 24 Apr 2024 12:52:50 +0100 Subject: [PATCH 203/496] Added Cluster Ray Job Client demo --- .../ray_job_client.ipynb} | 2 +- ...lient.ipynb => 2_cluster_job_client.ipynb} | 80 +--- .../2_cluster_job_client.ipynb | 
223 +++++++++ .../notebook-ex-outputs/2_job_client.ipynb | 430 ------------------ .../preview_nbs/2_cluster_job_client.ipynb | 223 +++++++++ 5 files changed, 457 insertions(+), 501 deletions(-) rename demo-notebooks/{guided-demos/2_job_client.ipynb => additional-demos/ray_job_client.ipynb} (98%) rename demo-notebooks/guided-demos/{preview_nbs/2_job_client.ipynb => 2_cluster_job_client.ipynb} (70%) create mode 100644 demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb delete mode 100644 demo-notebooks/guided-demos/notebook-ex-outputs/2_job_client.ipynb create mode 100644 demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb diff --git a/demo-notebooks/guided-demos/2_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb similarity index 98% rename from demo-notebooks/guided-demos/2_job_client.ipynb rename to demo-notebooks/additional-demos/ray_job_client.ipynb index 7b3d619b..d24c92c6 100644 --- a/demo-notebooks/guided-demos/2_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "In this third demo we will go over the basics of the Ray Job Submission Client in the SDK" + "In this demo we will go over the basics of the RayJobClient in the SDK" ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/2_job_client.ipynb b/demo-notebooks/guided-demos/2_cluster_job_client.ipynb similarity index 70% rename from demo-notebooks/guided-demos/preview_nbs/2_job_client.ipynb rename to demo-notebooks/guided-demos/2_cluster_job_client.ipynb index 7b3d619b..8ec29cbe 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_job_client.ipynb +++ b/demo-notebooks/guided-demos/2_cluster_job_client.ipynb @@ -14,7 +14,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication, RayJobClient" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" ] }, { @@ -27,9 +27,8 @@ "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", "\n", - "auth_token = \"XXXXX\" # The auth_token is used later for the RayJobClient\n", "auth = TokenAuthentication(\n", - " token = auth_token,\n", + " token = \"XXXXX\",\n", " server = \"XXXXX\",\n", " skip_tls=False\n", ")\n", @@ -80,14 +79,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Ray Job Submission - Authorized Ray Cluster" + "### Ray Job Submission" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "* Submit a job using an authorized Ray dashboard and the Job Submission Client\n", + "* Initialise the Cluster Job Client \n", "* Provide an entrypoint command directed to your job script\n", "* Set up your runtime environment" ] @@ -98,16 +97,11 @@ "metadata": {}, "outputs": [], "source": [ - "# Gather the dashboard URL\n", - "ray_dashboard = cluster.cluster_dashboard_uri()\n", - "\n", - "# Create the header for passing your bearer token\n", - "header = {\n", - " 'Authorization': f'Bearer {auth_token}'\n", - "}\n", - "\n", - "# Initialize the RayJobClient\n", - "client = RayJobClient(address=ray_dashboard, headers=header, verify=True)" + "# Initialize the Job Submission Client\n", + "\"\"\"\n", + "The SDK will automatically gather the dashboard address and authenticate using the Ray Job Submission Client\n", + "\"\"\"\n", + "client = cluster.job_client" ] }, { @@ 
-116,7 +110,7 @@ "metadata": {}, "outputs": [], "source": [ - "# Submit an example mnist job using the RayJobClient\n", + "# Submit an example mnist job using the Job Submission Client\n", "submission_id = client.submit_job(\n", " entrypoint=\"python mnist.py\",\n", " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", @@ -186,60 +180,6 @@ "client.delete_job(submission_id)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Unauthorized Ray Cluster with the Ray Job Client" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\"\n", - "Initialise the RayJobClient with the Ray Dashboard\n", - "\"\"\"\n", - "ray_dashboard = cluster.cluster_dashboard_uri()\n", - "client = RayJobClient(address=ray_dashboard, verify=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Submit an example mnist job using the RayJobClient\n", - "submission_id = client.submit_job(\n", - " entrypoint=\"python mnist.py\",\n", - " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", - ")\n", - "print(submission_id)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Stop the job \n", - "client.stop_job(submission_id)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Delete the job\n", - "client.delete_job(submission_id)" - ] - }, { "cell_type": "code", "execution_count": null, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb new file mode 100644 index 00000000..8ec29cbe --- /dev/null +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb @@ -0,0 +1,223 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this third demo we will go over the basics of the Ray Job Submission Client in the SDK" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Import pieces from codeflare-sdk\n", + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create authentication object for user permissions\n", + "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", + "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", + "\n", + "auth = TokenAuthentication(\n", + " token = \"XXXXX\",\n", + " server = \"XXXXX\",\n", + " skip_tls=False\n", + ")\n", + "auth.login()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create and configure our cluster object\n", + "cluster = Cluster(ClusterConfiguration(\n", + " name='jobtest',\n", + " namespace='default',\n", + " num_workers=2,\n", + " min_cpus=1,\n", + " max_cpus=1,\n", + " min_memory=4,\n", + " max_memory=4,\n", + " num_gpus=0,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", + "))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Bring up the cluster\n", + "cluster.up()\n", + "cluster.wait_ready()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + 
"metadata": {}, + "outputs": [], + "source": [ + "cluster.details()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Ray Job Submission" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "* Initialise the Cluster Job Client \n", + "* Provide an entrypoint command directed to your job script\n", + "* Set up your runtime environment" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Initialize the Job Submission Client\n", + "\"\"\"\n", + "The SDK will automatically gather the dashboard address and authenticate using the Ray Job Submission Client\n", + "\"\"\"\n", + "client = cluster.job_client" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Submit an example mnist job using the Job Submission Client\n", + "submission_id = client.submit_job(\n", + " entrypoint=\"python mnist.py\",\n", + " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", + ")\n", + "print(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get the job's logs\n", + "client.get_job_logs(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get the job's status\n", + "client.get_job_status(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get job related info\n", + "client.get_job_info(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# List all existing jobs\n", + "client.list_jobs()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Iterate through the logs of a job \n", + "async for lines in client.tail_job_logs(submission_id):\n", + " print(lines, end=\"\") " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Delete a job\n", + "# Can run client.cancel_job(submission_id) first if job is still running\n", + "client.delete_job(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster.down()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "auth.logout()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_job_client.ipynb deleted file mode 100644 index 75000ce4..00000000 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_job_client.ipynb +++ /dev/null @@ -1,430 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "In this third demo we will go over the basics of the Ray Job Submission Client in the SDK" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - 
"outputs": [], - "source": [ - "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication, RayJobClient" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Create authentication object for user permissions\n", - "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", - "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "\n", - "auth_token = \"XXXXX\" # The auth_token is used later for the RayJobClient\n", - "auth = TokenAuthentication(\n", - " token = auth_token,\n", - " server = \"XXXXX\",\n", - " skip_tls=False\n", - ")\n", - "auth.login()" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Yaml resources loaded for jobtest\n" - ] - } - ], - "source": [ - "# Create and configure our cluster object\n", - "cluster = Cluster(ClusterConfiguration(\n", - " name='jobtest',\n", - " namespace='default',\n", - " num_workers=2,\n", - " min_cpus=1,\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", - " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", - "))" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Waiting for requested resources to be set up...\n", - "Requested cluster is up and running!\n", - "Dashboard is ready!\n" - ] - } - ], - "source": [ - "# Bring up the cluster\n", - "cluster.up()\n", - "cluster.wait_ready()" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
                        🚀 CodeFlare Cluster Details 🚀                  \n",
      -       "                                                                   \n",
      -       " ╭───────────────────────────────────────────────────────────────╮ \n",
      -       " │   Name                                                        │ \n",
      -       " │   jobtest                                        Active ✅    │ \n",
      -       " │                                                               │ \n",
      -       " │   URI: ray://jobtest-head-svc.default.svc:10001               │ \n",
      -       " │                                                               │ \n",
      -       " │   Dashboard🔗                                                 │ \n",
      -       " │                                                               │ \n",
      -       " │                       Cluster Resources                       │ \n",
      -       " │   ╭── Workers ──╮  ╭───────── Worker specs(each) ─────────╮   │ \n",
      -       " │   │  # Workers  │  │  Memory      CPU         GPU         │   │ \n",
      -       " │   │             │  │                                      │   │ \n",
      -       " │   │  2          │  │  4~4         1           0           │   │ \n",
      -       " │   │             │  │                                      │   │ \n",
      -       " │   ╰─────────────╯  ╰──────────────────────────────────────╯   │ \n",
      -       " ╰───────────────────────────────────────────────────────────────╯ \n",
      -       "
      \n" - ], - "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", - " ╭───────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mName\u001b[0m │ \n", - " │ \u001b[1;4mjobtest\u001b[0m Active ✅ │ \n", - " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://jobtest-head-svc.default.svc:10001 │ \n", - " │ │ \n", - " │ \u001b]8;id=561347;https://ray-dashboard-jobtest-default.apps.rosa.mcampbel.af68.p3.openshiftapps.com\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", - " │ │ \n", - " │ \u001b[3m Cluster Resources \u001b[0m │ \n", - " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m4~4 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m0 \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", - " ╰───────────────────────────────────────────────────────────────╯ \n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "RayCluster(name='jobtest', status=, head_cpus=2, head_mem=8, head_gpu=0, workers=2, worker_mem_min=4, worker_mem_max=4, worker_cpu=1, worker_gpu=0, namespace='default', dashboard='https://ray-dashboard-jobtest-default.apps.rosa.mcampbel.af68.p3.openshiftapps.com')" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "cluster.details()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Ray Job Submission - Authorized Ray Cluster" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "* Submit a job using an authorized Ray dashboard and the Job Submission Client\n", - "* Provide an entrypoint command directed to your job script\n", - "* Set up your runtime environment" - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "# Gather the dashboard URL\n", - "ray_dashboard = cluster.cluster_dashboard_uri()\n", - "\n", - "# Create the header for passing your bearer token\n", - "header = {\n", - " 'Authorization': f'Bearer {auth_token}'\n", - "}\n", - "\n", - "# Initialize the RayJobClient\n", - "client = RayJobClient(address=ray_dashboard, headers=header, verify=True)" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2024-04-03 12:16:07,112\tINFO dashboard_sdk.py:338 -- Uploading 
package gcs://_ray_pkg_431abdedbcc7e123.zip.\n", - "2024-04-03 12:16:07,115\tINFO packaging.py:518 -- Creating a file package for local directory './'.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "raysubmit_NvXkkh1QP1kdq4LG\n" - ] - } - ], - "source": [ - "# Submit an example mnist job using the RayJobClient\n", - "submission_id = client.submit_job(\n", - " entrypoint=\"python mnist.py\",\n", - " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", - ")\n", - "print(submission_id)" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "''" - ] - }, - "execution_count": 8, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Get the job's logs\n", - "client.get_job_logs(submission_id)" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Get the job's status\n", - "client.get_job_status(submission_id)" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "JobDetails(type=, job_id=None, submission_id='raysubmit_NvXkkh1QP1kdq4LG', driver_info=None, status=, entrypoint='python mnist.py', message='Job has not started yet. It may be waiting for the runtime environment to be set up.', error_type=None, start_time=1712142968879, end_time=None, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_431abdedbcc7e123.zip', 'pip': {'packages': ['pytorch_lightning==1.5.10', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address=None, driver_node_id=None)" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Get job related info\n", - "client.get_job_info(submission_id)" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[JobDetails(type=, job_id=None, submission_id='raysubmit_NvXkkh1QP1kdq4LG', driver_info=None, status=, entrypoint='python mnist.py', message='Job has not started yet. 
It may be waiting for the runtime environment to be set up.', error_type=None, start_time=1712142968879, end_time=None, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_431abdedbcc7e123.zip', 'pip': {'packages': ['pytorch_lightning==1.5.10', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address=None, driver_node_id=None)]" - ] - }, - "execution_count": 11, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# List all existing jobs\n", - "client.list_jobs()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Iterate through the logs of a job \n", - "async for lines in client.tail_job_logs(submission_id):\n", - " print(lines, end=\"\") " - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(True, 'Successfully deleted Job raysubmit_NvXkkh1QP1kdq4LG')" - ] - }, - "execution_count": 12, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "# Delete a job\n", - "# Can run client.cancel_job(submission_id) first if job is still running\n", - "client.delete_job(submission_id)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Unauthorized Ray Cluster with the Ray Job Client" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\"\n", - "Initialise the RayJobClient with the Ray Dashboard\n", - "\"\"\"\n", - "ray_dashboard = cluster.cluster_dashboard_uri()\n", - "client = RayJobClient(address=ray_dashboard, verify=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Submit an example mnist job using the RayJobClient\n", - "submission_id = client.submit_job(\n", - " entrypoint=\"python mnist.py\",\n", - " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", - ")\n", - "print(submission_id)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Stop the job \n", - "client.stop_job(submission_id)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Delete the job\n", - "client.delete_job(submission_id)" - ] - }, - { - "cell_type": "code", - "execution_count": 13, - "metadata": {}, - "outputs": [], - "source": [ - "cluster.down()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "auth.logout()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.18" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb new file mode 100644 index 00000000..8ec29cbe --- /dev/null +++ b/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb @@ -0,0 +1,223 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this third demo we will 
go over the basics of the Ray Job Submission Client in the SDK" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Import pieces from codeflare-sdk\n", + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create authentication object for user permissions\n", + "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", + "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", + "\n", + "auth = TokenAuthentication(\n", + " token = \"XXXXX\",\n", + " server = \"XXXXX\",\n", + " skip_tls=False\n", + ")\n", + "auth.login()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create and configure our cluster object\n", + "cluster = Cluster(ClusterConfiguration(\n", + " name='jobtest',\n", + " namespace='default',\n", + " num_workers=2,\n", + " min_cpus=1,\n", + " max_cpus=1,\n", + " min_memory=4,\n", + " max_memory=4,\n", + " num_gpus=0,\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", + "))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Bring up the cluster\n", + "cluster.up()\n", + "cluster.wait_ready()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster.details()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Ray Job Submission" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "* Initialise the Cluster Job Client \n", + "* Provide an entrypoint command directed to your job script\n", + "* Set up your runtime environment" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Initialize the Job Submission Client\n", + "\"\"\"\n", + "The SDK will automatically gather the dashboard address and authenticate using the Ray Job Submission Client\n", + "\"\"\"\n", + "client = cluster.job_client" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Submit an example mnist job using the Job Submission Client\n", + "submission_id = client.submit_job(\n", + " entrypoint=\"python mnist.py\",\n", + " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", + ")\n", + "print(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get the job's logs\n", + "client.get_job_logs(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get the job's status\n", + "client.get_job_status(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get job related info\n", + "client.get_job_info(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# List all existing jobs\n", + "client.list_jobs()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Iterate through the logs of a job \n", + "async for lines in client.tail_job_logs(submission_id):\n", + " 
print(lines, end=\"\") " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Delete a job\n", + "# Can run client.cancel_job(submission_id) first if job is still running\n", + "client.delete_job(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster.down()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "auth.logout()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 5f0ee255092d3063b3c5208ce6d44c7d2f4c937e Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 24 Apr 2024 13:00:38 +0100 Subject: [PATCH 204/496] Added namespace reminder --- .../additional-demos/local_interactive.ipynb | 10 ++++++---- demo-notebooks/additional-demos/ray_job_client.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- demo-notebooks/guided-demos/1_basic_instascale.ipynb | 2 +- demo-notebooks/guided-demos/2_cluster_job_client.ipynb | 2 +- demo-notebooks/guided-demos/3_basic_interactive.ipynb | 2 +- .../guided-demos/notebook-ex-outputs/0_basic_ray.ipynb | 4 ++-- .../notebook-ex-outputs/1_basic_instascale.ipynb | 2 +- .../notebook-ex-outputs/2_cluster_job_client.ipynb | 2 +- .../notebook-ex-outputs/3_basic_interactive.ipynb | 2 +- .../guided-demos/preview_nbs/0_basic_ray.ipynb | 4 ++-- .../guided-demos/preview_nbs/1_basic_instascale.ipynb | 2 +- .../preview_nbs/2_cluster_job_client.ipynb | 2 +- .../guided-demos/preview_nbs/3_basic_interactive.ipynb | 2 +- 14 files changed, 21 insertions(+), 19 deletions(-) diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 31f1ad17..61dc2583 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -48,8 +48,8 @@ }, "outputs": [], "source": [ - "# Create and submit our Ray Cluster\n", - "namespace = \"default\"\n", + "# Create our cluster and submit appwrapper\n", + "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"hfgputest-1\"\n", "\n", "cluster = Cluster(ClusterConfiguration(namespace=namespace,\n", @@ -60,7 +60,9 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",))" + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " instascale=False,\n", + " machine_types=[\"m5.xlarge\", \"p3.8xlarge\"]))" ] }, { @@ -112,7 +114,7 @@ "outputs": [], "source": [ "from codeflare_sdk import generate_cert\n", - "# Export the environment variables to enable TLS\n", + "\n", "generate_cert.generate_tls_cert(cluster_name, namespace)\n", "generate_cert.export_env(cluster_name, namespace)" ] diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index d24c92c6..78978f9c 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -45,7 +45,7 @@ "# Create and configure our cluster object\n", "cluster = 
Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " namespace='default',\n", + " namespace='default', # Update to your namespace\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 9b6aace7..dabadfb4 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -64,7 +64,7 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", - " namespace='default',\n", + " namespace='default', # Update to your namespace\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", diff --git a/demo-notebooks/guided-demos/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/1_basic_instascale.ipynb index 07983c6e..6f34df3f 100644 --- a/demo-notebooks/guided-demos/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/1_basic_instascale.ipynb @@ -62,7 +62,7 @@ "# Create and configure our cluster object (and appwrapper)\n", "cluster = Cluster(ClusterConfiguration(\n", " name='instascaletest',\n", - " namespace='default',\n", + " namespace='default', # Update to your namespace\n", " num_workers=2,\n", " min_cpus=2,\n", " max_cpus=2,\n", diff --git a/demo-notebooks/guided-demos/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/2_cluster_job_client.ipynb index 8ec29cbe..ea194ca9 100644 --- a/demo-notebooks/guided-demos/2_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/2_cluster_job_client.ipynb @@ -44,7 +44,7 @@ "# Create and configure our cluster object\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " namespace='default',\n", + " namespace='default', # Update to your namespace\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", diff --git a/demo-notebooks/guided-demos/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/3_basic_interactive.ipynb index b0aa44ba..53caed00 100644 --- a/demo-notebooks/guided-demos/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/3_basic_interactive.ipynb @@ -59,7 +59,7 @@ "source": [ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", - "namespace = \"default\"\n", + "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index ccc59cb0..a9df543d 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -72,7 +72,7 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", - " namespace='default',\n", + " namespace='default', # Update to your namespace\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", @@ -357,7 +357,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.undefined" + "version": "3.9.18" }, "vscode": { "interpreter": { diff --git 
a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb index ba8426dc..34543597 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb @@ -70,7 +70,7 @@ "# Create and configure our cluster object (and appwrapper)\n", "cluster = Cluster(ClusterConfiguration(\n", " name='instascaletest',\n", - " namespace='default',\n", + " namespace='default', # Update to your namespace\n", " num_workers=2,\n", " min_cpus=2,\n", " max_cpus=2,\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb index 8ec29cbe..ea194ca9 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb @@ -44,7 +44,7 @@ "# Create and configure our cluster object\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " namespace='default',\n", + " namespace='default', # Update to your namespace\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb index 33d6af10..d5c12356 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb @@ -67,7 +67,7 @@ "source": [ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", - "namespace = \"default\"\n", + "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 7ea8e9e4..dabadfb4 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -64,7 +64,7 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", - " namespace='default',\n", + " namespace='default', # Update to your namespace\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", @@ -196,7 +196,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.13" + "version": "3.9.18" }, "vscode": { "interpreter": { diff --git a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb index 07983c6e..6f34df3f 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb @@ -62,7 +62,7 @@ "# Create and configure our cluster object (and appwrapper)\n", "cluster = Cluster(ClusterConfiguration(\n", " name='instascaletest',\n", - " namespace='default',\n", + " namespace='default', # Update to your namespace\n", " num_workers=2,\n", " min_cpus=2,\n", " max_cpus=2,\n", diff --git 
a/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb index 8ec29cbe..ea194ca9 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb @@ -44,7 +44,7 @@ "# Create and configure our cluster object\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " namespace='default',\n", + " namespace='default', # Update to your namespace\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", diff --git a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb index fb071611..169bb8b9 100644 --- a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb @@ -59,7 +59,7 @@ "source": [ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", - "namespace = \"default\"\n", + "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", From d27e676dbf9860c9c650debdb2ffefe7adb2058f Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 24 Apr 2024 14:19:54 +0100 Subject: [PATCH 205/496] Addressed review comments --- demo-notebooks/additional-demos/hf_interactive.ipynb | 2 +- demo-notebooks/additional-demos/local_interactive.ipynb | 3 ++- demo-notebooks/additional-demos/ray_job_client.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 4 ++-- demo-notebooks/guided-demos/1_basic_instascale.ipynb | 2 +- demo-notebooks/guided-demos/2_cluster_job_client.ipynb | 4 ++-- demo-notebooks/guided-demos/3_basic_interactive.ipynb | 6 +++--- .../guided-demos/notebook-ex-outputs/0_basic_ray.ipynb | 4 ++-- .../notebook-ex-outputs/1_basic_instascale.ipynb | 2 +- .../notebook-ex-outputs/2_cluster_job_client.ipynb | 4 ++-- .../notebook-ex-outputs/3_basic_interactive.ipynb | 4 ++-- demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb | 4 ++-- .../guided-demos/preview_nbs/1_basic_instascale.ipynb | 2 +- .../guided-demos/preview_nbs/2_cluster_job_client.ipynb | 4 ++-- .../guided-demos/preview_nbs/3_basic_interactive.ipynb | 4 ++-- 15 files changed, 26 insertions(+), 25 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 28441037..72b3dba2 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -92,7 +92,7 @@ ], "source": [ "# Create our cluster and submit\n", - "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(name='hfgputest', \n", " namespace=\"default\",\n", " num_workers=1,\n", diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 61dc2583..f8078e24 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -48,7 
+48,8 @@ }, "outputs": [], "source": [ - "# Create our cluster and submit appwrapper\n", + "# Create and submit our cluster\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"hfgputest-1\"\n", "\n", diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 78978f9c..cd81423c 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -26,7 +26,6 @@ "# Create authentication object for user permissions\n", "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "\n", "auth_token = \"XXXXX\" # The auth_token is used later for the RayJobClient\n", "auth = TokenAuthentication(\n", " token = auth_token,\n", @@ -43,6 +42,7 @@ "outputs": [], "source": [ "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", " namespace='default', # Update to your namespace\n", diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index dabadfb4..47884f0f 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -5,7 +5,7 @@ "id": "8d4a42f6", "metadata": {}, "source": [ - "In this first notebook, we will go through the basics of using the SDK to:\n", + "In first notebook, we will go through the basics of using the SDK to:\n", " - Spin up a Ray cluster with our desired resources\n", " - View the status and specs of our Ray cluster\n", " - Take down the Ray cluster when finished" @@ -61,7 +61,7 @@ "outputs": [], "source": [ "# Create and configure our cluster object\n", - "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", " namespace='default', # Update to your namespace\n", diff --git a/demo-notebooks/guided-demos/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/1_basic_instascale.ipynb index 6f34df3f..00963886 100644 --- a/demo-notebooks/guided-demos/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/1_basic_instascale.ipynb @@ -5,7 +5,7 @@ "id": "9865ee8c", "metadata": {}, "source": [ - "In this second notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", + "In this notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", "\n", "NOTE: The InstaScale and MCAD components are in Tech Preview" ] diff --git a/demo-notebooks/guided-demos/2_cluster_job_client.ipynb 
b/demo-notebooks/guided-demos/2_cluster_job_client.ipynb index ea194ca9..65447ac3 100644 --- a/demo-notebooks/guided-demos/2_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/2_cluster_job_client.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "In this third demo we will go over the basics of the Ray Job Submission Client in the SDK" + "In this demo we will go over the basics of the Ray Job Submission Client in the SDK" ] }, { @@ -26,7 +26,6 @@ "# Create authentication object for user permissions\n", "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "\n", "auth = TokenAuthentication(\n", " token = \"XXXXX\",\n", " server = \"XXXXX\",\n", @@ -42,6 +41,7 @@ "outputs": [], "source": [ "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", " namespace='default', # Update to your namespace\n", diff --git a/demo-notebooks/guided-demos/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/3_basic_interactive.ipynb index 53caed00..87229830 100644 --- a/demo-notebooks/guided-demos/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/3_basic_interactive.ipynb @@ -5,7 +5,7 @@ "id": "bbc21043", "metadata": {}, "source": [ - "In this fourth notebook, we will go over how to leverage the SDK to directly work interactively with a Ray Cluster during development." + "In this notebook, we will go over how to leverage the SDK to directly work interactively with a Ray Cluster during development." 
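The auth_token variable introduced in ray_job_client.ipynb above is kept separate so the same bearer token can be reused when the RayJobClient is constructed later in that notebook. A rough sketch of that reuse; only the TokenAuthentication calls are quoted above, so treat the RayJobClient import path, constructor arguments and submit_job signature as assumptions, and the dashboard URL and entrypoint as placeholders:

    # Sketch only -- the RayJobClient details below are assumptions, not quoted in the diff.
    from codeflare_sdk import TokenAuthentication
    from codeflare_sdk.job import RayJobClient  # assumed import path

    auth_token = "XXXXX"   # placeholder, as in the notebook
    auth = TokenAuthentication(token=auth_token, server="XXXXX", skip_tls=False)
    auth.login()

    # Reuse the same token as a bearer header against the Ray dashboard endpoint.
    client = RayJobClient(
        address="https://ray-dashboard-jobtest-example.apps.example.com",  # placeholder URL
        headers={"Authorization": f"Bearer {auth_token}"},
        verify=True,
    )
    submission_id = client.submit_job(entrypoint="python my_script.py")  # placeholder entrypoint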
] }, { @@ -58,7 +58,7 @@ "outputs": [], "source": [ "# Create and configure our cluster object\n", - "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", @@ -136,7 +136,7 @@ "outputs": [], "source": [ "from codeflare_sdk import generate_cert\n", - "# Export the environment variables to enable TLS\n", + "# Create required TLS cert and export the environment variables to enable TLS\n", "generate_cert.generate_tls_cert(cluster_name, namespace)\n", "generate_cert.export_env(cluster_name, namespace)" ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index a9df543d..afadb55d 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -5,7 +5,7 @@ "id": "8d4a42f6", "metadata": {}, "source": [ - "In this first notebook, we will go through the basics of using the SDK to:\n", + "In this notebook, we will go through the basics of using the SDK to:\n", " - Spin up a Ray cluster with our desired resources\n", " - View the status and specs of our Ray cluster\n", " - Take down the Ray cluster when finished" @@ -69,7 +69,7 @@ ], "source": [ "# Create and configure our cluster object\n", - "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", " namespace='default', # Update to your namespace\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb index 34543597..ab10a558 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb @@ -5,7 +5,7 @@ "id": "9865ee8c", "metadata": {}, "source": [ - "In this second notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", + "In this notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", "\n", "NOTE: The InstaScale and MCAD components are in Tech Preview" ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb index ea194ca9..65447ac3 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "In this third demo we will go over the basics of the Ray Job Submission Client in the SDK" + "In this demo 
we will go over the basics of the Ray Job Submission Client in the SDK" ] }, { @@ -26,7 +26,6 @@ "# Create authentication object for user permissions\n", "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "\n", "auth = TokenAuthentication(\n", " token = \"XXXXX\",\n", " server = \"XXXXX\",\n", @@ -42,6 +41,7 @@ "outputs": [], "source": [ "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", " namespace='default', # Update to your namespace\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb index d5c12356..1d3afdc6 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb @@ -66,7 +66,7 @@ ], "source": [ "# Create and configure our cluster object\n", - "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", @@ -220,7 +220,7 @@ "outputs": [], "source": [ "from codeflare_sdk import generate_cert\n", - "# Export the environment variables to enable TLS\n", + "# Create required TLS cert and export the environment variables to enable TLS\n", "generate_cert.generate_tls_cert(cluster_name, namespace)\n", "generate_cert.export_env(cluster_name, namespace)" ] diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index dabadfb4..bf2bde96 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -5,7 +5,7 @@ "id": "8d4a42f6", "metadata": {}, "source": [ - "In this first notebook, we will go through the basics of using the SDK to:\n", + "In this notebook, we will go through the basics of using the SDK to:\n", " - Spin up a Ray cluster with our desired resources\n", " - View the status and specs of our Ray cluster\n", " - Take down the Ray cluster when finished" @@ -61,7 +61,7 @@ "outputs": [], "source": [ "# Create and configure our cluster object\n", - "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", " namespace='default', # Update to your namespace\n", diff --git a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb index 6f34df3f..00963886 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb +++ 
b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb @@ -5,7 +5,7 @@ "id": "9865ee8c", "metadata": {}, "source": [ - "In this second notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", + "In this notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", "\n", "NOTE: The InstaScale and MCAD components are in Tech Preview" ] diff --git a/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb index ea194ca9..65447ac3 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb @@ -4,7 +4,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "In this third demo we will go over the basics of the Ray Job Submission Client in the SDK" + "In this demo we will go over the basics of the Ray Job Submission Client in the SDK" ] }, { @@ -26,7 +26,6 @@ "# Create authentication object for user permissions\n", "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "\n", "auth = TokenAuthentication(\n", " token = \"XXXXX\",\n", " server = \"XXXXX\",\n", @@ -42,6 +41,7 @@ "outputs": [], "source": [ "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", " namespace='default', # Update to your namespace\n", diff --git a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb index 169bb8b9..27ce71f2 100644 --- a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb @@ -58,7 +58,7 @@ "outputs": [], "source": [ "# Create and configure our cluster object\n", - "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\"\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", @@ -136,7 +136,7 @@ "outputs": [], "source": [ "from codeflare_sdk import generate_cert\n", - "# Export the environment variables to enable TLS\n", + "# Create required TLS cert and export the environment variables to enable TLS\n", "generate_cert.generate_tls_cert(cluster_name, namespace)\n", "generate_cert.export_env(cluster_name, namespace)" ] From afe433c120875916732ef5825b4e12d9aa4fa9bd Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 24 Apr 2024 14:33:57 +0100 Subject: [PATCH 206/496] Added Tech Preview comment to MCAD references --- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- demo-notebooks/guided-demos/3_basic_interactive.ipynb | 2 +- .../guided-demos/notebook-ex-outputs/0_basic_ray.ipynb | 2 +- 
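The reworded comment in the generate_cert cells above ("Create required TLS cert and export the environment variables to enable TLS") captures that both calls are needed before an interactive ray:// connection: the first generates the client certificate, the second exports the TLS environment variables the Ray client reads. A short sketch using only the helpers quoted above; the follow-on connection is left as a comment because that call is not shown in these hunks:

    from codeflare_sdk import generate_cert

    cluster_name = "interactivetest"
    namespace = "default"  # update to your namespace

    # Create the client TLS certificate for this Ray Cluster...
    generate_cert.generate_tls_cert(cluster_name, namespace)
    # ...then export the TLS-related environment variables so the Ray client can use it.
    generate_cert.export_env(cluster_name, namespace)

    # Next step in the notebook (assumption, not quoted in these hunks):
    # ray.init(address="ray://<cluster-name>-head-svc.<namespace>.svc:10001")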
.../guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb | 2 +- demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb | 2 +- .../guided-demos/preview_nbs/3_basic_interactive.ipynb | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 47884f0f..d5b7000a 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -50,7 +50,7 @@ "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", "The example here is a community image.\n", "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { diff --git a/demo-notebooks/guided-demos/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/3_basic_interactive.ipynb index 87229830..6afde0bc 100644 --- a/demo-notebooks/guided-demos/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/3_basic_interactive.ipynb @@ -47,7 +47,7 @@ "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", "The example here is a community image.\n", "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index afadb55d..7d2dfac1 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -50,7 +50,7 @@ "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", "The example here is a community image.\n", "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb index 1d3afdc6..78ace55e 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb @@ -47,7 +47,7 @@ "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. 
\n", "The example here is a community image.\n", "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index bf2bde96..f963fa5f 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -50,7 +50,7 @@ "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", "The example here is a community image.\n", "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb index 27ce71f2..06062e14 100644 --- a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb @@ -47,7 +47,7 @@ "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", "The example here is a community image.\n", "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" + "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" ] }, { From 543fb196ea7096205a08236393e8ca324a80f3b8 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 24 Apr 2024 15:23:51 +0100 Subject: [PATCH 207/496] Ommited MCAD references from notebooks except InstaScale NB --- demo-notebooks/additional-demos/hf_interactive.ipynb | 7 +++---- demo-notebooks/additional-demos/local_interactive.ipynb | 1 + demo-notebooks/additional-demos/ray_job_client.ipynb | 3 ++- demo-notebooks/guided-demos/0_basic_ray.ipynb | 5 ++--- demo-notebooks/guided-demos/1_basic_instascale.ipynb | 1 + demo-notebooks/guided-demos/2_cluster_job_client.ipynb | 3 ++- demo-notebooks/guided-demos/3_basic_interactive.ipynb | 5 ++--- .../guided-demos/notebook-ex-outputs/0_basic_ray.ipynb | 7 +++---- .../notebook-ex-outputs/1_basic_instascale.ipynb | 1 + .../notebook-ex-outputs/2_cluster_job_client.ipynb | 3 ++- .../notebook-ex-outputs/3_basic_interactive.ipynb | 5 ++--- demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb | 5 ++--- .../guided-demos/preview_nbs/1_basic_instascale.ipynb | 1 + .../guided-demos/preview_nbs/2_cluster_job_client.ipynb | 3 ++- .../guided-demos/preview_nbs/3_basic_interactive.ipynb | 5 ++--- 15 files changed, 28 insertions(+), 27 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 72b3dba2..9181f3aa 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -71,9 +71,7 @@ "Here, we want to define our cluster by specifying the resources we require for our batch 
workload. Below, we define our cluster object (which generates a corresponding Ray Cluster).\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image.\n", - "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use MCAD set the `mcad=True` option in `ClusterConfiguration`" + "The example here is a community image." ] }, { @@ -94,7 +92,7 @@ "# Create our cluster and submit\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(name='hfgputest', \n", - " namespace=\"default\",\n", + " namespace=\"default\", # Update to your namespace\n", " num_workers=1,\n", " min_cpus=8, \n", " max_cpus=8, \n", @@ -102,6 +100,7 @@ " max_memory=16, \n", " num_gpus=4,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", " ))" ] diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index f8078e24..9af9bc33 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -62,6 +62,7 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " instascale=False,\n", " machine_types=[\"m5.xlarge\", \"p3.8xlarge\"]))" ] diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index cd81423c..e3d90cd3 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -52,7 +52,8 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", "))" ] }, diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index d5b7000a..bd2e66f0 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -48,9 +48,7 @@ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image.\n", - "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" + "The example here is a community image." 
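The write_to_file comment being threaded through every cluster cell in this patch marks a single toggle: when True, the generated Ray Cluster (or AppWrapper) YAML is written under $HOME/.codeflare/resources; when False, as these demos set it, no file is written out. A minimal sketch of the toggle, reusing only fields that appear in the cells above:

    from codeflare_sdk import Cluster, ClusterConfiguration

    cluster = Cluster(ClusterConfiguration(
        name='hfgputest',
        namespace='default',   # update to your namespace
        image="quay.io/project-codeflare/ray:latest-py39-cu118",
        write_to_file=True,    # persist the generated YAML under $HOME/.codeflare/resources
        # write_to_file=False, # the demos' setting: no YAML file is written out
    ))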
] }, { @@ -72,6 +70,7 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] diff --git a/demo-notebooks/guided-demos/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/1_basic_instascale.ipynb index 00963886..072392c7 100644 --- a/demo-notebooks/guided-demos/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/1_basic_instascale.ipynb @@ -71,6 +71,7 @@ " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " mcad=True, # Enable MCAD\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", "))" diff --git a/demo-notebooks/guided-demos/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/2_cluster_job_client.ipynb index 65447ac3..c8f1157a 100644 --- a/demo-notebooks/guided-demos/2_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/2_cluster_job_client.ipynb @@ -51,7 +51,8 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", "))" ] }, diff --git a/demo-notebooks/guided-demos/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/3_basic_interactive.ipynb index 6afde0bc..943425a8 100644 --- a/demo-notebooks/guided-demos/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/3_basic_interactive.ipynb @@ -45,9 +45,7 @@ "Once again, let's start by running through the same cluster setup as before:\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image.\n", - "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" + "The example here is a community image." ] }, { @@ -71,6 +69,7 @@ " max_memory=8,\n", " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 7d2dfac1..6771e9d7 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -48,9 +48,7 @@ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. 
\n", - "The example here is a community image.\n", - "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" + "The example here is a community image." ] }, { @@ -80,6 +78,7 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] @@ -89,7 +88,7 @@ "id": "12eef53c", "metadata": {}, "source": [ - "Next, we want to bring our cluster up, so we call the `up()` function below to submit our cluster AppWrapper yaml onto the MCAD queue, and begin the process of obtaining our resource cluster." + "Next, we want to bring our cluster up, so we call the `up()` function below to submit our Ray Cluster onto the queue, and begin the process of obtaining our resource cluster." ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb index ab10a558..003cdb5f 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb @@ -79,6 +79,7 @@ " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " mcad=True, # Enable MCAD\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", "))" diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb index 65447ac3..c8f1157a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb @@ -51,7 +51,8 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", "))" ] }, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb index 78ace55e..62d34f3f 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb @@ -45,9 +45,7 @@ "Once again, let's start by running through the same cluster setup as before:\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image.\n", - "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" + "The example here is a community image." 
] }, { @@ -79,6 +77,7 @@ " max_memory=8,\n", " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index f963fa5f..6a3b3710 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -48,9 +48,7 @@ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image.\n", - "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" + "The example here is a community image." ] }, { @@ -72,6 +70,7 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] diff --git a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb index 00963886..072392c7 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb @@ -71,6 +71,7 @@ " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " mcad=True, # Enable MCAD\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", "))" diff --git a/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb index 65447ac3..c8f1157a 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb @@ -51,7 +51,8 @@ " min_memory=4,\n", " max_memory=4,\n", " num_gpus=0,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\"\n", + " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", "))" ] }, diff --git a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb index 06062e14..28e05a26 100644 --- a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb @@ -45,9 +45,7 @@ "Once again, let's start by running through the same cluster setup as before:\n", "\n", "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. 
\n", - "The example here is a community image.\n", - "\n", - "NOTE: By default the SDK uses Kueue as it's scheduling solution to use Tech Preview component MCAD set the `mcad=True` option in `ClusterConfiguration`" + "The example here is a community image." ] }, { @@ -71,6 +69,7 @@ " max_memory=8,\n", " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] From 7c667f58c315e28b1850d969429c3795c175674a Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 24 Apr 2024 15:51:37 +0100 Subject: [PATCH 208/496] Removed InstaScale NB --- .../guided-demos/1_basic_instascale.ipynb | 183 ------------- ...lient.ipynb => 1_cluster_job_client.ipynb} | 0 ...active.ipynb => 2_basic_interactive.ipynb} | 0 .../1_basic_instascale.ipynb | 258 ------------------ ...lient.ipynb => 1_cluster_job_client.ipynb} | 0 ...active.ipynb => 2_basic_interactive.ipynb} | 0 .../preview_nbs/1_basic_instascale.ipynb | 183 ------------- ...lient.ipynb => 1_cluster_job_client.ipynb} | 0 ...active.ipynb => 2_basic_interactive.ipynb} | 0 9 files changed, 624 deletions(-) delete mode 100644 demo-notebooks/guided-demos/1_basic_instascale.ipynb rename demo-notebooks/guided-demos/{2_cluster_job_client.ipynb => 1_cluster_job_client.ipynb} (100%) rename demo-notebooks/guided-demos/{3_basic_interactive.ipynb => 2_basic_interactive.ipynb} (100%) delete mode 100644 demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb rename demo-notebooks/guided-demos/notebook-ex-outputs/{2_cluster_job_client.ipynb => 1_cluster_job_client.ipynb} (100%) rename demo-notebooks/guided-demos/notebook-ex-outputs/{3_basic_interactive.ipynb => 2_basic_interactive.ipynb} (100%) delete mode 100644 demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb rename demo-notebooks/guided-demos/preview_nbs/{2_cluster_job_client.ipynb => 1_cluster_job_client.ipynb} (100%) rename demo-notebooks/guided-demos/preview_nbs/{3_basic_interactive.ipynb => 2_basic_interactive.ipynb} (100%) diff --git a/demo-notebooks/guided-demos/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/1_basic_instascale.ipynb deleted file mode 100644 index 072392c7..00000000 --- a/demo-notebooks/guided-demos/1_basic_instascale.ipynb +++ /dev/null @@ -1,183 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "9865ee8c", - "metadata": {}, - "source": [ - "In this notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", - "\n", - "NOTE: The InstaScale and MCAD components are in Tech Preview" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b55bc3ea-4ce3-49bf-bb1f-e209de8ca47a", - "metadata": {}, - "outputs": [], - "source": [ - "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "614daa0c", - "metadata": {}, - "outputs": [], - "source": [ - "# Create authentication object for user permissions\n", - "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", - "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "auth = TokenAuthentication(\n", - " token = \"XXXXX\",\n", - " server = 
\"XXXXX\",\n", - " skip_tls=False\n", - ")\n", - "auth.login()" - ] - }, - { - "cell_type": "markdown", - "id": "bc27f84c", - "metadata": {}, - "source": [ - "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):\n", - "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image.\n", - "\n", - "NOTE: This specific demo requires MCAD and InstaScale to be enabled on the Cluster" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0f4bc870-091f-4e11-9642-cba145710159", - "metadata": {}, - "outputs": [], - "source": [ - "# Create and configure our cluster object (and appwrapper)\n", - "cluster = Cluster(ClusterConfiguration(\n", - " name='instascaletest',\n", - " namespace='default', # Update to your namespace\n", - " num_workers=2,\n", - " min_cpus=2,\n", - " max_cpus=2,\n", - " min_memory=8,\n", - " max_memory=8,\n", - " num_gpus=1,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " mcad=True, # Enable MCAD\n", - " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", - " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", - " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", - "))" - ] - }, - { - "cell_type": "markdown", - "id": "12eef53c", - "metadata": {}, - "source": [ - "Same as last time, we will bring the cluster up, wait for it to be ready, and confirm that the specs are as-requested:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f0884bbc-c224-4ca0-98a0-02dfa09c2200", - "metadata": {}, - "outputs": [], - "source": [ - "# Bring up the cluster\n", - "cluster.up()\n", - "cluster.wait_ready()" - ] - }, - { - "cell_type": "markdown", - "id": "6abfe904", - "metadata": {}, - "source": [ - "While the resources are being scaled, we can also go into the console and take a look at the InstaScale logs, as well as the new machines/nodes spinning up.\n", - "\n", - "Once the cluster is ready, we can confirm the specs:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7fd45bc5-03c0-4ae5-9ec5-dd1c30f1a084", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.details()" - ] - }, - { - "cell_type": "markdown", - "id": "5af8cd32", - "metadata": {}, - "source": [ - "Finally, we bring our resource cluster down and release/terminate the associated resources, bringing everything back to the way it was before our cluster was brought up." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5f36db0f-31f6-4373-9503-dc3c1c4c3f57", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.down()" - ] - }, - { - "cell_type": "markdown", - "id": "c883caea", - "metadata": {}, - "source": [ - "Once again, we can look at the machines/nodes and see that everything has been successfully scaled down!" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0d41b90e", - "metadata": {}, - "outputs": [], - "source": [ - "auth.logout()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.13" - }, - "vscode": { - "interpreter": { - "hash": "f9f85f796d01129d0dd105a088854619f454435301f6ffec2fea96ecbd9be4ac" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/demo-notebooks/guided-demos/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb similarity index 100% rename from demo-notebooks/guided-demos/2_cluster_job_client.ipynb rename to demo-notebooks/guided-demos/1_cluster_job_client.ipynb diff --git a/demo-notebooks/guided-demos/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb similarity index 100% rename from demo-notebooks/guided-demos/3_basic_interactive.ipynb rename to demo-notebooks/guided-demos/2_basic_interactive.ipynb diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb deleted file mode 100644 index 003cdb5f..00000000 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_basic_instascale.ipynb +++ /dev/null @@ -1,258 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "9865ee8c", - "metadata": {}, - "source": [ - "In this notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", - "\n", - "NOTE: The InstaScale and MCAD components are in Tech Preview" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "b55bc3ea-4ce3-49bf-bb1f-e209de8ca47a", - "metadata": {}, - "outputs": [], - "source": [ - "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "614daa0c", - "metadata": {}, - "outputs": [], - "source": [ - "# Create authentication object for user permissions\n", - "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", - "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "auth = TokenAuthentication(\n", - " token = \"XXXXX\",\n", - " server = \"XXXXX\",\n", - " skip_tls=False\n", - ")\n", - "auth.login()" - ] - }, - { - "cell_type": "markdown", - "id": "bc27f84c", - "metadata": {}, - "source": [ - "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):\n", - "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. 
\n", - "The example here is a community image.\n", - "\n", - "NOTE: This specific demo requires MCAD and InstaScale to be enabled on the Cluster" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "0f4bc870-091f-4e11-9642-cba145710159", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Written to: instascaletest.yaml\n" - ] - } - ], - "source": [ - "# Create and configure our cluster object (and appwrapper)\n", - "cluster = Cluster(ClusterConfiguration(\n", - " name='instascaletest',\n", - " namespace='default', # Update to your namespace\n", - " num_workers=2,\n", - " min_cpus=2,\n", - " max_cpus=2,\n", - " min_memory=8,\n", - " max_memory=8,\n", - " num_gpus=1,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " mcad=True, # Enable MCAD\n", - " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", - " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", - " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", - "))" - ] - }, - { - "cell_type": "markdown", - "id": "12eef53c", - "metadata": {}, - "source": [ - "Same as last time, we will bring the cluster up, wait for it to be ready, and confirm that the specs are as-requested:" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "id": "f0884bbc-c224-4ca0-98a0-02dfa09c2200", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Waiting for requested resources to be set up...\n", - "Requested cluster up and running!\n" - ] - } - ], - "source": [ - "# Bring up the cluster\n", - "cluster.up()\n", - "cluster.wait_ready()" - ] - }, - { - "cell_type": "markdown", - "id": "6abfe904", - "metadata": {}, - "source": [ - "While the resources are being scaled, we can also go into the console and take a look at the InstaScale logs, as well as the new machines/nodes spinning up.\n", - "\n", - "Once the cluster is ready, we can confirm the specs:" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "7fd45bc5-03c0-4ae5-9ec5-dd1c30f1a084", - "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
                           🚀 CodeFlare Cluster Details 🚀                     \n",
      -       "                                                                         \n",
      -       " ╭─────────────────────────────────────────────────────────────────────╮ \n",
      -       " │   Name                                                              │ \n",
      -       " │   instascaletest                                        Active ✅   │ \n",
      -       " │                                                                     │ \n",
      -       " │   URI: ray://instascaletest-head-svc.default.svc:10001              │ \n",
      -       " │                                                                     │ \n",
      -       " │   Dashboard🔗                                                       │ \n",
      -       " │                                                                     │ \n",
      -       " │                       Cluster Resources                             │ \n",
      -       " │   ╭── Workers ──╮  ╭───────── Worker specs(each) ─────────╮         │ \n",
      -       " │   │  # Workers  │  │  Memory      CPU         GPU         │         │ \n",
      -       " │   │             │  │                                      │         │ \n",
      -       " │   │  2          │  │  8~8         2           1           │         │ \n",
      -       " │   │             │  │                                      │         │ \n",
      -       " │   ╰─────────────╯  ╰──────────────────────────────────────╯         │ \n",
      -       " ╰─────────────────────────────────────────────────────────────────────╯ \n",
      -       "
      \n" - ], - "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", - " ╭─────────────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mName\u001b[0m │ \n", - " │ \u001b[1;4minstascaletest\u001b[0m Active ✅ │ \n", - " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://instascaletest-head-svc.default.svc:10001 │ \n", - " │ │ \n", - " │ \u001b]8;id=65933;http://ray-dashboard-instascaletest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", - " │ │ \n", - " │ \u001b[3m Cluster Resources \u001b[0m │ \n", - " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m8~8 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", - " ╰─────────────────────────────────────────────────────────────────────╯ \n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "RayCluster(name='instascaletest', status=, workers=2, worker_mem_min=8, worker_mem_max=8, worker_cpu=2, worker_gpu=1, namespace='default', dashboard='http://ray-dashboard-instascaletest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org')" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "cluster.details()" - ] - }, - { - "cell_type": "markdown", - "id": "5af8cd32", - "metadata": {}, - "source": [ - "Finally, we bring our resource cluster down and release/terminate the associated resources, bringing everything back to the way it was before our cluster was brought up." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "5f36db0f-31f6-4373-9503-dc3c1c4c3f57", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.down()" - ] - }, - { - "cell_type": "markdown", - "id": "c883caea", - "metadata": {}, - "source": [ - "Once again, we can look at the machines/nodes and see that everything has been successfully scaled down!" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0d41b90e", - "metadata": {}, - "outputs": [], - "source": [ - "auth.logout()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.17" - }, - "vscode": { - "interpreter": { - "hash": "f9f85f796d01129d0dd105a088854619f454435301f6ffec2fea96ecbd9be4ac" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb similarity index 100% rename from demo-notebooks/guided-demos/notebook-ex-outputs/2_cluster_job_client.ipynb rename to demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb similarity index 100% rename from demo-notebooks/guided-demos/notebook-ex-outputs/3_basic_interactive.ipynb rename to demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb diff --git a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb deleted file mode 100644 index 072392c7..00000000 --- a/demo-notebooks/guided-demos/preview_nbs/1_basic_instascale.ipynb +++ /dev/null @@ -1,183 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "9865ee8c", - "metadata": {}, - "source": [ - "In this notebook, we will go over the basics of using InstaScale to scale up/down necessary resources that are not currently available on your OpenShift Cluster (in cloud environments).\n", - "\n", - "NOTE: The InstaScale and MCAD components are in Tech Preview" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b55bc3ea-4ce3-49bf-bb1f-e209de8ca47a", - "metadata": {}, - "outputs": [], - "source": [ - "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "614daa0c", - "metadata": {}, - "outputs": [], - "source": [ - "# Create authentication object for user permissions\n", - "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", - "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", - "auth = TokenAuthentication(\n", - " token = \"XXXXX\",\n", - " server = \"XXXXX\",\n", - " skip_tls=False\n", - ")\n", - "auth.login()" - ] - }, - { - "cell_type": "markdown", - "id": "bc27f84c", - "metadata": {}, - "source": [ - "This time, we are working in a cloud environment, and our OpenShift cluster does not have the resources needed for our desired workloads. We will use InstaScale to dynamically scale-up guaranteed resources based on our request (that will also automatically scale-down when we are finished working):\n", - "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. 
\n", - "The example here is a community image.\n", - "\n", - "NOTE: This specific demo requires MCAD and InstaScale to be enabled on the Cluster" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0f4bc870-091f-4e11-9642-cba145710159", - "metadata": {}, - "outputs": [], - "source": [ - "# Create and configure our cluster object (and appwrapper)\n", - "cluster = Cluster(ClusterConfiguration(\n", - " name='instascaletest',\n", - " namespace='default', # Update to your namespace\n", - " num_workers=2,\n", - " min_cpus=2,\n", - " max_cpus=2,\n", - " min_memory=8,\n", - " max_memory=8,\n", - " num_gpus=1,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " mcad=True, # Enable MCAD\n", - " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", - " instascale=True, # InstaScale now enabled, will scale OCP cluster to guarantee resource request\n", - " machine_types=[\"m5.xlarge\", \"g4dn.xlarge\"] # Head, worker AWS machine types desired\n", - "))" - ] - }, - { - "cell_type": "markdown", - "id": "12eef53c", - "metadata": {}, - "source": [ - "Same as last time, we will bring the cluster up, wait for it to be ready, and confirm that the specs are as-requested:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f0884bbc-c224-4ca0-98a0-02dfa09c2200", - "metadata": {}, - "outputs": [], - "source": [ - "# Bring up the cluster\n", - "cluster.up()\n", - "cluster.wait_ready()" - ] - }, - { - "cell_type": "markdown", - "id": "6abfe904", - "metadata": {}, - "source": [ - "While the resources are being scaled, we can also go into the console and take a look at the InstaScale logs, as well as the new machines/nodes spinning up.\n", - "\n", - "Once the cluster is ready, we can confirm the specs:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7fd45bc5-03c0-4ae5-9ec5-dd1c30f1a084", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.details()" - ] - }, - { - "cell_type": "markdown", - "id": "5af8cd32", - "metadata": {}, - "source": [ - "Finally, we bring our resource cluster down and release/terminate the associated resources, bringing everything back to the way it was before our cluster was brought up." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5f36db0f-31f6-4373-9503-dc3c1c4c3f57", - "metadata": {}, - "outputs": [], - "source": [ - "cluster.down()" - ] - }, - { - "cell_type": "markdown", - "id": "c883caea", - "metadata": {}, - "source": [ - "Once again, we can look at the machines/nodes and see that everything has been successfully scaled down!" 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0d41b90e", - "metadata": {}, - "outputs": [], - "source": [ - "auth.logout()" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.13" - }, - "vscode": { - "interpreter": { - "hash": "f9f85f796d01129d0dd105a088854619f454435301f6ffec2fea96ecbd9be4ac" - } - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb similarity index 100% rename from demo-notebooks/guided-demos/preview_nbs/2_cluster_job_client.ipynb rename to demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb diff --git a/demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb similarity index 100% rename from demo-notebooks/guided-demos/preview_nbs/3_basic_interactive.ipynb rename to demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb From befd806934793e5a1de5f2c749b309173f5ac4b9 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 25 Apr 2024 09:28:05 +0100 Subject: [PATCH 209/496] Removed mention of InstaScale in NB --- .../additional-demos/local_interactive.ipynb | 3 +- .../notebook-ex-outputs/instascaletest.yaml | 185 ------------------ 2 files changed, 1 insertion(+), 187 deletions(-) delete mode 100644 demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 9af9bc33..f0243d6e 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -63,8 +63,7 @@ " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", - " instascale=False,\n", - " machine_types=[\"m5.xlarge\", \"p3.8xlarge\"]))" + " ))" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml deleted file mode 100644 index 8cb96a79..00000000 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/instascaletest.yaml +++ /dev/null @@ -1,185 +0,0 @@ -apiVersion: workload.codeflare.dev/v1beta1 -kind: AppWrapper -metadata: - labels: - orderedinstance: m5.xlarge_g4dn.xlarge - name: instascaletest - namespace: default -spec: - priority: 9 - resources: - GenericItems: - - custompodresources: - - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - replicas: 1 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 1 - replicas: 2 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 1 - generictemplate: - apiVersion: ray.io/v1 - kind: RayCluster - metadata: - labels: - appwrapper.mcad.ibm.com: instascaletest - controller-tools.k8s.io: '1.0' - name: instascaletest - namespace: default - spec: - autoscalerOptions: - idleTimeoutSeconds: 60 - imagePullPolicy: Always - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 500m - memory: 
512Mi - upscalingMode: Default - enableInTreeAutoscaling: false - headGroupSpec: - rayStartParams: - block: 'true' - dashboard-host: 0.0.0.0 - num-gpus: '0' - serviceType: ClusterIP - template: - spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: instascaletest - operator: In - values: - - instascaletest - containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: ray-head - ports: - - containerPort: 6379 - name: gcs - - containerPort: 8265 - name: dashboard - - containerPort: 10001 - name: client - resources: - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - imagePullSecrets: [] - rayVersion: 2.1.0 - workerGroupSpecs: - - groupName: small-group-instascaletest - maxReplicas: 2 - minReplicas: 2 - rayStartParams: - block: 'true' - num-gpus: '1' - replicas: 2 - template: - metadata: - annotations: - key: value - labels: - key: value - spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: instascaletest - operator: In - values: - - instascaletest - containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - - name: RAY_USE_TLS - value: '0' - - name: RAY_TLS_SERVER_CERT - value: /home/ray/workspace/tls/server.crt - - name: RAY_TLS_SERVER_KEY - value: /home/ray/workspace/tls/server.key - - name: RAY_TLS_CA_CERT - value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: machine-learning - resources: - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 1 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 1 - imagePullSecrets: [] - replicas: 1 - - generictemplate: - apiVersion: route.openshift.io/v1 - kind: Route - metadata: - labels: - odh-ray-cluster-service: instascaletest-head-svc - name: ray-dashboard-instascaletest - namespace: default - spec: - port: - targetPort: dashboard - to: - kind: Service - name: instascaletest-head-svc - replicas: 1 - Items: [] From 610029afd95b30e2c5e0cad536b629a1214fe66e Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 25 Apr 2024 09:49:48 +0100 Subject: [PATCH 210/496] Added local queue comment --- demo-notebooks/additional-demos/local_interactive.ipynb | 1 + demo-notebooks/guided-demos/1_cluster_job_client.ipynb | 3 ++- .../notebook-ex-outputs/1_cluster_job_client.ipynb | 3 ++- .../guided-demos/preview_nbs/1_cluster_job_client.ipynb | 3 ++- 4 files changed, 7 insertions(+), 3 deletions(-) diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index f0243d6e..36adfb50 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -63,6 +63,7 @@ " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to 
/HOME/.codeflare/resources \n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", " ))" ] }, diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index c8f1157a..e46dc62a 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -52,7 +52,8 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index c8f1157a..e46dc62a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -52,7 +52,8 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index c8f1157a..b20f920b 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -52,7 +52,8 @@ " max_memory=4,\n", " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", - " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources\n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, From 87e6f26ff077257916683b8e1e0122289a72627d Mon Sep 17 00:00:00 2001 From: Mark Campbell Date: Thu, 25 Apr 2024 09:56:12 +0100 Subject: [PATCH 211/496] Update demo-notebooks/guided-demos/0_basic_ray.ipynb Co-authored-by: Fiona Waters --- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index bd2e66f0..6a3b3710 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -5,7 +5,7 @@ "id": "8d4a42f6", "metadata": {}, "source": [ - "In first notebook, we will go through the basics of using the SDK to:\n", + "In this notebook, we will go through the basics of using the SDK to:\n", " - Spin up a Ray cluster with our desired resources\n", " - View the status and specs of our Ray cluster\n", " - Take down the Ray cluster when finished" From 80de58e4f9e49953fd79c08f3c3147cf3206bd7e Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 25 Apr 2024 10:27:10 +0000 Subject: [PATCH 212/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index 607d3de4..a8c7e72a 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 91% - 91% + 92% + 92% From 82d2c5b60e569f724697acbabc992805552aeec7 Mon Sep 17 00:00:00 2001 From: Kevin Postlethwait Date: Fri, 26 Apr 2024 17:24:34 -0400 Subject: [PATCH 213/496] raise error if raycluster crd not available (#493) Signed-off-by: Kevin --- src/codeflare_sdk/cluster/cluster.py | 25 +++++++++++++++++++++ tests/unit_test.py | 33 ++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 295332ae..c3e1d33a 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -46,6 +46,7 @@ import requests from kubernetes import config +from kubernetes.client.rest import ApiException class Cluster: @@ -216,6 +217,10 @@ def up(self): Applies the AppWrapper yaml, pushing the resource request onto the MCAD queue. """ + + # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError + self._throw_for_no_raycluster() + namespace = self.config.namespace try: @@ -246,12 +251,32 @@ def up(self): except Exception as e: # pragma: no cover return _kube_api_error_handling(e) + def _throw_for_no_raycluster(self): + api_instance = client.CustomObjectsApi(api_config_handler()) + try: + api_instance.list_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=self.config.namespace, + plural="rayclusters", + ) + except ApiException as e: + if e.status == 404: + raise RuntimeError( + "RayCluster CustomResourceDefinition unavailable contact your administrator." + ) + else: + raise RuntimeError( + "Failed to get RayCluster CustomResourceDefinition: " + str(e) + ) + def down(self): """ Deletes the AppWrapper yaml, scaling-down and deleting all resources associated with the cluster. 
""" namespace = self.config.namespace + self._throw_for_no_raycluster() try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) diff --git a/tests/unit_test.py b/tests/unit_test.py index 322449fb..49ebf8e2 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -508,6 +508,7 @@ def arg_check_del_effect(group, version, namespace, plural, name, *args): def test_cluster_up_down(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch("codeflare_sdk.cluster.cluster.Cluster._throw_for_no_raycluster") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", return_value={"spec": {"domain": ""}}, @@ -530,6 +531,7 @@ def test_cluster_up_down(mocker): def test_cluster_up_down_no_mcad(mocker): + mocker.patch("codeflare_sdk.cluster.cluster.Cluster._throw_for_no_raycluster") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( @@ -3000,6 +3002,37 @@ def test_export_env(): ) +def test_cluster_throw_for_no_raycluster(mocker: MockerFixture): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "codeflare_sdk.cluster.cluster.get_current_namespace", + return_value="opendatahub", + ) + mocker.patch( + "codeflare_sdk.utils.generate_yaml.get_default_kueue_name", + return_value="default", + ) + + def throw_if_getting_raycluster(group, version, namespace, plural): + if plural == "rayclusters": + raise client.ApiException(status=404) + return + + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=throw_if_getting_raycluster, + ) + cluster = Cluster( + ClusterConfiguration( + "test_cluster", + image="quay.io/project-codeflare/ray:latest-py39-cu118", + write_to_file=False, + ) + ) + with pytest.raises(RuntimeError): + cluster.up() + + """ Ray Jobs tests """ From 59cbccc5b0c440fcb0f08944e3eb57ac1b6aa9cc Mon Sep 17 00:00:00 2001 From: Kevin Postlethwait Date: Fri, 26 Apr 2024 17:25:34 -0400 Subject: [PATCH 214/496] use string for storing mem resources (#522) Signed-off-by: Kevin --- src/codeflare_sdk/cluster/cluster.py | 34 ++++++++++-------------- src/codeflare_sdk/cluster/config.py | 31 ++++++++++++++++----- src/codeflare_sdk/utils/generate_yaml.py | 12 ++++----- src/codeflare_sdk/utils/pretty_print.py | 2 +- tests/unit_test.py | 16 +++++------ 5 files changed, 54 insertions(+), 41 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index c3e1d33a..76f64287 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -18,6 +18,7 @@ cluster setup queue, a list of all existing clusters, and the user's working namespace. 
""" +import re from time import sleep from typing import List, Optional, Tuple, Dict @@ -41,6 +42,7 @@ RayClusterStatus, ) from kubernetes import client, config +from kubernetes.utils import parse_quantity import yaml import os import requests @@ -513,26 +515,18 @@ def from_k8_cluster_object( namespace=rc["metadata"]["namespace"], machine_types=machine_types, num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - min_cpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["requests"]["cpu"] - ), - max_cpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["cpu"] - ), - min_memory=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["requests"]["memory"][:-1] - ), - max_memory=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["memory"][:-1] - ), + min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["cpu"], + max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["cpu"], + min_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["memory"], + max_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["memory"], num_gpus=int( rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ "resources" diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index e4d046f9..064b51cd 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -20,6 +20,7 @@ from dataclasses import dataclass, field import pathlib +import typing dir = pathlib.Path(__file__).parent.parent.resolve() @@ -34,15 +35,15 @@ class ClusterConfiguration: name: str namespace: str = None head_info: list = field(default_factory=list) - head_cpus: int = 2 - head_memory: int = 8 + head_cpus: typing.Union[int, str] = 2 + head_memory: typing.Union[int, str] = 8 head_gpus: int = 0 machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] - min_cpus: int = 1 - max_cpus: int = 1 + min_cpus: typing.Union[int, str] = 1 + max_cpus: typing.Union[int, str] = 1 num_workers: int = 1 - min_memory: int = 2 - max_memory: int = 2 + min_memory: typing.Union[int, str] = 2 + max_memory: typing.Union[int, str] = 2 num_gpus: int = 0 template: str = f"{dir}/templates/base-template.yaml" instascale: bool = False @@ -59,5 +60,23 @@ def __post_init__(self): print( "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) + self._memory_to_string() + self._str_mem_no_unit_add_GB() + + def _str_mem_no_unit_add_GB(self): + if isinstance(self.head_memory, str) and self.head_memory.isdecimal(): + self.head_memory = f"{self.head_memory}G" + if isinstance(self.min_memory, str) and self.min_memory.isdecimal(): + self.min_memory = f"{self.min_memory}G" + if isinstance(self.max_memory, str) and self.max_memory.isdecimal(): + self.max_memory = f"{self.max_memory}G" + + def _memory_to_string(self): + if isinstance(self.head_memory, int): + self.head_memory = f"{self.head_memory}G" + if isinstance(self.min_memory, int): + self.min_memory = f"{self.min_memory}G" + if isinstance(self.max_memory, int): + self.max_memory = f"{self.max_memory}G" local_queue: str = None diff --git a/src/codeflare_sdk/utils/generate_yaml.py 
b/src/codeflare_sdk/utils/generate_yaml.py index 95c3d04f..97dda5ba 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -140,8 +140,8 @@ def update_custompodresources( # Leave head node resources as template default resource["requests"]["cpu"] = head_cpus resource["limits"]["cpu"] = head_cpus - resource["requests"]["memory"] = str(head_memory) + "G" - resource["limits"]["memory"] = str(head_memory) + "G" + resource["requests"]["memory"] = head_memory + resource["limits"]["memory"] = head_memory resource["requests"]["nvidia.com/gpu"] = head_gpus resource["limits"]["nvidia.com/gpu"] = head_gpus @@ -158,9 +158,9 @@ def update_custompodresources( resource[k][spec] = min_cpu if spec == "memory": if k == "limits": - resource[k][spec] = str(max_memory) + "G" + resource[k][spec] = max_memory else: - resource[k][spec] = str(min_memory) + "G" + resource[k][spec] = min_memory if spec == "nvidia.com/gpu": if i == 0: resource[k][spec] = 0 @@ -213,12 +213,12 @@ def update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu): requests = resource.get("resources").get("requests") if requests is not None: requests["cpu"] = min_cpu - requests["memory"] = str(min_memory) + "G" + requests["memory"] = min_memory requests["nvidia.com/gpu"] = gpu limits = resource.get("resources").get("limits") if limits is not None: limits["cpu"] = max_cpu - limits["memory"] = str(max_memory) + "G" + limits["memory"] = max_memory limits["nvidia.com/gpu"] = gpu diff --git a/src/codeflare_sdk/utils/pretty_print.py b/src/codeflare_sdk/utils/pretty_print.py index 74678ecc..42ef8398 100644 --- a/src/codeflare_sdk/utils/pretty_print.py +++ b/src/codeflare_sdk/utils/pretty_print.py @@ -136,7 +136,7 @@ def print_clusters(clusters: List[RayCluster]): name = cluster.name dashboard = cluster.dashboard workers = str(cluster.workers) - memory = str(cluster.worker_mem_min) + "~" + str(cluster.worker_mem_max) + memory = f"{cluster.worker_mem_min}~{cluster.worker_mem_max}" cpu = str(cluster.worker_cpu) gpu = str(cluster.worker_gpu) diff --git a/tests/unit_test.py b/tests/unit_test.py index 49ebf8e2..6f2ccee1 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -248,7 +248,7 @@ def test_config_creation(): assert config.name == "unit-test-cluster" and config.namespace == "ns" assert config.num_workers == 2 assert config.min_cpus == 3 and config.max_cpus == 4 - assert config.min_memory == 5 and config.max_memory == 6 + assert config.min_memory == "5G" and config.max_memory == "6G" assert config.num_gpus == 7 assert config.image == "quay.io/project-codeflare/ray:latest-py39-cu118" assert config.template == f"{parent}/src/codeflare_sdk/templates/base-template.yaml" @@ -851,8 +851,8 @@ def test_ray_details(mocker, capsys): name="raytest1", status=RayClusterStatus.READY, workers=1, - worker_mem_min=2, - worker_mem_max=2, + worker_mem_min="2G", + worker_mem_max="2G", worker_cpu=1, worker_gpu=0, namespace="ns", @@ -911,7 +911,7 @@ def test_ray_details(mocker, capsys): " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" " │ │ # Workers │ │ Memory CPU GPU │ │ \n" " │ │ │ │ │ │ \n" - " │ │ 1 │ │ 2~2 1 0 │ │ \n" + " │ │ 1 │ │ 2G~2G 1 0 │ │ \n" " │ │ │ │ │ │ \n" " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" " ╰───────────────────────────────────────────────────────────────╯ \n" @@ -929,7 +929,7 @@ def test_ray_details(mocker, capsys): " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" " │ │ # Workers │ │ Memory CPU GPU │ │ \n" " │ │ │ │ │ 
│ \n" - " │ │ 1 │ │ 2~2 1 0 │ │ \n" + " │ │ 1 │ │ 2G~2G 1 0 │ │ \n" " │ │ │ │ │ │ \n" " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" " ╰───────────────────────────────────────────────────────────────╯ \n" @@ -945,7 +945,7 @@ def test_ray_details(mocker, capsys): "│ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │\n" "│ │ # Workers │ │ Memory CPU GPU │ │\n" "│ │ │ │ │ │\n" - "│ │ 1 │ │ 2~2 1 0 │ │\n" + "│ │ 1 │ │ 2G~2G 1 0 │ │\n" "│ │ │ │ │ │\n" "│ ╰─────────────╯ ╰──────────────────────────────────────╯ │\n" "╰───────────────────────────────────────────────────────────────╯\n" @@ -2438,7 +2438,7 @@ def custom_side_effect(group, version, namespace, plural, **kwargs): and "g4dn.xlarge" in cluster_config.machine_types ) assert cluster_config.min_cpus == 1 and cluster_config.max_cpus == 1 - assert cluster_config.min_memory == 2 and cluster_config.max_memory == 2 + assert cluster_config.min_memory == "2G" and cluster_config.max_memory == "2G" assert cluster_config.num_gpus == 0 assert ( cluster_config.image @@ -2470,7 +2470,7 @@ def test_get_cluster(mocker): and "g4dn.xlarge" in cluster_config.machine_types ) assert cluster_config.min_cpus == 1 and cluster_config.max_cpus == 1 - assert cluster_config.min_memory == 2 and cluster_config.max_memory == 2 + assert cluster_config.min_memory == "2G" and cluster_config.max_memory == "2G" assert cluster_config.num_gpus == 0 assert cluster_config.instascale assert ( From e2cc8e133acdcc004bd193f3b7c585d8609f4a41 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 30 Apr 2024 17:00:07 +0100 Subject: [PATCH 215/496] New ClusterConfiguration parameter for user labels --- src/codeflare_sdk/cluster/cluster.py | 6 +++--- src/codeflare_sdk/cluster/config.py | 1 + src/codeflare_sdk/utils/generate_yaml.py | 23 +++++++++++++++++++---- 3 files changed, 23 insertions(+), 7 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 76f64287..31bdf5c4 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -187,6 +187,7 @@ def create_app_wrapper(self): write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls local_queue = self.config.local_queue + user_labels = self.config.user_labels return generate_appwrapper( name=name, namespace=namespace, @@ -211,6 +212,7 @@ def create_app_wrapper(self): write_to_file=write_to_file, verify_tls=verify_tls, local_queue=local_queue, + user_labels=user_labels, ) # creates a new cluster with the provided or default spec @@ -453,9 +455,7 @@ def cluster_dashboard_uri(self) -> str: "name" ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ "name" - ].startswith( - f"{self.config.name}-ingress" - ): + ].startswith(f"{self.config.name}-ingress"): protocol = "https" if route["spec"].get("tls") else "http" return f"{protocol}://{route['spec']['host']}" else: diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 064b51cd..8a264046 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -54,6 +54,7 @@ class ClusterConfiguration: dispatch_priority: str = None write_to_file: bool = False verify_tls: bool = True + user_labels: dict = field(default_factory=dict) def __post_init__(self): if not self.verify_tls: diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 97dda5ba..58b11f2c 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ 
b/src/codeflare_sdk/utils/generate_yaml.py @@ -309,7 +309,11 @@ def get_default_kueue_name(namespace: str): def write_components( - user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str] + user_yaml: dict, + output_file_name: str, + namespace: str, + local_queue: Optional[str], + user_labels: dict, ): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -331,6 +335,8 @@ def write_components( ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) + for key in user_labels: + labels.update({key: user_labels[key]}) outfile.write("---\n") yaml.dump( component["generictemplate"], outfile, default_flow_style=False @@ -339,7 +345,11 @@ def write_components( def load_components( - user_yaml: dict, name: str, namespace: str, local_queue: Optional[str] + user_yaml: dict, + name: str, + namespace: str, + local_queue: Optional[str], + user_labels: dict, ): component_list = [] components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") @@ -355,6 +365,8 @@ def load_components( ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) + for key in user_labels: + labels.update({key: user_labels[key]}) component_list.append(component["generictemplate"]) resources = "---\n" + "---\n".join( @@ -395,6 +407,7 @@ def generate_appwrapper( write_to_file: bool, verify_tls: bool, local_queue: Optional[str], + user_labels, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -446,11 +459,13 @@ def generate_appwrapper( if mcad: write_user_appwrapper(user_yaml, outfile) else: - write_components(user_yaml, outfile, namespace, local_queue) + write_components(user_yaml, outfile, namespace, local_queue, user_labels) return outfile else: if mcad: user_yaml = load_appwrapper(user_yaml, name) else: - user_yaml = load_components(user_yaml, name, namespace, local_queue) + user_yaml = load_components( + user_yaml, name, namespace, local_queue, user_labels + ) return user_yaml From 56479d12ca9dc12c758bb21f179524fcbb13e33d Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 30 Apr 2024 17:07:55 +0100 Subject: [PATCH 216/496] Updated unit test for user labels --- tests/test-case-no-mcad.yamls | 2 ++ tests/unit_test.py | 3 +++ 2 files changed, 5 insertions(+) diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index aaf9324e..7fcf1fdc 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -5,6 +5,8 @@ metadata: labels: controller-tools.k8s.io: '1.0' kueue.x-k8s.io/queue-name: local-queue-default + testlabel: test + testlabel2: test name: unit-test-cluster-ray namespace: ns spec: diff --git a/tests/unit_test.py b/tests/unit_test.py index 6f2ccee1..2ae850ad 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -324,6 +324,7 @@ def test_cluster_creation_no_mcad(mocker): config.name = "unit-test-cluster-ray" config.write_to_file = True config.mcad = False + config.user_labels = {"testlabel": "test", "testlabel2": "test"} cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" @@ -348,6 +349,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): config.mcad = False config.write_to_file = True config.local_queue = "local-queue-default" + config.user_labels = {"testlabel": "test", "testlabel2": "test"} cluster = Cluster(config) assert cluster.app_wrapper_yaml == 
f"{aw_dir}unit-test-cluster-ray.yaml" assert cluster.app_wrapper_name == "unit-test-cluster-ray" @@ -373,6 +375,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): write_to_file=True, mcad=False, local_queue="local-queue-default", + user_labels={"testlabel": "test", "testlabel2": "test"}, ) cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" From 4efbfdf14b352b50c12b0f474efb1bb46d7dc77b Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Tue, 30 Apr 2024 12:22:06 -0400 Subject: [PATCH 217/496] Revert "Updated unit test for user labels" This reverts commit 56479d12ca9dc12c758bb21f179524fcbb13e33d. --- tests/test-case-no-mcad.yamls | 2 -- tests/unit_test.py | 3 --- 2 files changed, 5 deletions(-) diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 7fcf1fdc..aaf9324e 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -5,8 +5,6 @@ metadata: labels: controller-tools.k8s.io: '1.0' kueue.x-k8s.io/queue-name: local-queue-default - testlabel: test - testlabel2: test name: unit-test-cluster-ray namespace: ns spec: diff --git a/tests/unit_test.py b/tests/unit_test.py index 2ae850ad..6f2ccee1 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -324,7 +324,6 @@ def test_cluster_creation_no_mcad(mocker): config.name = "unit-test-cluster-ray" config.write_to_file = True config.mcad = False - config.user_labels = {"testlabel": "test", "testlabel2": "test"} cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" @@ -349,7 +348,6 @@ def test_cluster_creation_no_mcad_local_queue(mocker): config.mcad = False config.write_to_file = True config.local_queue = "local-queue-default" - config.user_labels = {"testlabel": "test", "testlabel2": "test"} cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" assert cluster.app_wrapper_name == "unit-test-cluster-ray" @@ -375,7 +373,6 @@ def test_cluster_creation_no_mcad_local_queue(mocker): write_to_file=True, mcad=False, local_queue="local-queue-default", - user_labels={"testlabel": "test", "testlabel2": "test"}, ) cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" From a9b314e8276b0f8188380ab9224c14cf9ade331a Mon Sep 17 00:00:00 2001 From: Francisco Javier Arceo Date: Tue, 30 Apr 2024 12:22:17 -0400 Subject: [PATCH 218/496] Revert "New ClusterConfiguration parameter for user labels" This reverts commit e2cc8e133acdcc004bd193f3b7c585d8609f4a41. 
--- src/codeflare_sdk/cluster/cluster.py | 6 +++--- src/codeflare_sdk/cluster/config.py | 1 - src/codeflare_sdk/utils/generate_yaml.py | 23 ++++------------------- 3 files changed, 7 insertions(+), 23 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 31bdf5c4..76f64287 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -187,7 +187,6 @@ def create_app_wrapper(self): write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls local_queue = self.config.local_queue - user_labels = self.config.user_labels return generate_appwrapper( name=name, namespace=namespace, @@ -212,7 +211,6 @@ def create_app_wrapper(self): write_to_file=write_to_file, verify_tls=verify_tls, local_queue=local_queue, - user_labels=user_labels, ) # creates a new cluster with the provided or default spec @@ -455,7 +453,9 @@ def cluster_dashboard_uri(self) -> str: "name" ] == f"ray-dashboard-{self.config.name}" or route["metadata"][ "name" - ].startswith(f"{self.config.name}-ingress"): + ].startswith( + f"{self.config.name}-ingress" + ): protocol = "https" if route["spec"].get("tls") else "http" return f"{protocol}://{route['spec']['host']}" else: diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 8a264046..064b51cd 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -54,7 +54,6 @@ class ClusterConfiguration: dispatch_priority: str = None write_to_file: bool = False verify_tls: bool = True - user_labels: dict = field(default_factory=dict) def __post_init__(self): if not self.verify_tls: diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 58b11f2c..97dda5ba 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -309,11 +309,7 @@ def get_default_kueue_name(namespace: str): def write_components( - user_yaml: dict, - output_file_name: str, - namespace: str, - local_queue: Optional[str], - user_labels: dict, + user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str] ): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -335,8 +331,6 @@ def write_components( ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) - for key in user_labels: - labels.update({key: user_labels[key]}) outfile.write("---\n") yaml.dump( component["generictemplate"], outfile, default_flow_style=False @@ -345,11 +339,7 @@ def write_components( def load_components( - user_yaml: dict, - name: str, - namespace: str, - local_queue: Optional[str], - user_labels: dict, + user_yaml: dict, name: str, namespace: str, local_queue: Optional[str] ): component_list = [] components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") @@ -365,8 +355,6 @@ def load_components( ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) - for key in user_labels: - labels.update({key: user_labels[key]}) component_list.append(component["generictemplate"]) resources = "---\n" + "---\n".join( @@ -407,7 +395,6 @@ def generate_appwrapper( write_to_file: bool, verify_tls: bool, local_queue: Optional[str], - user_labels, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -459,13 +446,11 @@ def generate_appwrapper( if mcad: 
write_user_appwrapper(user_yaml, outfile) else: - write_components(user_yaml, outfile, namespace, local_queue, user_labels) + write_components(user_yaml, outfile, namespace, local_queue) return outfile else: if mcad: user_yaml = load_appwrapper(user_yaml, name) else: - user_yaml = load_components( - user_yaml, name, namespace, local_queue, user_labels - ) + user_yaml = load_components(user_yaml, name, namespace, local_queue) return user_yaml From f836653d408f8d5723597100b8c1fdfc7d60fdc5 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 2 May 2024 15:59:13 +0100 Subject: [PATCH 219/496] Added mnist_fashion as default job demo --- .../guided-demos/1_cluster_job_client.ipynb | 5 +- demo-notebooks/guided-demos/mnist_fashion.py | 84 +++++++++++++++++++ .../1_cluster_job_client.ipynb | 5 +- .../preview_nbs/1_cluster_job_client.ipynb | 5 +- 4 files changed, 93 insertions(+), 6 deletions(-) create mode 100644 demo-notebooks/guided-demos/mnist_fashion.py diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index e46dc62a..787bd315 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -50,7 +50,8 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=0,\n", + " num_gpus=1,\n", + " head_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", @@ -114,7 +115,7 @@ "source": [ "# Submit an example mnist job using the Job Submission Client\n", "submission_id = client.submit_job(\n", - " entrypoint=\"python mnist.py\",\n", + " entrypoint=\"python mnist_fashion.py\",\n", " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", ")\n", "print(submission_id)" diff --git a/demo-notebooks/guided-demos/mnist_fashion.py b/demo-notebooks/guided-demos/mnist_fashion.py new file mode 100644 index 00000000..2b75a865 --- /dev/null +++ b/demo-notebooks/guided-demos/mnist_fashion.py @@ -0,0 +1,84 @@ +import torch +import torch.nn as nn +import ray +from torch.utils.data import DataLoader +from torchvision import datasets +from torchvision.transforms import ToTensor +from ray.train.torch import TorchTrainer +from ray.train import ScalingConfig + + +def get_dataset(): + return datasets.FashionMNIST( + root="/tmp/data", + train=True, + download=True, + transform=ToTensor(), + ) + + +class NeuralNetwork(nn.Module): + def __init__(self): + super().__init__() + self.flatten = nn.Flatten() + self.linear_relu_stack = nn.Sequential( + nn.Linear(28 * 28, 512), + nn.ReLU(), + nn.Linear(512, 512), + nn.ReLU(), + nn.Linear(512, 10), + ) + + def forward(self, inputs): + inputs = self.flatten(inputs) + logits = self.linear_relu_stack(inputs) + return logits + + +def get_dataset(): + return datasets.FashionMNIST( + root="/tmp/data", + train=True, + download=True, + transform=ToTensor(), + ) + + +def train_func_distributed(): + num_epochs = 3 + batch_size = 64 + + dataset = get_dataset() + dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True) + dataloader = ray.train.torch.prepare_data_loader(dataloader) + + model = NeuralNetwork() + model = ray.train.torch.prepare_model(model) + + criterion = nn.CrossEntropyLoss() + optimizer = torch.optim.SGD(model.parameters(), lr=0.01) + + for epoch in range(num_epochs): + if 
ray.train.get_context().get_world_size() > 1: + dataloader.sampler.set_epoch(epoch) + + for inputs, labels in dataloader: + optimizer.zero_grad() + pred = model(inputs) + loss = criterion(pred, labels) + loss.backward() + optimizer.step() + print(f"epoch: {epoch}, loss: {loss.item()}") + + +# For GPU Training, set `use_gpu` to True. +use_gpu = True + +trainer = TorchTrainer( + train_func_distributed, + scaling_config=ScalingConfig( + num_workers=3, use_gpu=use_gpu + ), # num_workers = number of worker nodes with the ray head node included +) + +results = trainer.fit() diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index e46dc62a..787bd315 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -50,7 +50,8 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=0,\n", + " num_gpus=1,\n", + " head_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", @@ -114,7 +115,7 @@ "source": [ "# Submit an example mnist job using the Job Submission Client\n", "submission_id = client.submit_job(\n", - " entrypoint=\"python mnist.py\",\n", + " entrypoint=\"python mnist_fashion.py\",\n", " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", ")\n", "print(submission_id)" diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index b20f920b..db11cada 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -50,7 +50,8 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=0,\n", + " num_gpus=1,\n", + " head_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources\n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", @@ -114,7 +115,7 @@ "source": [ "# Submit an example mnist job using the Job Submission Client\n", "submission_id = client.submit_job(\n", - " entrypoint=\"python mnist.py\",\n", + " entrypoint=\"python mnist_fashion.py\",\n", " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", ")\n", "print(submission_id)" From 4593387d532249bd54b00bb6d450425aa105abd8 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Thu, 2 May 2024 11:05:51 +0100 Subject: [PATCH 220/496] Added doc for creating basic kueue resources --- docs/setup-kueue.md | 66 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 docs/setup-kueue.md diff --git a/docs/setup-kueue.md b/docs/setup-kueue.md new file mode 100644 index 00000000..c8fffa10 --- /dev/null +++ b/docs/setup-kueue.md @@ -0,0 +1,66 @@ +# Basic Kueue Resources configuration + +## Introduction: + +This document is designed for administrators who have Kueue installed on their cluster. We will walk through the process of setting up essential Kueue resources, namely Cluster Queue, Resource Flavor, and Local Queue. + +## 1. 
Resource Flavor: +Resource Flavors allow the cluster admin to define different types of resources with specific characteristics, such as CPU, memory, GPU, etc. These can then be assigned to workloads to ensure they are executed on appropriate resources. + +The YAML configuration provided below creates an empty Resource Flavor named default-flavor. It serves as a starting point and does not specify any detailed resource characteristics. +```yaml +apiVersion: kueue.x-k8s.io/v1beta1 +kind: ResourceFlavor +metadata: + name: default-flavor +``` +For more detailed information on Resource Flavor configuration options, refer to the Kueue documentation: [Resource Flavor Configuration](https://kueue.sigs.k8s.io/docs/concepts/resource_flavor/) + +## 2. Cluster Queue: +A Cluster Queue represents a shared queue across the entire cluster. It allows the cluster admin to define global settings for workload prioritization and resource allocation. + +When setting up a Cluster Queue in Kueue, it's crucial that the resource specifications match the actual capacities and operational requirements of your cluster. The example provided outlines a basic setup; however, each cluster may have different resource availabilities and needs. +```yaml +apiVersion: kueue.x-k8s.io/v1beta1 +kind: ClusterQueue +metadata: + name: "cluster-queue" +spec: + namespaceSelector: {} # match all. + resourceGroups: + - coveredResources: ["cpu", "memory", "pods", "nvidia.com/gpu"] + flavors: + - name: "default-flavor" + resources: + - name: "cpu" + nominalQuota: 9 + - name: "memory" + nominalQuota: 36Gi + - name: "pods" + nominalQuota: 5 + - name: "nvidia.com/gpu" + nominalQuota: '0' +``` + +For more detailed information on Cluster Queue configuration options, refer to the Kueue documentation: [Cluster Queue Configuration](https://kueue.sigs.k8s.io/docs/concepts/cluster_queue/) + +## 3. Local Queue (With Default Annotation): +A Local Queue represents a queue associated with a specific namespace within the cluster. It allows namespace-level control over workload prioritization and resource allocation. +```yaml +apiVersion: kueue.x-k8s.io/v1beta1 +kind: LocalQueue +metadata: + namespace: team-a + name: team-a-queue + annotations: + kueue.x-k8s.io/default-queue: "true" +spec: + clusterQueue: cluster-queue +``` + +In the LocalQueue configuration provided above, the annotations field specifies `kueue.x-k8s.io/default-queue: "true"`. This annotation indicates that the team-a-queue is designated as the default queue for the team-a namespace. When this is set, any workloads submitted to the team-a namespace without explicitly specifying a queue will automatically be routed to the team-a-queue. + +For more detailed information on Local Queue configuration options, refer to the Kueue documentation: [Local Queue Configuration](https://kueue.sigs.k8s.io/docs/concepts/local_queue/) + +## Conclusion: +By following the steps outlined in this document, the cluster admin can successfully create the basic Kueue resources necessary for workload management in the cluster. For more advanced configurations and features, please refer to the comprehensive [Kueue documentation](https://kueue.sigs.k8s.io/docs/concepts/). 
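For reference, a minimal sketch of how a user could target the `team-a-queue` defined above from the CodeFlare SDK. The namespace and queue names are the illustrative values from this document and the resource figures are placeholders; when the LocalQueue carries the default-queue annotation, the `local_queue` argument can be omitted and the SDK will pick up the default queue.

```python
from codeflare_sdk import Cluster, ClusterConfiguration

# Sketch only: route a Ray Cluster through the Kueue LocalQueue created in step 3.
cluster = Cluster(ClusterConfiguration(
    name="kueue-example",
    namespace="team-a",          # namespace that owns the LocalQueue
    num_workers=1,
    min_cpus=1,
    max_cpus=1,
    min_memory=2,
    max_memory=2,
    num_gpus=0,
    image="quay.io/project-codeflare/ray:latest-py39-cu118",
    mcad=False,                  # submit the RayCluster directly so Kueue manages admission
    local_queue="team-a-queue",  # LocalQueue from step 3; optional when a default queue is annotated
))
cluster.up()
```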
From 6a9b185778ea7d689e76daa554b66b116a048829 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 26 Mar 2024 09:43:16 +0000 Subject: [PATCH 221/496] Added ipywidgets to pyproject.toml --- poetry.lock | 298 ++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 298 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index cf848f0b..e0fa9acb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -135,6 +135,35 @@ files = [ {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, ] +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + [[package]] name = "async-timeout" version = "4.0.3" @@ -164,6 +193,17 @@ docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib- tests = ["attrs[tests-no-zope]", "zope-interface"] tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +optional = false +python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + [[package]] name = "bcrypt" version = "4.0.1" @@ -466,6 +506,23 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "comm" +version = "0.2.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, + {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +test = ["pytest"] + [[package]] name = "commonmark" version = "0.9.1" @@ -593,6 +650,17 @@ test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-co test-randomorder = ["pytest-randomly"] tox = ["tox"] +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + [[package]] name = "distlib" version = "0.3.7" @@ -990,6 +1058,85 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "ipython" +version = "8.12.3" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipython-8.12.3-py3-none-any.whl", hash = "sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, + {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + +[[package]] +name = "ipywidgets" +version = "8.1.2" +description = "Jupyter interactive widgets" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ipywidgets-8.1.2-py3-none-any.whl", hash = "sha256:bbe43850d79fb5e906b14801d6c01402857996864d1e5b6fa62dd2ee35559f60"}, + {file = "ipywidgets-8.1.2.tar.gz", hash = "sha256:d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9"}, +] + +[package.dependencies] +comm = ">=0.1.3" +ipython = 
">=6.1.0" +jupyterlab-widgets = ">=3.0.10,<3.1.0" +traitlets = ">=4.3.1" +widgetsnbextension = ">=4.0.10,<4.1.0" + +[package.extras] +test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + [[package]] name = "jinxed" version = "1.2.1" @@ -1042,6 +1189,17 @@ files = [ importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} referencing = ">=0.28.0" +[[package]] +name = "jupyterlab-widgets" +version = "3.0.10" +description = "Jupyter interactive widgets for JupyterLab" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jupyterlab_widgets-3.0.10-py3-none-any.whl", hash = "sha256:dd61f3ae7a5a7f80299e14585ce6cf3d6925a96c9103c978eda293197730cb64"}, + {file = "jupyterlab_widgets-3.0.10.tar.gz", hash = "sha256:04f2ac04976727e4f9d0fa91cdc2f1ab860f965e504c29dbd6a65c882c9d04c0"}, +] + [[package]] name = "kubernetes" version = "26.1.0" @@ -1174,6 +1332,20 @@ files = [ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] + +[package.dependencies] +traitlets = "*" + [[package]] name = "msgpack" version = "1.0.7" @@ -1538,6 +1710,21 @@ all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1 gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] invoke = ["invoke (>=2.0)"] +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + 
[[package]] name = "pdoc3" version = "0.10.0" @@ -1553,6 +1740,31 @@ files = [ mako = "*" markdown = ">=3.0" +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +optional = false +python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + [[package]] name = "pkgutil-resolve-name" version = "1.3.10" @@ -1608,6 +1820,20 @@ files = [ [package.extras] twisted = ["twisted"] +[[package]] +name = "prompt-toolkit" +version = "3.0.43" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, +] + +[package.dependencies] +wcwidth = "*" + [[package]] name = "protobuf" version = "4.24.4" @@ -1658,6 +1884,31 @@ files = [ [package.extras] test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "py-spy" version = "0.3.14" @@ -2350,6 +2601,25 @@ ssh = ["paramiko"] test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] webhdfs = ["requests"] +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + 
+[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + [[package]] name = "tabulate" version = "0.9.0" @@ -2375,6 +2645,21 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "traitlets" +version = "5.14.2" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.2-py3-none-any.whl", hash = "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80"}, + {file = "traitlets-5.14.2.tar.gz", hash = "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.1)", "pytest-mock", "pytest-mypy-testing"] + [[package]] name = "typing-extensions" version = "4.8.0" @@ -2475,6 +2760,17 @@ docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] +[[package]] +name = "widgetsnbextension" +version = "4.0.10" +description = "Jupyter interactive widgets for Jupyter Notebook" +optional = false +python-versions = ">=3.7" +files = [ + {file = "widgetsnbextension-4.0.10-py3-none-any.whl", hash = "sha256:d37c3724ec32d8c48400a435ecfa7d3e259995201fbefa37163124a9fcb393cc"}, + {file = "widgetsnbextension-4.0.10.tar.gz", hash = "sha256:64196c5ff3b9a9183a8e699a4227fb0b7002f252c814098e66c4d1cd0644688f"}, +] + [[package]] name = "yarl" version = "1.9.4" @@ -2596,4 +2892,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "33f4e77105de697fd6fc15537180df77ab057e3d2e5db27d7a9a2f8cc9f69a03" +content-hash = "e82f9c28e3f78682ca3e6d5414e1c1fe8f0bd62b5a992f749d8b7bd4bbd0273d" diff --git a/pyproject.toml b/pyproject.toml index c47e466a..99d85a6b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ codeflare-torchx = "0.6.0.dev2" cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" +ipywidgets = "8.1.2" [tool.poetry.group.docs] optional = true From 134e3067ac72854a59edb22afc943abc6c301907 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 30 Apr 2024 17:00:07 +0100 Subject: [PATCH 222/496] New ClusterConfiguration parameter for user labels --- docs/cluster-configuration.md | 6 ++++-- src/codeflare_sdk/cluster/cluster.py | 2 ++ src/codeflare_sdk/cluster/config.py | 1 + src/codeflare_sdk/utils/generate_yaml.py | 21 +++++++++++++++++---- 4 files changed, 24 insertions(+), 6 deletions(-) diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index 7684db2c..b83600fe 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -3,7 +3,7 @@ To create Ray Clusters using the CodeFlare SDK a cluster configuration needs to be created first.
      This is what a typical cluster configuration would look like; Note: The values for CPU and Memory are at the minimum requirements for creating the Ray Cluster. -``` +```python from codeflare_sdk import Cluster, ClusterConfiguration cluster = Cluster(ClusterConfiguration( @@ -20,8 +20,8 @@ cluster = Cluster(ClusterConfiguration( num_gpus=0, # Default 0 mcad=True, # Default True image="quay.io/project-codeflare/ray:latest-py39-cu118", # Mandatory Field - instascale=False, # Default False machine_types=["m5.xlarge", "g4dn.xlarge"], + labels={"exampleLabel": "example", "secondLabel": "example"}, )) ``` @@ -30,3 +30,5 @@ From there a user can call `cluster.up()` and `cluster.down()` to create and rem In cases where `mcad=False` a yaml file will be created with the individual Ray Cluster, Route/Ingress and Secret included.
      The Ray Cluster and service will be created by KubeRay directly and the other components will be individually created. + +The `labels={"exampleLabel": "example"}` parameter can be used to apply additional labels to the RayCluster resource. diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 76f64287..12a90f4a 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -187,6 +187,7 @@ def create_app_wrapper(self): write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls local_queue = self.config.local_queue + labels = self.config.labels return generate_appwrapper( name=name, namespace=namespace, @@ -211,6 +212,7 @@ def create_app_wrapper(self): write_to_file=write_to_file, verify_tls=verify_tls, local_queue=local_queue, + labels=labels, ) # creates a new cluster with the provided or default spec diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 064b51cd..f8010ea9 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -54,6 +54,7 @@ class ClusterConfiguration: dispatch_priority: str = None write_to_file: bool = False verify_tls: bool = True + labels: dict = field(default_factory=dict) def __post_init__(self): if not self.verify_tls: diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 97dda5ba..1dff1b75 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -309,7 +309,11 @@ def get_default_kueue_name(namespace: str): def write_components( - user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str] + user_yaml: dict, + output_file_name: str, + namespace: str, + local_queue: Optional[str], + user_labels: dict, ): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -331,6 +335,7 @@ def write_components( ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) + labels.update(user_labels) outfile.write("---\n") yaml.dump( component["generictemplate"], outfile, default_flow_style=False @@ -339,7 +344,11 @@ def write_components( def load_components( - user_yaml: dict, name: str, namespace: str, local_queue: Optional[str] + user_yaml: dict, + name: str, + namespace: str, + local_queue: Optional[str], + user_labels: dict, ): component_list = [] components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") @@ -355,6 +364,7 @@ def load_components( ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) + labels.update(user_labels) component_list.append(component["generictemplate"]) resources = "---\n" + "---\n".join( @@ -395,6 +405,7 @@ def generate_appwrapper( write_to_file: bool, verify_tls: bool, local_queue: Optional[str], + user_labels, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -446,11 +457,13 @@ def generate_appwrapper( if mcad: write_user_appwrapper(user_yaml, outfile) else: - write_components(user_yaml, outfile, namespace, local_queue) + write_components(user_yaml, outfile, namespace, local_queue, user_labels) return outfile else: if mcad: user_yaml = load_appwrapper(user_yaml, name) else: - user_yaml = load_components(user_yaml, name, namespace, local_queue) + user_yaml = load_components( + user_yaml, name, namespace, local_queue, user_labels + ) return 
user_yaml From 9b49b5916c31c77acaec06b9a22b835a6f92dc9a Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 30 Apr 2024 17:07:55 +0100 Subject: [PATCH 223/496] Updated unit test for user labels --- src/codeflare_sdk/utils/generate_yaml.py | 18 +++++++++--------- tests/test-case-no-mcad.yamls | 2 ++ tests/unit_test.py | 3 +++ 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 1dff1b75..f5de1fba 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -313,7 +313,7 @@ def write_components( output_file_name: str, namespace: str, local_queue: Optional[str], - user_labels: dict, + labels: dict, ): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -323,6 +323,7 @@ def write_components( components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") open(output_file_name, "w").close() lq_name = local_queue or get_default_kueue_name(namespace) + cluster_labels = labels with open(output_file_name, "a") as outfile: for component in components: if "generictemplate" in component: @@ -335,7 +336,7 @@ def write_components( ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) - labels.update(user_labels) + labels.update(cluster_labels) outfile.write("---\n") yaml.dump( component["generictemplate"], outfile, default_flow_style=False @@ -348,11 +349,12 @@ def load_components( name: str, namespace: str, local_queue: Optional[str], - user_labels: dict, + labels: dict, ): component_list = [] components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") lq_name = local_queue or get_default_kueue_name(namespace) + cluster_labels = labels for component in components: if "generictemplate" in component: if ( @@ -364,7 +366,7 @@ def load_components( ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) - labels.update(user_labels) + labels.update(cluster_labels) component_list.append(component["generictemplate"]) resources = "---\n" + "---\n".join( @@ -405,7 +407,7 @@ def generate_appwrapper( write_to_file: bool, verify_tls: bool, local_queue: Optional[str], - user_labels, + labels, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -457,13 +459,11 @@ def generate_appwrapper( if mcad: write_user_appwrapper(user_yaml, outfile) else: - write_components(user_yaml, outfile, namespace, local_queue, user_labels) + write_components(user_yaml, outfile, namespace, local_queue, labels) return outfile else: if mcad: user_yaml = load_appwrapper(user_yaml, name) else: - user_yaml = load_components( - user_yaml, name, namespace, local_queue, user_labels - ) + user_yaml = load_components(user_yaml, name, namespace, local_queue, labels) return user_yaml diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index aaf9324e..7fcf1fdc 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -5,6 +5,8 @@ metadata: labels: controller-tools.k8s.io: '1.0' kueue.x-k8s.io/queue-name: local-queue-default + testlabel: test + testlabel2: test name: unit-test-cluster-ray namespace: ns spec: diff --git a/tests/unit_test.py b/tests/unit_test.py index 6f2ccee1..53c88888 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -324,6 +324,7 @@ def test_cluster_creation_no_mcad(mocker): config.name = 
"unit-test-cluster-ray" config.write_to_file = True config.mcad = False + config.labels = {"testlabel": "test", "testlabel2": "test"} cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" @@ -348,6 +349,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): config.mcad = False config.write_to_file = True config.local_queue = "local-queue-default" + config.labels = {"testlabel": "test", "testlabel2": "test"} cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" assert cluster.app_wrapper_name == "unit-test-cluster-ray" @@ -373,6 +375,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): write_to_file=True, mcad=False, local_queue="local-queue-default", + labels={"testlabel": "test", "testlabel2": "test"}, ) cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" From ea820312fbef4e2840e438b73d56956b9cba3d49 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Fri, 10 May 2024 12:05:22 -0400 Subject: [PATCH 224/496] Update OWNERS --- OWNERS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/OWNERS b/OWNERS index f3fcc7ee..4c899d72 100644 --- a/OWNERS +++ b/OWNERS @@ -6,6 +6,7 @@ approvers: - kpostoffice - maxusmusti - MichaelClifford + - franciscojavierarceo reviewers: - anishasthana - astefanutti @@ -16,3 +17,4 @@ reviewers: - kpostoffice - maxusmusti - MichaelClifford + - franciscojavierarceo From f69d5e4a51057d16c3e09647f756b08997da3950 Mon Sep 17 00:00:00 2001 From: Francisco Arceo Date: Fri, 10 May 2024 12:10:42 -0400 Subject: [PATCH 225/496] Update OWNERS --- OWNERS | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/OWNERS b/OWNERS index 4c899d72..fdb68837 100644 --- a/OWNERS +++ b/OWNERS @@ -3,10 +3,10 @@ approvers: - Bobbins228 - dimakis - Fiona-Waters + - franciscojavierarceo - kpostoffice - maxusmusti - MichaelClifford - - franciscojavierarceo reviewers: - anishasthana - astefanutti @@ -14,7 +14,7 @@ reviewers: - ChristianZaccaria - dimakis - Fiona-Waters + - franciscojavierarceo - kpostoffice - maxusmusti - MichaelClifford - - franciscojavierarceo From 7e87599a184604101b328cd7f06c53004dd5fbe1 Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Mon, 13 May 2024 14:18:42 +0000 Subject: [PATCH 226/496] Changes in docs for release: v0.16.1 --- .../cluster/cluster.html | 158 +++++++++++------- .../cluster/config.html | 82 ++++++--- .../utils/generate_yaml.html | 72 +++++--- .../utils/pretty_print.html | 4 +- 4 files changed, 212 insertions(+), 104 deletions(-) diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html index eed1e154..efb6ccfe 100644 --- a/docs/detailed-documentation/cluster/cluster.html +++ b/docs/detailed-documentation/cluster/cluster.html @@ -50,6 +50,7 @@

      Module codeflare_sdk.cluster.cluster

      cluster setup queue, a list of all existing clusters, and the user's working namespace. """ +import re from time import sleep from typing import List, Optional, Tuple, Dict @@ -73,11 +74,13 @@

      Module codeflare_sdk.cluster.cluster

      RayClusterStatus, ) from kubernetes import client, config +from kubernetes.utils import parse_quantity import yaml import os import requests from kubernetes import config +from kubernetes.client.rest import ApiException class Cluster: @@ -216,6 +219,7 @@

      Module codeflare_sdk.cluster.cluster

      write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls local_queue = self.config.local_queue + labels = self.config.labels return generate_appwrapper( name=name, namespace=namespace, @@ -240,6 +244,7 @@

      Module codeflare_sdk.cluster.cluster

      write_to_file=write_to_file, verify_tls=verify_tls, local_queue=local_queue, + labels=labels, ) # creates a new cluster with the provided or default spec @@ -248,6 +253,10 @@

      Module codeflare_sdk.cluster.cluster

      Applies the AppWrapper yaml, pushing the resource request onto the MCAD queue. """ + + # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError + self._throw_for_no_raycluster() + namespace = self.config.namespace try: @@ -278,12 +287,32 @@

      Module codeflare_sdk.cluster.cluster

      except Exception as e: # pragma: no cover return _kube_api_error_handling(e) + def _throw_for_no_raycluster(self): + api_instance = client.CustomObjectsApi(api_config_handler()) + try: + api_instance.list_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=self.config.namespace, + plural="rayclusters", + ) + except ApiException as e: + if e.status == 404: + raise RuntimeError( + "RayCluster CustomResourceDefinition unavailable contact your administrator." + ) + else: + raise RuntimeError( + "Failed to get RayCluster CustomResourceDefinition: " + str(e) + ) + def down(self): """ Deletes the AppWrapper yaml, scaling-down and deleting all resources associated with the cluster. """ namespace = self.config.namespace + self._throw_for_no_raycluster() try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -520,26 +549,18 @@

      Module codeflare_sdk.cluster.cluster

      namespace=rc["metadata"]["namespace"], machine_types=machine_types, num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - min_cpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["requests"]["cpu"] - ), - max_cpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["cpu"] - ), - min_memory=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["requests"]["memory"][:-1] - ), - max_memory=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["memory"][:-1] - ), + min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["cpu"], + max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["cpu"], + min_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["memory"], + max_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["memory"], num_gpus=int( rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ "resources" @@ -1265,6 +1286,7 @@

      Classes

      write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls local_queue = self.config.local_queue + labels = self.config.labels return generate_appwrapper( name=name, namespace=namespace, @@ -1289,6 +1311,7 @@

      Classes

      write_to_file=write_to_file, verify_tls=verify_tls, local_queue=local_queue, + labels=labels, ) # creates a new cluster with the provided or default spec @@ -1297,6 +1320,10 @@

      Classes

      Applies the AppWrapper yaml, pushing the resource request onto the MCAD queue. """ + + # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError + self._throw_for_no_raycluster() + namespace = self.config.namespace try: @@ -1327,12 +1354,32 @@

      Classes

      except Exception as e: # pragma: no cover return _kube_api_error_handling(e) + def _throw_for_no_raycluster(self): + api_instance = client.CustomObjectsApi(api_config_handler()) + try: + api_instance.list_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=self.config.namespace, + plural="rayclusters", + ) + except ApiException as e: + if e.status == 404: + raise RuntimeError( + "RayCluster CustomResourceDefinition unavailable contact your administrator." + ) + else: + raise RuntimeError( + "Failed to get RayCluster CustomResourceDefinition: " + str(e) + ) + def down(self): """ Deletes the AppWrapper yaml, scaling-down and deleting all resources associated with the cluster. """ namespace = self.config.namespace + self._throw_for_no_raycluster() try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -1569,26 +1616,18 @@

      Classes

      namespace=rc["metadata"]["namespace"], machine_types=machine_types, num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - min_cpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["requests"]["cpu"] - ), - max_cpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["cpu"] - ), - min_memory=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["requests"]["memory"][:-1] - ), - max_memory=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["memory"][:-1] - ), + min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["cpu"], + max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["cpu"], + min_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["memory"], + max_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["memory"], num_gpus=int( rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ "resources" @@ -1811,6 +1850,7 @@

      Methods

      write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls local_queue = self.config.local_queue + labels = self.config.labels return generate_appwrapper( name=name, namespace=namespace, @@ -1835,6 +1875,7 @@

      Methods

      write_to_file=write_to_file, verify_tls=verify_tls, local_queue=local_queue, + labels=labels, )
      @@ -1870,6 +1911,7 @@

      Methods

      associated with the cluster. """ namespace = self.config.namespace + self._throw_for_no_raycluster() try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) @@ -1944,26 +1986,18 @@

      Methods

      namespace=rc["metadata"]["namespace"], machine_types=machine_types, num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - min_cpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["requests"]["cpu"] - ), - max_cpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["cpu"] - ), - min_memory=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["requests"]["memory"][:-1] - ), - max_memory=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["memory"][:-1] - ), + min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["cpu"], + max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["cpu"], + min_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["memory"], + max_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["memory"], num_gpus=int( rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ "resources" @@ -2168,6 +2202,10 @@

      Methods

      Applies the AppWrapper yaml, pushing the resource request onto the MCAD queue. """ + + # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError + self._throw_for_no_raycluster() + namespace = self.config.namespace try: diff --git a/docs/detailed-documentation/cluster/config.html b/docs/detailed-documentation/cluster/config.html index c7b9ccfc..1879afaf 100644 --- a/docs/detailed-documentation/cluster/config.html +++ b/docs/detailed-documentation/cluster/config.html @@ -52,6 +52,7 @@

      Module codeflare_sdk.cluster.config

      from dataclasses import dataclass, field import pathlib +import typing dir = pathlib.Path(__file__).parent.parent.resolve() @@ -66,15 +67,15 @@

      Module codeflare_sdk.cluster.config

      name: str namespace: str = None head_info: list = field(default_factory=list) - head_cpus: int = 2 - head_memory: int = 8 + head_cpus: typing.Union[int, str] = 2 + head_memory: typing.Union[int, str] = 8 head_gpus: int = 0 machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] - min_cpus: int = 1 - max_cpus: int = 1 + min_cpus: typing.Union[int, str] = 1 + max_cpus: typing.Union[int, str] = 1 num_workers: int = 1 - min_memory: int = 2 - max_memory: int = 2 + min_memory: typing.Union[int, str] = 2 + max_memory: typing.Union[int, str] = 2 num_gpus: int = 0 template: str = f"{dir}/templates/base-template.yaml" instascale: bool = False @@ -85,12 +86,31 @@

      Module codeflare_sdk.cluster.config

      dispatch_priority: str = None write_to_file: bool = False verify_tls: bool = True + labels: dict = field(default_factory=dict) def __post_init__(self): if not self.verify_tls: print( "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) + self._memory_to_string() + self._str_mem_no_unit_add_GB() + + def _str_mem_no_unit_add_GB(self): + if isinstance(self.head_memory, str) and self.head_memory.isdecimal(): + self.head_memory = f"{self.head_memory}G" + if isinstance(self.min_memory, str) and self.min_memory.isdecimal(): + self.min_memory = f"{self.min_memory}G" + if isinstance(self.max_memory, str) and self.max_memory.isdecimal(): + self.max_memory = f"{self.max_memory}G" + + def _memory_to_string(self): + if isinstance(self.head_memory, int): + self.head_memory = f"{self.head_memory}G" + if isinstance(self.min_memory, int): + self.min_memory = f"{self.min_memory}G" + if isinstance(self.max_memory, int): + self.max_memory = f"{self.max_memory}G" local_queue: str = None @@ -106,7 +126,7 @@

      Classes

      class ClusterConfiguration -(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: int = 2, head_memory: int = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: int = 1, max_cpus: int = 1, num_workers: int = 1, min_memory: int = 2, max_memory: int = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = False, envs: dict = <factory>, image: str = '', image_pull_secrets: list = <factory>, dispatch_priority: str = None, write_to_file: bool = False, verify_tls: bool = True, local_queue: str = None) +(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: Union[int, str] = 2, head_memory: Union[int, str] = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: Union[int, str] = 1, max_cpus: Union[int, str] = 1, num_workers: int = 1, min_memory: Union[int, str] = 2, max_memory: Union[int, str] = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = False, envs: dict = <factory>, image: str = '', image_pull_secrets: list = <factory>, dispatch_priority: str = None, write_to_file: bool = False, verify_tls: bool = True, labels: dict = <factory>, local_queue: str = None)

      This dataclass is used to specify resource requirements and other details, and @@ -124,15 +144,15 @@

      Classes

      name: str namespace: str = None head_info: list = field(default_factory=list) - head_cpus: int = 2 - head_memory: int = 8 + head_cpus: typing.Union[int, str] = 2 + head_memory: typing.Union[int, str] = 8 head_gpus: int = 0 machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] - min_cpus: int = 1 - max_cpus: int = 1 + min_cpus: typing.Union[int, str] = 1 + max_cpus: typing.Union[int, str] = 1 num_workers: int = 1 - min_memory: int = 2 - max_memory: int = 2 + min_memory: typing.Union[int, str] = 2 + max_memory: typing.Union[int, str] = 2 num_gpus: int = 0 template: str = f"{dir}/templates/base-template.yaml" instascale: bool = False @@ -143,12 +163,31 @@

      Classes

      dispatch_priority: str = None write_to_file: bool = False verify_tls: bool = True + labels: dict = field(default_factory=dict) def __post_init__(self): if not self.verify_tls: print( "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) + self._memory_to_string() + self._str_mem_no_unit_add_GB() + + def _str_mem_no_unit_add_GB(self): + if isinstance(self.head_memory, str) and self.head_memory.isdecimal(): + self.head_memory = f"{self.head_memory}G" + if isinstance(self.min_memory, str) and self.min_memory.isdecimal(): + self.min_memory = f"{self.min_memory}G" + if isinstance(self.max_memory, str) and self.max_memory.isdecimal(): + self.max_memory = f"{self.max_memory}G" + + def _memory_to_string(self): + if isinstance(self.head_memory, int): + self.head_memory = f"{self.head_memory}G" + if isinstance(self.min_memory, int): + self.min_memory = f"{self.min_memory}G" + if isinstance(self.max_memory, int): + self.max_memory = f"{self.max_memory}G" local_queue: str = None @@ -162,7 +201,7 @@

      Class variables

      -
      var head_cpus : int
      +
      var head_cpus : Union[int, str]
      @@ -174,7 +213,7 @@

      Class variables

      -
      var head_memory : int
      +
      var head_memory : Union[int, str]
      @@ -190,6 +229,10 @@

      Class variables

      +
      var labels : dict
      +
      +
      +
      var local_queue : str
      @@ -198,11 +241,11 @@

      Class variables

      -
      var max_cpus : int
      +
      var max_cpus : Union[int, str]
      -
      var max_memory : int
      +
      var max_memory : Union[int, str]
      @@ -210,11 +253,11 @@

      Class variables

      -
      var min_cpus : int
      +
      var min_cpus : Union[int, str]
      -
      var min_memory : int
      +
      var min_memory : Union[int, str]
      @@ -276,6 +319,7 @@

      image
    • image_pull_secrets
    • instascale
    • +
    • labels
    • local_queue
    • machine_types
    • max_cpus
    • diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index fc714780..14190101 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -171,8 +171,8 @@

      Module codeflare_sdk.utils.generate_yaml

      # Leave head node resources as template default resource["requests"]["cpu"] = head_cpus resource["limits"]["cpu"] = head_cpus - resource["requests"]["memory"] = str(head_memory) + "G" - resource["limits"]["memory"] = str(head_memory) + "G" + resource["requests"]["memory"] = head_memory + resource["limits"]["memory"] = head_memory resource["requests"]["nvidia.com/gpu"] = head_gpus resource["limits"]["nvidia.com/gpu"] = head_gpus @@ -189,9 +189,9 @@

      Module codeflare_sdk.utils.generate_yaml

      resource[k][spec] = min_cpu if spec == "memory": if k == "limits": - resource[k][spec] = str(max_memory) + "G" + resource[k][spec] = max_memory else: - resource[k][spec] = str(min_memory) + "G" + resource[k][spec] = min_memory if spec == "nvidia.com/gpu": if i == 0: resource[k][spec] = 0 @@ -244,12 +244,12 @@

      Module codeflare_sdk.utils.generate_yaml

      requests = resource.get("resources").get("requests") if requests is not None: requests["cpu"] = min_cpu - requests["memory"] = str(min_memory) + "G" + requests["memory"] = min_memory requests["nvidia.com/gpu"] = gpu limits = resource.get("resources").get("limits") if limits is not None: limits["cpu"] = max_cpu - limits["memory"] = str(max_memory) + "G" + limits["memory"] = max_memory limits["nvidia.com/gpu"] = gpu @@ -340,7 +340,11 @@

      Module codeflare_sdk.utils.generate_yaml

      def write_components( - user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str] + user_yaml: dict, + output_file_name: str, + namespace: str, + local_queue: Optional[str], + labels: dict, ): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) @@ -350,6 +354,7 @@

      Module codeflare_sdk.utils.generate_yaml

      components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") open(output_file_name, "w").close() lq_name = local_queue or get_default_kueue_name(namespace) + cluster_labels = labels with open(output_file_name, "a") as outfile: for component in components: if "generictemplate" in component: @@ -362,6 +367,7 @@

      Module codeflare_sdk.utils.generate_yaml

      ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) + labels.update(cluster_labels) outfile.write("---\n") yaml.dump( component["generictemplate"], outfile, default_flow_style=False @@ -370,11 +376,16 @@

      Module codeflare_sdk.utils.generate_yaml

      def load_components( - user_yaml: dict, name: str, namespace: str, local_queue: Optional[str] + user_yaml: dict, + name: str, + namespace: str, + local_queue: Optional[str], + labels: dict, ): component_list = [] components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") lq_name = local_queue or get_default_kueue_name(namespace) + cluster_labels = labels for component in components: if "generictemplate" in component: if ( @@ -386,6 +397,7 @@

      Module codeflare_sdk.utils.generate_yaml

      ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) + labels.update(cluster_labels) component_list.append(component["generictemplate"]) resources = "---\n" + "---\n".join( @@ -426,6 +438,7 @@

      Module codeflare_sdk.utils.generate_yaml

      write_to_file: bool, verify_tls: bool, local_queue: Optional[str], + labels, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -477,13 +490,13 @@

      Module codeflare_sdk.utils.generate_yaml

      if mcad: write_user_appwrapper(user_yaml, outfile) else: - write_components(user_yaml, outfile, namespace, local_queue) + write_components(user_yaml, outfile, namespace, local_queue, labels) return outfile else: if mcad: user_yaml = load_appwrapper(user_yaml, name) else: - user_yaml = load_components(user_yaml, name, namespace, local_queue) + user_yaml = load_components(user_yaml, name, namespace, local_queue, labels) return user_yaml
  • @@ -527,7 +540,7 @@

    Functions

    -def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, mcad: bool, instance_types: list, env, image_pull_secrets: list, dispatch_priority: str, priority_val: int, write_to_file: bool, verify_tls: bool, local_queue: Optional[str]) +def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, mcad: bool, instance_types: list, env, image_pull_secrets: list, dispatch_priority: str, priority_val: int, write_to_file: bool, verify_tls: bool, local_queue: Optional[str], labels)
    @@ -559,6 +572,7 @@

    Functions

    write_to_file: bool, verify_tls: bool, local_queue: Optional[str], + labels, ): user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -610,13 +624,13 @@

    Functions

    if mcad: write_user_appwrapper(user_yaml, outfile) else: - write_components(user_yaml, outfile, namespace, local_queue) + write_components(user_yaml, outfile, namespace, local_queue, labels) return outfile else: if mcad: user_yaml = load_appwrapper(user_yaml, name) else: - user_yaml = load_components(user_yaml, name, namespace, local_queue) + user_yaml = load_components(user_yaml, name, namespace, local_queue, labels) return user_yaml
    @@ -715,7 +729,7 @@

    Functions

    -def load_components(user_yaml: dict, name: str, namespace: str, local_queue: Optional[str]) +def load_components(user_yaml: dict, name: str, namespace: str, local_queue: Optional[str], labels: dict)
    @@ -724,11 +738,16 @@

    Functions

    Expand source code
    def load_components(
    -    user_yaml: dict, name: str, namespace: str, local_queue: Optional[str]
    +    user_yaml: dict,
    +    name: str,
    +    namespace: str,
    +    local_queue: Optional[str],
    +    labels: dict,
     ):
         component_list = []
         components = user_yaml.get("spec", "resources")["resources"].get("GenericItems")
         lq_name = local_queue or get_default_kueue_name(namespace)
    +    cluster_labels = labels
         for component in components:
             if "generictemplate" in component:
                 if (
    @@ -740,6 +759,7 @@ 

    Functions

    ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) + labels.update(cluster_labels) component_list.append(component["generictemplate"]) resources = "---\n" + "---\n".join( @@ -819,8 +839,8 @@

    Functions

    # Leave head node resources as template default resource["requests"]["cpu"] = head_cpus resource["limits"]["cpu"] = head_cpus - resource["requests"]["memory"] = str(head_memory) + "G" - resource["limits"]["memory"] = str(head_memory) + "G" + resource["requests"]["memory"] = head_memory + resource["limits"]["memory"] = head_memory resource["requests"]["nvidia.com/gpu"] = head_gpus resource["limits"]["nvidia.com/gpu"] = head_gpus @@ -837,9 +857,9 @@

    Functions

    resource[k][spec] = min_cpu if spec == "memory": if k == "limits": - resource[k][spec] = str(max_memory) + "G" + resource[k][spec] = max_memory else: - resource[k][spec] = str(min_memory) + "G" + resource[k][spec] = min_memory if spec == "nvidia.com/gpu": if i == 0: resource[k][spec] = 0 @@ -1037,17 +1057,17 @@

    Functions

    requests = resource.get("resources").get("requests") if requests is not None: requests["cpu"] = min_cpu - requests["memory"] = str(min_memory) + "G" + requests["memory"] = min_memory requests["nvidia.com/gpu"] = gpu limits = resource.get("resources").get("limits") if limits is not None: limits["cpu"] = max_cpu - limits["memory"] = str(max_memory) + "G" + limits["memory"] = max_memory limits["nvidia.com/gpu"] = gpu
    -def write_components(user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str]) +def write_components(user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str], labels: dict)
    @@ -1056,7 +1076,11 @@

    Functions

    Expand source code
    def write_components(
    -    user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str]
    +    user_yaml: dict,
    +    output_file_name: str,
    +    namespace: str,
    +    local_queue: Optional[str],
    +    labels: dict,
     ):
         # Create the directory if it doesn't exist
         directory_path = os.path.dirname(output_file_name)
    @@ -1066,6 +1090,7 @@ 

    Functions

    components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") open(output_file_name, "w").close() lq_name = local_queue or get_default_kueue_name(namespace) + cluster_labels = labels with open(output_file_name, "a") as outfile: for component in components: if "generictemplate" in component: @@ -1078,6 +1103,7 @@

    Functions

    ] labels = component["generictemplate"]["metadata"]["labels"] labels.update({"kueue.x-k8s.io/queue-name": lq_name}) + labels.update(cluster_labels) outfile.write("---\n") yaml.dump( component["generictemplate"], outfile, default_flow_style=False diff --git a/docs/detailed-documentation/utils/pretty_print.html b/docs/detailed-documentation/utils/pretty_print.html index e7c10825..2e7a69b8 100644 --- a/docs/detailed-documentation/utils/pretty_print.html +++ b/docs/detailed-documentation/utils/pretty_print.html @@ -167,7 +167,7 @@

    Module codeflare_sdk.utils.pretty_print

     name = cluster.name
     dashboard = cluster.dashboard
     workers = str(cluster.workers)
-    memory = str(cluster.worker_mem_min) + "~" + str(cluster.worker_mem_max)
+    memory = f"{cluster.worker_mem_min}~{cluster.worker_mem_max}"
     cpu = str(cluster.worker_cpu)
     gpu = str(cluster.worker_gpu)
@@ -345,7 +345,7 @@

    Functions

    name = cluster.name dashboard = cluster.dashboard workers = str(cluster.workers) - memory = str(cluster.worker_mem_min) + "~" + str(cluster.worker_mem_max) + memory = f"{cluster.worker_mem_min}~{cluster.worker_mem_max}" cpu = str(cluster.worker_cpu) gpu = str(cluster.worker_gpu) From a9bcce0a649362a7324cc8f4cac9ed24e601d042 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 26 Apr 2024 13:14:09 +0100 Subject: [PATCH 227/496] Align SDK with validating webhook behaviour on enableIngress --- src/codeflare_sdk/cluster/cluster.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 12a90f4a..89396b66 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -560,10 +560,10 @@ def _component_resources_up( .get("headGroupSpec", {}) .get("enableIngress") ) - if resource["kind"] == "RayCluster" and enable_ingress is not False: + if resource["kind"] == "RayCluster" and enable_ingress is True: name = resource["metadata"]["name"] print( - f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True' or is unset." + f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True'." ) return _create_resources(yamls, namespace, api_instance) From 179fd758a8f2031a966e4a925e0d90162d658cae Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 13 May 2024 16:08:47 +0100 Subject: [PATCH 228/496] Added release notes generation to release wf --- .github/workflows/release.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index d5ad965c..0d589d95 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -81,6 +81,7 @@ jobs: uses: ncipollo/release-action@v1 with: tag: "v${{ github.event.inputs.release-version }}" + generateReleaseNotes: true - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@release/v1 From 890975ea250f3c291a3ba36d75cd198c65afd549 Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Mon, 20 May 2024 10:30:31 +0200 Subject: [PATCH 229/496] e2e tests: Setup Go version based on CFO go.mod file --- .github/workflows/e2e_tests.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 83ed3988..0283c472 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -68,7 +68,8 @@ jobs: - name: Set Go uses: actions/setup-go@v5 with: - go-version: v1.20 + go-version-file: './codeflare-operator/go.mod' + cache-dependency-path: "./codeflare-operator/go.sum" - name: Set up gotestfmt uses: gotesttools/gotestfmt-action@v2 From b782a4ab27e44043796cbf92b012ee37fa421d7d Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Mon, 20 May 2024 21:31:28 +0200 Subject: [PATCH 230/496] Use native gRPC resolver in PR check for local interactive This resolver is compatible with dnsmasq used for dynamic hostnames in KinD. 
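
To reproduce the same behaviour when running the local-interactive test outside this workflow, the resolver can be selected in the test environment before the Ray client opens its gRPC channel. This is only a rough sketch based on the `GRPC_DNS_RESOLVER: "native"` entry added in the diff below; the requirement to set it before `ray`/`grpc` are imported is an assumption, not something stated in the patch:

```python
import os

# Assumption: the gRPC core reads GRPC_DNS_RESOLVER when it initialises,
# so set it before ray/grpc are imported or any channel is created.
os.environ["GRPC_DNS_RESOLVER"] = "native"

import ray  # noqa: E402  deliberately imported after the variable is set
```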
--- .github/workflows/e2e_tests.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 0283c472..c742e506 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -76,6 +76,12 @@ jobs: with: token: ${{ secrets.GITHUB_TOKEN }} + - name: Set up specific Python version + uses: actions/setup-python@v5 + with: + python-version: '3.9' + cache: 'pip' # caching pip dependencies + - name: Setup and start KinD cluster uses: ./common/github-actions/kind @@ -114,6 +120,8 @@ jobs: kubectl create clusterrolebinding sdk-user-clusterqueue-creator --clusterrole=clusterqueue-creator --user=sdk-user kubectl create clusterrole localqueue-creator --verb=get,list,create,delete,patch --resource=localqueues kubectl create clusterrolebinding sdk-user-localqueue-creator --clusterrole=localqueue-creator --user=sdk-user + kubectl create clusterrole list-secrets --verb=get,list --resource=secrets + kubectl create clusterrolebinding sdk-user-list-secrets --clusterrole=list-secrets --user=sdk-user kubectl config use-context sdk-user - name: Run e2e tests @@ -126,6 +134,8 @@ jobs: poetry install --with test,docs echo "Running e2e tests..." poetry run pytest -v -s ./tests/e2e -m kind > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 + env: + GRPC_DNS_RESOLVER: "native" - name: Switch to kind-cluster context to print logs if: always() && steps.deploy.outcome == 'success' From c99424ef1ff012ae6feb0c38b4d4f7cbc900cfc1 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Thu, 16 May 2024 10:18:02 +0100 Subject: [PATCH 231/496] Fix for cases without default localqueue --- src/codeflare_sdk/cluster/cluster.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 89396b66..f35f2d52 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -541,6 +541,9 @@ def from_k8_cluster_object( mcad=mcad, write_to_file=write_to_file, verify_tls=verify_tls, + local_queue=rc["metadata"] + .get("labels", dict()) + .get("kueue.x-k8s.io/queue-name", None), ) return Cluster(cluster_config) From b4c55e4910222e2be8a7f8c7026b98287c9a9cbe Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Thu, 16 May 2024 10:26:35 +0100 Subject: [PATCH 232/496] Unit test for get_cluster with localqueue --- tests/unit_test.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/tests/unit_test.py b/tests/unit_test.py index 53c88888..e8837a13 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1002,6 +1002,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "resourceName": "quicktest", "workload.codeflare.dev/appwrapper": "quicktest", "orderedinstance": "m4.xlarge_g4dn.xlarge", + "kueue.x-k8s.io/queue-name": "team-a-queue", }, "managedFields": [ { @@ -2483,6 +2484,36 @@ def test_get_cluster(mocker): assert cluster_config.num_workers == 1 +def test_get_cluster_no_mcad(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=get_ray_obj, + ) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), + ) + cluster = get_cluster("quicktest") + cluster_config = cluster.config + assert 
cluster_config.name == "quicktest" and cluster_config.namespace == "ns" + assert ( + "m4.xlarge" in cluster_config.machine_types + and "g4dn.xlarge" in cluster_config.machine_types + ) + assert cluster_config.min_cpus == 1 and cluster_config.max_cpus == 1 + assert cluster_config.min_memory == "2G" and cluster_config.max_memory == "2G" + assert cluster_config.num_gpus == 0 + assert cluster_config.instascale + assert ( + cluster_config.image + == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" + ) + assert cluster_config.num_workers == 1 + assert cluster_config.local_queue == "team-a-queue" + + def route_retrieval(group, version, namespace, plural, name): assert group == "route.openshift.io" assert version == "v1" From 54e90f5b7900822ae534d885ff6e621eb0c9b638 Mon Sep 17 00:00:00 2001 From: Jiri Petrlik Date: Wed, 22 May 2024 22:28:27 +0200 Subject: [PATCH 233/496] RHOAIENG-5344 - E2E test for Ray local interactive (#532) --- tests/e2e/local_interactive_sdk_kind_test.py | 85 ++++++++++++++++++ tests/e2e/local_interactive_sdk_oauth_test.py | 87 +++++++++++++++++++ tests/e2e/mnist_raycluster_sdk_kind_test.py | 1 + tests/e2e/mnist_raycluster_sdk_oauth_test.py | 1 + 4 files changed, 174 insertions(+) create mode 100644 tests/e2e/local_interactive_sdk_kind_test.py create mode 100644 tests/e2e/local_interactive_sdk_oauth_test.py diff --git a/tests/e2e/local_interactive_sdk_kind_test.py b/tests/e2e/local_interactive_sdk_kind_test.py new file mode 100644 index 00000000..1cbc5f62 --- /dev/null +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -0,0 +1,85 @@ +from codeflare_sdk import ( + Cluster, + ClusterConfiguration, + TokenAuthentication, + generate_cert, +) + +import pytest +import ray +import math + +from support import * + + +@pytest.mark.kind +class TestRayLocalInteractiveOauth: + def setup_method(self): + initialize_kubernetes_client(self) + + def teardown_method(self): + delete_namespace(self) + + def test_local_interactives(self): + self.setup_method() + create_namespace(self) + create_kueue_resources(self) + self.run_local_interactives() + self.teardown_method() + + def run_local_interactives(self): + ray_image = get_ray_image() + + cluster_name = "test-ray-cluster-li" + + cluster = Cluster( + ClusterConfiguration( + name=cluster_name, + namespace=self.namespace, + num_workers=1, + head_cpus="500m", + head_memory=2, + min_cpus="500m", + max_cpus=1, + min_memory=1, + max_memory=2, + num_gpus=0, + image=ray_image, + write_to_file=True, + verify_tls=False, + ) + ) + cluster.up() + cluster.wait_ready() + + generate_cert.generate_tls_cert(cluster_name, self.namespace) + generate_cert.export_env(cluster_name, self.namespace) + + print(cluster.local_client_url()) + + ray.shutdown() + ray.init(address=cluster.local_client_url(), logging_level="DEBUG") + + @ray.remote + def heavy_calculation_part(num_iterations): + result = 0.0 + for i in range(num_iterations): + for j in range(num_iterations): + for k in range(num_iterations): + result += math.sin(i) * math.cos(j) * math.tan(k) + return result + + @ray.remote + def heavy_calculation(num_iterations): + results = ray.get( + [heavy_calculation_part.remote(num_iterations // 30) for _ in range(30)] + ) + return sum(results) + + ref = heavy_calculation.remote(3000) + result = ray.get(ref) + assert result == 1789.4644387076714 + ray.cancel(ref) + ray.shutdown() + + cluster.down() diff --git a/tests/e2e/local_interactive_sdk_oauth_test.py b/tests/e2e/local_interactive_sdk_oauth_test.py new file mode 100644 
index 00000000..5210fe25 --- /dev/null +++ b/tests/e2e/local_interactive_sdk_oauth_test.py @@ -0,0 +1,87 @@ +from codeflare_sdk import ( + Cluster, + ClusterConfiguration, + TokenAuthentication, + generate_cert, +) + +import math +import pytest +import ray + +from support import * + + +@pytest.mark.openshift +class TestRayLocalInteractiveOauth: + def setup_method(self): + initialize_kubernetes_client(self) + + def teardown_method(self): + delete_namespace(self) + + def test_local_interactives(self): + self.setup_method() + create_namespace(self) + create_kueue_resources(self) + self.run_local_interactives() + self.teardown_method() + + def run_local_interactives(self): + ray_image = get_ray_image() + + auth = TokenAuthentication( + token=run_oc_command(["whoami", "--show-token=true"]), + server=run_oc_command(["whoami", "--show-server=true"]), + skip_tls=True, + ) + auth.login() + + cluster_name = "test-ray-cluster-li" + + cluster = Cluster( + ClusterConfiguration( + namespace=self.namespace, + name=cluster_name, + num_workers=1, + min_cpus=1, + max_cpus=1, + min_memory=4, + max_memory=4, + num_gpus=0, + image=ray_image, + verify_tls=False, + ) + ) + cluster.up() + cluster.wait_ready() + + generate_cert.generate_tls_cert(cluster_name, self.namespace) + generate_cert.export_env(cluster_name, self.namespace) + + ray.shutdown() + ray.init(address=cluster.local_client_url(), logging_level="DEBUG") + + @ray.remote + def heavy_calculation_part(num_iterations): + result = 0.0 + for i in range(num_iterations): + for j in range(num_iterations): + for k in range(num_iterations): + result += math.sin(i) * math.cos(j) * math.tan(k) + return result + + @ray.remote + def heavy_calculation(num_iterations): + results = ray.get( + [heavy_calculation_part.remote(num_iterations // 30) for _ in range(30)] + ) + return sum(results) + + ref = heavy_calculation.remote(3000) + result = ray.get(ref) + assert result == 1789.4644387076714 + ray.cancel(ref) + ray.shutdown() + + cluster.down() diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index 51c65947..ece7069f 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -25,6 +25,7 @@ def test_mnist_ray_cluster_sdk_kind(self): create_namespace(self) create_kueue_resources(self) self.run_mnist_raycluster_sdk_kind() + self.teardown_method() def run_mnist_raycluster_sdk_kind(self): ray_image = get_ray_image() diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index d0d090f3..548c80ad 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -25,6 +25,7 @@ def test_mnist_ray_cluster_sdk_auth(self): create_namespace(self) create_kueue_resources(self) self.run_mnist_raycluster_sdk_oauth() + self.teardown_method() def run_mnist_raycluster_sdk_oauth(self): ray_image = get_ray_image() From 077db18db36423cfdd2c248ff4a3cae76032d0a5 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 16 May 2024 14:12:18 +0100 Subject: [PATCH 234/496] Fixed get_current_namespace for in-cluster configuration --- src/codeflare_sdk/cluster/cluster.py | 31 +++++++++++++++++++++------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index f35f2d52..c1358035 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -633,14 +633,29 @@ def get_current_namespace(): 
# pragma: no cover print("Unable to find current namespace") return None else: - try: - _, active_context = config.list_kube_config_contexts(config_check()) - except Exception as e: - return _kube_api_error_handling(e) - try: - return active_context["context"]["namespace"] - except KeyError: - return None + if "KUBERNETES_PORT" in os.environ: + if os.path.isfile( + "/var/run/secrets/kubernetes.io/serviceaccount/namespace" + ): + try: + file = open( + "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" + ) + active_context = file.readline().strip("\n") + return active_context + except Exception as e: + print( + "unable to gather namespace from /var/run/secrets/kubernetes.io/serviceaccount/namespace trying to gather from current context" + ) + else: + try: + _, active_context = config.list_kube_config_contexts(config_check()) + except Exception as e: + return _kube_api_error_handling(e) + try: + return active_context["context"]["namespace"] + except KeyError: + return None def get_cluster( From 7d758ebdf2e283214ef25dc80c76b2ee7ab8e32b Mon Sep 17 00:00:00 2001 From: Kevin Date: Tue, 21 May 2024 14:36:25 -0400 Subject: [PATCH 235/496] remove check for cluster port Signed-off-by: Kevin --- src/codeflare_sdk/cluster/cluster.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index c1358035..6ddd778c 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -633,20 +633,17 @@ def get_current_namespace(): # pragma: no cover print("Unable to find current namespace") return None else: - if "KUBERNETES_PORT" in os.environ: - if os.path.isfile( - "/var/run/secrets/kubernetes.io/serviceaccount/namespace" - ): - try: - file = open( - "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" - ) - active_context = file.readline().strip("\n") - return active_context - except Exception as e: - print( - "unable to gather namespace from /var/run/secrets/kubernetes.io/serviceaccount/namespace trying to gather from current context" - ) + if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"): + try: + file = open( + "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" + ) + active_context = file.readline().strip("\n") + return active_context + except Exception as e: + print( + "unable to gather namespace from /var/run/secrets/kubernetes.io/serviceaccount/namespace trying to gather from current context" + ) else: try: _, active_context = config.list_kube_config_contexts(config_check()) From 32cb75151dc549cd014a46732555cff97f1495d4 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Fri, 10 May 2024 10:28:03 +0100 Subject: [PATCH 236/496] adding validation for local_queue provided in cluster config --- src/codeflare_sdk/utils/generate_yaml.py | 28 ++++++++++++++++++++++++ tests/unit_test.py | 8 +++++++ 2 files changed, 36 insertions(+) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index f5de1fba..2ea6dd78 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -308,6 +308,26 @@ def get_default_kueue_name(namespace: str): ) +def local_queue_exists(namespace: str, local_queue_name: str): + # get all local queues in the namespace + try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + local_queues = api_instance.list_namespaced_custom_object( + group="kueue.x-k8s.io", + 
version="v1beta1", + namespace=namespace, + plural="localqueues", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + # check if local queue with the name provided in cluster config exists + for lq in local_queues["items"]: + if lq["metadata"]["name"] == local_queue_name: + return True + return False + + def write_components( user_yaml: dict, output_file_name: str, @@ -324,6 +344,10 @@ def write_components( open(output_file_name, "w").close() lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels + if not local_queue_exists(namespace, lq_name): + raise ValueError( + "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" + ) with open(output_file_name, "a") as outfile: for component in components: if "generictemplate" in component: @@ -355,6 +379,10 @@ def load_components( components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels + if not local_queue_exists(namespace, lq_name): + raise ValueError( + "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" + ) for component in components: if "generictemplate" in component: if ( diff --git a/tests/unit_test.py b/tests/unit_test.py index e8837a13..1d4ca361 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -344,6 +344,10 @@ def test_cluster_creation_no_mcad_local_queue(mocker): "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) config = createClusterConfig() config.name = "unit-test-cluster-ray" config.mcad = False @@ -3046,6 +3050,10 @@ def test_cluster_throw_for_no_raycluster(mocker: MockerFixture): "codeflare_sdk.utils.generate_yaml.get_default_kueue_name", return_value="default", ) + mocker.patch( + "codeflare_sdk.utils.generate_yaml.local_queue_exists", + return_value="true", + ) def throw_if_getting_raycluster(group, version, namespace, plural): if plural == "rayclusters": From 8471812a7209c79cf63c334b50d9d4913874794e Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 22 Mar 2024 15:46:59 +0000 Subject: [PATCH 237/496] Made ODH cert default cert for Token Auth --- src/codeflare_sdk/cluster/auth.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/codeflare_sdk/cluster/auth.py b/src/codeflare_sdk/cluster/auth.py index 36233a0e..70d9ca68 100644 --- a/src/codeflare_sdk/cluster/auth.py +++ b/src/codeflare_sdk/cluster/auth.py @@ -101,12 +101,16 @@ def login(self) -> str: """ global config_path global api_client + odh_ca_path = "/etc/pki/tls/custom-certs/ca-bundle.crt" try: configuration = client.Configuration() configuration.api_key_prefix["authorization"] = "Bearer" configuration.host = self.server configuration.api_key["authorization"] = self.token if self.skip_tls == False and self.ca_cert_path == None: + if os.path.isfile(odh_ca_path): + print(f"Authenticated with certificate located at {odh_ca_path}") + configuration.ssl_ca_cert = odh_ca_path configuration.verify_ssl = True elif self.skip_tls == False: configuration.ssl_ca_cert = self.ca_cert_path From 652a15ff3a439a2bf13d0bde4a346611fdce8cea Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 13 
May 2024 12:25:16 +0100 Subject: [PATCH 238/496] Added env variable option for setting ca-cert path --- src/codeflare_sdk/cluster/auth.py | 26 ++++++++++++++++++-------- tests/auth-test.crt | 20 ++++++++++++++++++++ tests/unit_test.py | 23 +++++++++++++++++------ 3 files changed, 55 insertions(+), 14 deletions(-) create mode 100644 tests/auth-test.crt diff --git a/src/codeflare_sdk/cluster/auth.py b/src/codeflare_sdk/cluster/auth.py index 70d9ca68..91dadb05 100644 --- a/src/codeflare_sdk/cluster/auth.py +++ b/src/codeflare_sdk/cluster/auth.py @@ -81,7 +81,7 @@ def __init__( token: str, server: str, skip_tls: bool = False, - ca_cert_path: str = None, + ca_cert_path: str = "/etc/pki/tls/custom-certs/ca-bundle.crt", ): """ Initialize a TokenAuthentication object that requires a value for `token`, the API Token @@ -101,19 +101,29 @@ def login(self) -> str: """ global config_path global api_client - odh_ca_path = "/etc/pki/tls/custom-certs/ca-bundle.crt" try: configuration = client.Configuration() configuration.api_key_prefix["authorization"] = "Bearer" configuration.host = self.server configuration.api_key["authorization"] = self.token - if self.skip_tls == False and self.ca_cert_path == None: - if os.path.isfile(odh_ca_path): - print(f"Authenticated with certificate located at {odh_ca_path}") - configuration.ssl_ca_cert = odh_ca_path + ca_path_env = os.environ.get("CA_CERT_PATH") + + if self.skip_tls == False: + if ca_path_env != None: + self.ca_cert_path = ca_path_env + + if self.ca_cert_path == None: + configuration.ssl_ca_cert = None + elif os.path.isfile(self.ca_cert_path): + print( + f"Authenticated with certificate located at {self.ca_cert_path}" + ) + configuration.ssl_ca_cert = self.ca_cert_path + else: + raise FileNotFoundError( + f"Certificate file not found at {self.ca_cert_path}" + ) configuration.verify_ssl = True - elif self.skip_tls == False: - configuration.ssl_ca_cert = self.ca_cert_path else: urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) print("Insecure request warnings have been disabled") diff --git a/tests/auth-test.crt b/tests/auth-test.crt new file mode 100644 index 00000000..f470c632 --- /dev/null +++ b/tests/auth-test.crt @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDOTCCAiGgAwIBAgIUENjaZDrvhc5uV3j7GI8deZJwc+YwDQYJKoZIhvcNAQEL +BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yNDA1MTMxMTE1NDZaFw0yNTA1 +MTMxMTE1NDZaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw +HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDEYYk81jvPijZXXeI9cByf5EIbOVaBTH7I51J9EKG5 +Y/KRXI43WgvVEiZ3jP8LJnSD79WhBiL6TgadQZje5ndroRYDM9vyqz1OUZapnOO+ +yzl01y/qSsH8Kn88eLAzkE9HSu4QN9PuJtySyksjDFQJ6kjyE8ZHUSorur0FlLLf +IToFgTuaIPDYjvFRchOCfZ7sV/MF7LxqFfFnaWOYvH41ZdvqJiRcVsMi+mYs9/I/ +I72IMXwVnQDVnK8H84ntEmHNN6NoVuMKla0So4/wKcHJSCgS3axLI2Ka2aaaJo9K +l2cn21NOyodF+DaSFy7qaGRXxoTQ2k9tUrSvxkBJvRmBAgMBAAGjITAfMB0GA1Ud +DgQWBBRTK8mO5XMcmR+Xg/PVNFnvz4eubDANBgkqhkiG9w0BAQsFAAOCAQEAlZva +6ws3zRff7u0tWT2JJaE1uPqsuAdHtVvEyAMp2QvYfyrgADTroUTaSU4p6ppX/t7v +ynHhuzR6UOVkuY0/CH1P3UUGrEPNOXT8i2BDwL+j4y2K2aRN8zU0Nu/IVePBhu+4 +Jdt+3P7/MuwiCON5JukgxUYlQKhVhzFj7GOd2+Ca+fh8Siq3tkWDSN54+90fgylQ ++74Yfya1NVabpzLqP3Isqu2XQhEVaBFvj8Yu0h83e3D8LeQToC3mVMF4yy5BZ9Ty +K66YGlGQgszWEUFPEdsB8Dj/iJMhkWXuyc3u/w0s3t7rXeMYYgr+xrEeK+g0oyB5 +xeZuMjd567Znmu5oMw== +-----END CERTIFICATE----- diff --git a/tests/unit_test.py b/tests/unit_test.py index 1d4ca361..45e04670 100644 --- a/tests/unit_test.py +++ 
b/tests/unit_test.py @@ -123,27 +123,30 @@ def test_token_auth_creation(): assert token_auth.token == "token" assert token_auth.server == "server" assert token_auth.skip_tls == False - assert token_auth.ca_cert_path == None + assert token_auth.ca_cert_path == "/etc/pki/tls/custom-certs/ca-bundle.crt" token_auth = TokenAuthentication(token="token", server="server", skip_tls=True) assert token_auth.token == "token" assert token_auth.server == "server" assert token_auth.skip_tls == True - assert token_auth.ca_cert_path == None + assert token_auth.ca_cert_path == "/etc/pki/tls/custom-certs/ca-bundle.crt" token_auth = TokenAuthentication(token="token", server="server", skip_tls=False) assert token_auth.token == "token" assert token_auth.server == "server" assert token_auth.skip_tls == False - assert token_auth.ca_cert_path == None + assert token_auth.ca_cert_path == "/etc/pki/tls/custom-certs/ca-bundle.crt" token_auth = TokenAuthentication( - token="token", server="server", skip_tls=False, ca_cert_path="path/to/cert" + token="token", + server="server", + skip_tls=False, + ca_cert_path=f"{parent}/tests/auth-test.crt", ) assert token_auth.token == "token" assert token_auth.server == "server" assert token_auth.skip_tls == False - assert token_auth.ca_cert_path == "path/to/cert" + assert token_auth.ca_cert_path == f"{parent}/tests/auth-test.crt" except Exception: assert 0 == 1 @@ -174,7 +177,15 @@ def test_token_auth_login_tls(mocker): token="testtoken", server="testserver:6443", skip_tls=False, - ca_cert_path="path/to/cert", + ca_cert_path=f"{parent}/tests/auth-test.crt", + ) + assert token_auth.login() == ("Logged into testserver:6443") + + os.environ["CA_CERT_PATH"] = f"{parent}/tests/auth-test.crt" + token_auth = TokenAuthentication( + token="testtoken", + server="testserver:6443", + skip_tls=False, ) assert token_auth.login() == ("Logged into testserver:6443") From 2f98a2773ea19226445e74cf5876662673638b1a Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 15 May 2024 14:24:24 +0100 Subject: [PATCH 239/496] Updated env var --- docs/authentication.md | 2 +- src/codeflare_sdk/cluster/auth.py | 4 ++-- tests/unit_test.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/authentication.md b/docs/authentication.md index 8812a6fc..bb27f171 100644 --- a/docs/authentication.md +++ b/docs/authentication.md @@ -17,7 +17,7 @@ auth.login() # log out with auth.logout() ``` Setting `skip_tls=True` allows interaction with an HTTPS server bypassing the server certificate checks although this is not secure.
    -You can pass a custom certificate to `TokenAuthentication` by using `ca_cert_path="/path/to/cert"` when authenticating provided `skip_tls=False`. +You can pass a custom certificate to `TokenAuthentication` by using `ca_cert_path="/path/to/cert"` when authenticating provided `skip_tls=False`. Alternatively you can set the environment variable `CF_SDK_CA_CERT_PATH` to the path of your custom certificate. ## Method 2 Kubernetes Config File Authentication (Default location) If a user has authenticated to their cluster by alternate means e.g. run a login command like `oc login --token= --server=` their kubernetes config file should have updated.
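For reference, a minimal sketch of how the custom certificate options above combine under the patches in this series (the token, server URL, and certificate path below are placeholders, not values from the repository):

```python
# Minimal sketch, assuming the codeflare_sdk.cluster.auth module shown above.
# CA bundle precedence (per the later "init arg should take priority" patch):
# explicit ca_cert_path, then the CF_SDK_CA_CERT_PATH environment variable,
# then /etc/pki/tls/custom-certs/ca-bundle.crt if that file exists.
from codeflare_sdk.cluster.auth import TokenAuthentication

auth = TokenAuthentication(
    token="sha256~placeholder-token",           # placeholder
    server="https://api.example-cluster:6443",  # placeholder
    skip_tls=False,
    ca_cert_path="/path/to/cert",               # optional; falls back as above
)
auth.login()
# ... interact with the cluster ...
auth.logout()
```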
    diff --git a/src/codeflare_sdk/cluster/auth.py b/src/codeflare_sdk/cluster/auth.py index 91dadb05..e22cd828 100644 --- a/src/codeflare_sdk/cluster/auth.py +++ b/src/codeflare_sdk/cluster/auth.py @@ -106,10 +106,10 @@ def login(self) -> str: configuration.api_key_prefix["authorization"] = "Bearer" configuration.host = self.server configuration.api_key["authorization"] = self.token - ca_path_env = os.environ.get("CA_CERT_PATH") + ca_path_env = os.environ.get("CF_SDK_CA_CERT_PATH", self.ca_cert_path) if self.skip_tls == False: - if ca_path_env != None: + if ca_path_env != self.ca_cert_path: self.ca_cert_path = ca_path_env if self.ca_cert_path == None: diff --git a/tests/unit_test.py b/tests/unit_test.py index 45e04670..28f21e7d 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -181,7 +181,7 @@ def test_token_auth_login_tls(mocker): ) assert token_auth.login() == ("Logged into testserver:6443") - os.environ["CA_CERT_PATH"] = f"{parent}/tests/auth-test.crt" + os.environ["CF_SDK_CA_CERT_PATH"] = f"{parent}/tests/auth-test.crt" token_auth = TokenAuthentication( token="testtoken", server="testserver:6443", From 7b8ee11e5794e3e123be071436028bcf228b3f12 Mon Sep 17 00:00:00 2001 From: Kevin Date: Tue, 21 May 2024 17:30:11 -0400 Subject: [PATCH 240/496] init arg should take priority over env var Signed-off-by: Kevin --- src/codeflare_sdk/cluster/auth.py | 24 ++++++++++++++++-------- tests/unit_test.py | 6 ++++-- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/src/codeflare_sdk/cluster/auth.py b/src/codeflare_sdk/cluster/auth.py index e22cd828..c39fe1d4 100644 --- a/src/codeflare_sdk/cluster/auth.py +++ b/src/codeflare_sdk/cluster/auth.py @@ -32,6 +32,8 @@ global config_path config_path = None +WORKBENCH_CA_CERT_PATH = "/etc/pki/tls/custom-certs/ca-bundle.crt" + class Authentication(metaclass=abc.ABCMeta): """ @@ -81,7 +83,7 @@ def __init__( token: str, server: str, skip_tls: bool = False, - ca_cert_path: str = "/etc/pki/tls/custom-certs/ca-bundle.crt", + ca_cert_path: str = None, ): """ Initialize a TokenAuthentication object that requires a value for `token`, the API Token @@ -91,7 +93,17 @@ def __init__( self.token = token self.server = server self.skip_tls = skip_tls - self.ca_cert_path = ca_cert_path + self.ca_cert_path = self._gen_ca_cert_path(ca_cert_path) + + def _gen_ca_cert_path(self, ca_cert_path: str): + if ca_cert_path is not None: + return ca_cert_path + elif "CF_SDK_CA_CERT_PATH" in os.environ: + return os.environ.get("CF_SDK_CA_CERT_PATH") + elif os.path.exists(WORKBENCH_CA_CERT_PATH): + return WORKBENCH_CA_CERT_PATH + else: + return None def login(self) -> str: """ @@ -106,13 +118,9 @@ def login(self) -> str: configuration.api_key_prefix["authorization"] = "Bearer" configuration.host = self.server configuration.api_key["authorization"] = self.token - ca_path_env = os.environ.get("CF_SDK_CA_CERT_PATH", self.ca_cert_path) - - if self.skip_tls == False: - if ca_path_env != self.ca_cert_path: - self.ca_cert_path = ca_path_env - if self.ca_cert_path == None: + if not self.skip_tls: + if self.ca_cert_path is None: configuration.ssl_ca_cert = None elif os.path.isfile(self.ca_cert_path): print( diff --git a/tests/unit_test.py b/tests/unit_test.py index 28f21e7d..c9a8fb78 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -123,19 +123,21 @@ def test_token_auth_creation(): assert token_auth.token == "token" assert token_auth.server == "server" assert token_auth.skip_tls == False - assert token_auth.ca_cert_path == "/etc/pki/tls/custom-certs/ca-bundle.crt" + 
assert token_auth.ca_cert_path == None token_auth = TokenAuthentication(token="token", server="server", skip_tls=True) assert token_auth.token == "token" assert token_auth.server == "server" assert token_auth.skip_tls == True - assert token_auth.ca_cert_path == "/etc/pki/tls/custom-certs/ca-bundle.crt" + assert token_auth.ca_cert_path == None + os.environ["CF_SDK_CA_CERT_PATH"] = f"/etc/pki/tls/custom-certs/ca-bundle.crt" token_auth = TokenAuthentication(token="token", server="server", skip_tls=False) assert token_auth.token == "token" assert token_auth.server == "server" assert token_auth.skip_tls == False assert token_auth.ca_cert_path == "/etc/pki/tls/custom-certs/ca-bundle.crt" + os.environ.pop("CF_SDK_CA_CERT_PATH") token_auth = TokenAuthentication( token="token", From f96c3e3eaf8e6744262460f16814f1e011eb6f4b Mon Sep 17 00:00:00 2001 From: Kevin Date: Thu, 23 May 2024 12:45:09 -0400 Subject: [PATCH 241/496] add team-a-queue to quicktest unit test cluster Signed-off-by: Kevin --- tests/unit_test.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/tests/unit_test.py b/tests/unit_test.py index c9a8fb78..1fe139de 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -314,7 +314,16 @@ def get_local_queue(group, version, namespace, plural): "namespace": "ns", }, "spec": {"clusterQueue": "cluster-queue"}, - } + }, + { + "apiVersion": "kueue.x-k8s.io/v1beta1", + "kind": "LocalQueue", + "metadata": { + "name": "team-a-queue", + "namespace": "ns", + }, + "spec": {"clusterQueue": "team-a-queue"}, + }, ], "kind": "LocalQueueList", "metadata": {"continue": "", "resourceVersion": "2266811"}, From 5a7b57f117eca7832ab5bdc965b802716298a761 Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Thu, 23 May 2024 17:47:02 +0000 Subject: [PATCH 242/496] Changes in docs for release: v0.16.2 --- docs/detailed-documentation/cluster/auth.html | 74 ++++++++++++++++--- .../cluster/cluster.html | 73 +++++++++++++----- .../utils/generate_yaml.html | 66 +++++++++++++++++ 3 files changed, 182 insertions(+), 31 deletions(-) diff --git a/docs/detailed-documentation/cluster/auth.html b/docs/detailed-documentation/cluster/auth.html index be649e78..61d199e5 100644 --- a/docs/detailed-documentation/cluster/auth.html +++ b/docs/detailed-documentation/cluster/auth.html @@ -65,6 +65,8 @@

    Module codeflare_sdk.cluster.auth

    global config_path config_path = None +WORKBENCH_CA_CERT_PATH = "/etc/pki/tls/custom-certs/ca-bundle.crt" + class Authentication(metaclass=abc.ABCMeta): """ @@ -124,7 +126,17 @@

    Module codeflare_sdk.cluster.auth

    self.token = token self.server = server self.skip_tls = skip_tls - self.ca_cert_path = ca_cert_path + self.ca_cert_path = self._gen_ca_cert_path(ca_cert_path) + + def _gen_ca_cert_path(self, ca_cert_path: str): + if ca_cert_path is not None: + return ca_cert_path + elif "CF_SDK_CA_CERT_PATH" in os.environ: + return os.environ.get("CF_SDK_CA_CERT_PATH") + elif os.path.exists(WORKBENCH_CA_CERT_PATH): + return WORKBENCH_CA_CERT_PATH + else: + return None def login(self) -> str: """ @@ -139,10 +151,20 @@

    Module codeflare_sdk.cluster.auth

    configuration.api_key_prefix["authorization"] = "Bearer" configuration.host = self.server configuration.api_key["authorization"] = self.token - if self.skip_tls == False and self.ca_cert_path == None: + + if not self.skip_tls: + if self.ca_cert_path is None: + configuration.ssl_ca_cert = None + elif os.path.isfile(self.ca_cert_path): + print( + f"Authenticated with certificate located at {self.ca_cert_path}" + ) + configuration.ssl_ca_cert = self.ca_cert_path + else: + raise FileNotFoundError( + f"Certificate file not found at {self.ca_cert_path}" + ) configuration.verify_ssl = True - elif self.skip_tls == False: - configuration.ssl_ca_cert = self.ca_cert_path else: urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) print("Insecure request warnings have been disabled") @@ -551,7 +573,17 @@

    Methods

    self.token = token self.server = server self.skip_tls = skip_tls - self.ca_cert_path = ca_cert_path + self.ca_cert_path = self._gen_ca_cert_path(ca_cert_path) + + def _gen_ca_cert_path(self, ca_cert_path: str): + if ca_cert_path is not None: + return ca_cert_path + elif "CF_SDK_CA_CERT_PATH" in os.environ: + return os.environ.get("CF_SDK_CA_CERT_PATH") + elif os.path.exists(WORKBENCH_CA_CERT_PATH): + return WORKBENCH_CA_CERT_PATH + else: + return None def login(self) -> str: """ @@ -566,10 +598,20 @@

    Methods

    configuration.api_key_prefix["authorization"] = "Bearer" configuration.host = self.server configuration.api_key["authorization"] = self.token - if self.skip_tls == False and self.ca_cert_path == None: + + if not self.skip_tls: + if self.ca_cert_path is None: + configuration.ssl_ca_cert = None + elif os.path.isfile(self.ca_cert_path): + print( + f"Authenticated with certificate located at {self.ca_cert_path}" + ) + configuration.ssl_ca_cert = self.ca_cert_path + else: + raise FileNotFoundError( + f"Certificate file not found at {self.ca_cert_path}" + ) configuration.verify_ssl = True - elif self.skip_tls == False: - configuration.ssl_ca_cert = self.ca_cert_path else: urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) print("Insecure request warnings have been disabled") @@ -622,10 +664,20 @@

    Methods

    configuration.api_key_prefix["authorization"] = "Bearer" configuration.host = self.server configuration.api_key["authorization"] = self.token - if self.skip_tls == False and self.ca_cert_path == None: + + if not self.skip_tls: + if self.ca_cert_path is None: + configuration.ssl_ca_cert = None + elif os.path.isfile(self.ca_cert_path): + print( + f"Authenticated with certificate located at {self.ca_cert_path}" + ) + configuration.ssl_ca_cert = self.ca_cert_path + else: + raise FileNotFoundError( + f"Certificate file not found at {self.ca_cert_path}" + ) configuration.verify_ssl = True - elif self.skip_tls == False: - configuration.ssl_ca_cert = self.ca_cert_path else: urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) print("Insecure request warnings have been disabled") diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html index efb6ccfe..07aea536 100644 --- a/docs/detailed-documentation/cluster/cluster.html +++ b/docs/detailed-documentation/cluster/cluster.html @@ -573,6 +573,9 @@

    Module codeflare_sdk.cluster.cluster

    mcad=mcad, write_to_file=write_to_file, verify_tls=verify_tls, + local_queue=rc["metadata"] + .get("labels", dict()) + .get("kueue.x-k8s.io/queue-name", None), ) return Cluster(cluster_config) @@ -592,10 +595,10 @@

    Module codeflare_sdk.cluster.cluster

    .get("headGroupSpec", {}) .get("enableIngress") ) - if resource["kind"] == "RayCluster" and enable_ingress is not False: + if resource["kind"] == "RayCluster" and enable_ingress is True: name = resource["metadata"]["name"] print( - f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True' or is unset." + f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True'." ) return _create_resources(yamls, namespace, api_instance) @@ -662,14 +665,26 @@

    Module codeflare_sdk.cluster.cluster

    print("Unable to find current namespace") return None else: - try: - _, active_context = config.list_kube_config_contexts(config_check()) - except Exception as e: - return _kube_api_error_handling(e) - try: - return active_context["context"]["namespace"] - except KeyError: - return None + if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"): + try: + file = open( + "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" + ) + active_context = file.readline().strip("\n") + return active_context + except Exception as e: + print( + "unable to gather namespace from /var/run/secrets/kubernetes.io/serviceaccount/namespace trying to gather from current context" + ) + else: + try: + _, active_context = config.list_kube_config_contexts(config_check()) + except Exception as e: + return _kube_api_error_handling(e) + try: + return active_context["context"]["namespace"] + except KeyError: + return None def get_cluster( @@ -1070,14 +1085,26 @@

    Functions

    print("Unable to find current namespace") return None else: - try: - _, active_context = config.list_kube_config_contexts(config_check()) - except Exception as e: - return _kube_api_error_handling(e) - try: - return active_context["context"]["namespace"] - except KeyError: - return None
    + if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"): + try: + file = open( + "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" + ) + active_context = file.readline().strip("\n") + return active_context + except Exception as e: + print( + "unable to gather namespace from /var/run/secrets/kubernetes.io/serviceaccount/namespace trying to gather from current context" + ) + else: + try: + _, active_context = config.list_kube_config_contexts(config_check()) + except Exception as e: + return _kube_api_error_handling(e) + try: + return active_context["context"]["namespace"] + except KeyError: + return None
    @@ -1640,6 +1667,9 @@

    Classes

    mcad=mcad, write_to_file=write_to_file, verify_tls=verify_tls, + local_queue=rc["metadata"] + .get("labels", dict()) + .get("kueue.x-k8s.io/queue-name", None), ) return Cluster(cluster_config) @@ -1659,10 +1689,10 @@

    Classes

    .get("headGroupSpec", {}) .get("enableIngress") ) - if resource["kind"] == "RayCluster" and enable_ingress is not False: + if resource["kind"] == "RayCluster" and enable_ingress is True: name = resource["metadata"]["name"] print( - f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True' or is unset." + f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True'." ) return _create_resources(yamls, namespace, api_instance) @@ -2010,6 +2040,9 @@

    Methods

    mcad=mcad, write_to_file=write_to_file, verify_tls=verify_tls, + local_queue=rc["metadata"] + .get("labels", dict()) + .get("kueue.x-k8s.io/queue-name", None), ) return Cluster(cluster_config)
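To make the regenerated documentation above concrete: after this change, fetching an existing cluster also picks up the `kueue.x-k8s.io/queue-name` label from the RayCluster metadata, so the rebuilt configuration remembers its local queue. A small sketch with hypothetical names:

```python
# Minimal sketch (hypothetical cluster name and namespace): retrieve an existing
# RayCluster and inspect the local queue carried over from its metadata labels.
from codeflare_sdk.cluster.cluster import get_cluster

cluster = get_cluster(cluster_name="raytest", namespace="default")
print(cluster.config.local_queue)  # kueue.x-k8s.io/queue-name label value, or None
```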
    diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index 14190101..b2a3af84 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -339,6 +339,26 @@

    Module codeflare_sdk.utils.generate_yaml

    ) +def local_queue_exists(namespace: str, local_queue_name: str): + # get all local queues in the namespace + try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + local_queues = api_instance.list_namespaced_custom_object( + group="kueue.x-k8s.io", + version="v1beta1", + namespace=namespace, + plural="localqueues", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + # check if local queue with the name provided in cluster config exists + for lq in local_queues["items"]: + if lq["metadata"]["name"] == local_queue_name: + return True + return False + + def write_components( user_yaml: dict, output_file_name: str, @@ -355,6 +375,10 @@

    Module codeflare_sdk.utils.generate_yaml

    open(output_file_name, "w").close() lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels + if not local_queue_exists(namespace, lq_name): + raise ValueError( + "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" + ) with open(output_file_name, "a") as outfile: for component in components: if "generictemplate" in component: @@ -386,6 +410,10 @@

    Module codeflare_sdk.utils.generate_yaml

    components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels + if not local_queue_exists(namespace, lq_name): + raise ValueError( + "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" + ) for component in components: if "generictemplate" in component: if ( @@ -748,6 +776,10 @@

    Functions

    components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels + if not local_queue_exists(namespace, lq_name): + raise ValueError( + "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" + ) for component in components: if "generictemplate" in component: if ( @@ -770,6 +802,35 @@

    Functions

    return user_yaml +
    +def local_queue_exists(namespace: str, local_queue_name: str) +
    +
    +
    +
    + +Expand source code + +
    def local_queue_exists(namespace: str, local_queue_name: str):
    +    # get all local queues in the namespace
    +    try:
    +        config_check()
    +        api_instance = client.CustomObjectsApi(api_config_handler())
    +        local_queues = api_instance.list_namespaced_custom_object(
    +            group="kueue.x-k8s.io",
    +            version="v1beta1",
    +            namespace=namespace,
    +            plural="localqueues",
    +        )
    +    except Exception as e:  # pragma: no cover
    +        return _kube_api_error_handling(e)
    +    # check if local queue with the name provided in cluster config exists
    +    for lq in local_queues["items"]:
    +        if lq["metadata"]["name"] == local_queue_name:
    +            return True
    +    return False
    +
    +
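As a quick illustration of how the helper documented above is meant to be used (the namespace and queue name are placeholders taken from the test fixtures in this series):

```python
# Minimal sketch: verify a Kueue LocalQueue exists in the target namespace
# before referencing it in a ClusterConfiguration.
from codeflare_sdk.utils.generate_yaml import local_queue_exists

if not local_queue_exists(namespace="ns", local_queue_name="team-a-queue"):
    raise ValueError(
        "local_queue provided does not exist or is not in this namespace."
    )
```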
    def read_template(template)
    @@ -1091,6 +1152,10 @@

    Functions

    open(output_file_name, "w").close() lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels + if not local_queue_exists(namespace, lq_name): + raise ValueError( + "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" + ) with open(output_file_name, "a") as outfile: for component in components: if "generictemplate" in component: @@ -1158,6 +1223,7 @@

    Index

  • is_openshift_cluster
  • load_appwrapper
  • load_components
  • +local_queue_exists
  • read_template
  • update_affinity
  • update_custompodresources
  • From 981c312c013215c45937db0fb73a83549c74bf37 Mon Sep 17 00:00:00 2001 From: oksanabaza Date: Tue, 28 May 2024 11:11:07 +0100 Subject: [PATCH 243/496] prep --- src/codeflare_sdk/utils/generate_yaml.py | 46 ++++++++++++++++++++++-- 1 file changed, 44 insertions(+), 2 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 2ea6dd78..17c5af20 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -30,6 +30,7 @@ from os import urandom from base64 import b64encode from urllib3.util import parse_url +from kubernetes.client.rest import ApiException def read_template(template): @@ -307,6 +308,32 @@ def get_default_kueue_name(namespace: str): "Default Local Queue with kueue.x-k8s.io/default-queue: true annotation not found please create a default Local Queue or provide the local_queue name in Cluster Configuration" ) +# test function + +# def _throw_for_no_raycluster( user_yaml: dict, +# output_file_name: str, +# namespace: str, +# local_queue: Optional[str], +# labels: dict,): +# api_instance = client.CustomObjectsApi(api_config_handler()) +# print('hello') +# # try: +# # api_instance.list_namespaced_custom_object( +# # group="kueue.x-k8s.io", +# # version="v1", +# # namespace=namespace, +# # plural="localqueues", +# # ) +# # except ApiException as e: +# # if e.status == 404: +# # raise RuntimeError( +# # "RayCluster CustomResourceDefinition unavailable contact your administrator." +# # ) +# # else: +# # raise RuntimeError( +# # "Failed to get RayCluster CustomResourceDefinition: " + str(e) +# # ) + def local_queue_exists(namespace: str, local_queue_name: str): # get all local queues in the namespace @@ -341,6 +368,12 @@ def write_components( os.makedirs(directory_path) components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") + # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError + # _throw_for_no_raycluster() + # if local_queue is None: + # print( + # "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." + # ) open(output_file_name, "w").close() lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels @@ -367,22 +400,31 @@ def write_components( ) print(f"Written to: {output_file_name}") - def load_components( user_yaml: dict, name: str, namespace: str, local_queue: Optional[str], - labels: dict, + labels: dict ): component_list = [] components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels +<<<<<<< HEAD if not local_queue_exists(namespace, lq_name): raise ValueError( "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" ) +======= + + # _throw_for_no_raycluster() + + # if local_queue is None: + # print( + # "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." 
+ # ) +>>>>>>> 1d38c8d (prep) for component in components: if "generictemplate" in component: if ( From 5ad68e22234cd732a5598d24f2d21760067a22a2 Mon Sep 17 00:00:00 2001 From: oksanabaza Date: Tue, 28 May 2024 11:20:33 +0100 Subject: [PATCH 244/496] prep --- src/codeflare_sdk/utils/generate_yaml.py | 45 +++++------------------- 1 file changed, 8 insertions(+), 37 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 17c5af20..fd2410f7 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -308,32 +308,6 @@ def get_default_kueue_name(namespace: str): "Default Local Queue with kueue.x-k8s.io/default-queue: true annotation not found please create a default Local Queue or provide the local_queue name in Cluster Configuration" ) -# test function - -# def _throw_for_no_raycluster( user_yaml: dict, -# output_file_name: str, -# namespace: str, -# local_queue: Optional[str], -# labels: dict,): -# api_instance = client.CustomObjectsApi(api_config_handler()) -# print('hello') -# # try: -# # api_instance.list_namespaced_custom_object( -# # group="kueue.x-k8s.io", -# # version="v1", -# # namespace=namespace, -# # plural="localqueues", -# # ) -# # except ApiException as e: -# # if e.status == 404: -# # raise RuntimeError( -# # "RayCluster CustomResourceDefinition unavailable contact your administrator." -# # ) -# # else: -# # raise RuntimeError( -# # "Failed to get RayCluster CustomResourceDefinition: " + str(e) -# # ) - def local_queue_exists(namespace: str, local_queue_name: str): # get all local queues in the namespace @@ -370,10 +344,10 @@ def write_components( components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError # _throw_for_no_raycluster() - # if local_queue is None: - # print( - # "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." - # ) + if local_queue is None: + print( + "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." + ) open(output_file_name, "w").close() lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels @@ -411,20 +385,17 @@ def load_components( components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels -<<<<<<< HEAD if not local_queue_exists(namespace, lq_name): raise ValueError( "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" ) -======= # _throw_for_no_raycluster() - # if local_queue is None: - # print( - # "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." - # ) ->>>>>>> 1d38c8d (prep) + if local_queue is None: + print( + "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." + ) for component in components: if "generictemplate" in component: if ( From add2d361f6e88d91852f28eeb96d823cddb1aa6e Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Tue, 28 May 2024 11:38:46 +0100 Subject: [PATCH 245/496] Revert "prep" This reverts commit 5ad68e22234cd732a5598d24f2d21760067a22a2. 
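For context, the reverted "prep" commits above were sketching a guard that the RayCluster CustomResourceDefinition is installed before resources are generated. The commented-out draft never implemented it; a rough illustration of the idea (an assumption on my part, not code from these patches) could look like:

```python
# Rough sketch only: probe for the KubeRay RayCluster CRD and raise the error
# message the draft mentions if the resource is not served by the API.
from kubernetes import client, config
from kubernetes.client.rest import ApiException

def throw_for_no_raycluster(namespace: str) -> None:
    config.load_kube_config()  # or load_incluster_config() when running in a pod
    api = client.CustomObjectsApi()
    try:
        api.list_namespaced_custom_object(
            group="ray.io", version="v1", namespace=namespace, plural="rayclusters"
        )
    except ApiException as e:
        if e.status == 404:
            raise RuntimeError(
                "RayCluster CustomResourceDefinition unavailable contact your administrator."
            )
        raise RuntimeError(
            "Failed to get RayCluster CustomResourceDefinition: " + str(e)
        )
```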
--- src/codeflare_sdk/utils/generate_yaml.py | 45 +++++++++++++++++++----- 1 file changed, 37 insertions(+), 8 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index fd2410f7..17c5af20 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -308,6 +308,32 @@ def get_default_kueue_name(namespace: str): "Default Local Queue with kueue.x-k8s.io/default-queue: true annotation not found please create a default Local Queue or provide the local_queue name in Cluster Configuration" ) +# test function + +# def _throw_for_no_raycluster( user_yaml: dict, +# output_file_name: str, +# namespace: str, +# local_queue: Optional[str], +# labels: dict,): +# api_instance = client.CustomObjectsApi(api_config_handler()) +# print('hello') +# # try: +# # api_instance.list_namespaced_custom_object( +# # group="kueue.x-k8s.io", +# # version="v1", +# # namespace=namespace, +# # plural="localqueues", +# # ) +# # except ApiException as e: +# # if e.status == 404: +# # raise RuntimeError( +# # "RayCluster CustomResourceDefinition unavailable contact your administrator." +# # ) +# # else: +# # raise RuntimeError( +# # "Failed to get RayCluster CustomResourceDefinition: " + str(e) +# # ) + def local_queue_exists(namespace: str, local_queue_name: str): # get all local queues in the namespace @@ -344,10 +370,10 @@ def write_components( components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError # _throw_for_no_raycluster() - if local_queue is None: - print( - "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." - ) + # if local_queue is None: + # print( + # "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." + # ) open(output_file_name, "w").close() lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels @@ -385,17 +411,20 @@ def load_components( components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels +<<<<<<< HEAD if not local_queue_exists(namespace, lq_name): raise ValueError( "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" ) +======= # _throw_for_no_raycluster() - if local_queue is None: - print( - "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." - ) + # if local_queue is None: + # print( + # "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." + # ) +>>>>>>> 1d38c8d (prep) for component in components: if "generictemplate" in component: if ( From 2a43d9e05f750fd7f5ef14ef2e032c75b51c569b Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Tue, 28 May 2024 11:40:05 +0100 Subject: [PATCH 246/496] Revert "prep" This reverts commit 981c312c013215c45937db0fb73a83549c74bf37. 
--- src/codeflare_sdk/utils/generate_yaml.py | 46 ++---------------------- 1 file changed, 2 insertions(+), 44 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 17c5af20..2ea6dd78 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -30,7 +30,6 @@ from os import urandom from base64 import b64encode from urllib3.util import parse_url -from kubernetes.client.rest import ApiException def read_template(template): @@ -308,32 +307,6 @@ def get_default_kueue_name(namespace: str): "Default Local Queue with kueue.x-k8s.io/default-queue: true annotation not found please create a default Local Queue or provide the local_queue name in Cluster Configuration" ) -# test function - -# def _throw_for_no_raycluster( user_yaml: dict, -# output_file_name: str, -# namespace: str, -# local_queue: Optional[str], -# labels: dict,): -# api_instance = client.CustomObjectsApi(api_config_handler()) -# print('hello') -# # try: -# # api_instance.list_namespaced_custom_object( -# # group="kueue.x-k8s.io", -# # version="v1", -# # namespace=namespace, -# # plural="localqueues", -# # ) -# # except ApiException as e: -# # if e.status == 404: -# # raise RuntimeError( -# # "RayCluster CustomResourceDefinition unavailable contact your administrator." -# # ) -# # else: -# # raise RuntimeError( -# # "Failed to get RayCluster CustomResourceDefinition: " + str(e) -# # ) - def local_queue_exists(namespace: str, local_queue_name: str): # get all local queues in the namespace @@ -368,12 +341,6 @@ def write_components( os.makedirs(directory_path) components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") - # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError - # _throw_for_no_raycluster() - # if local_queue is None: - # print( - # "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." - # ) open(output_file_name, "w").close() lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels @@ -400,31 +367,22 @@ def write_components( ) print(f"Written to: {output_file_name}") + def load_components( user_yaml: dict, name: str, namespace: str, local_queue: Optional[str], - labels: dict + labels: dict, ): component_list = [] components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") lq_name = local_queue or get_default_kueue_name(namespace) cluster_labels = labels -<<<<<<< HEAD if not local_queue_exists(namespace, lq_name): raise ValueError( "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" ) -======= - - # _throw_for_no_raycluster() - - # if local_queue is None: - # print( - # "Kueue is not installed or won't be used. The absence of CRDs may lack the necessary functionality." 
- # ) ->>>>>>> 1d38c8d (prep) for component in components: if "generictemplate" in component: if ( From 9da541d9a55b41dccaf0c1526d919425b3a04bdf Mon Sep 17 00:00:00 2001 From: oksanabaza Date: Thu, 2 May 2024 12:04:39 +0100 Subject: [PATCH 247/496] Add GPU to ClusterConfiguration --- demo-notebooks/additional-demos/hf_interactive.ipynb | 3 ++- demo-notebooks/additional-demos/local_interactive.ipynb | 3 ++- demo-notebooks/additional-demos/ray_job_client.ipynb | 3 ++- demo-notebooks/guided-demos/0_basic_ray.ipynb | 5 +++-- demo-notebooks/guided-demos/1_cluster_job_client.ipynb | 4 ++-- demo-notebooks/guided-demos/2_basic_interactive.ipynb | 3 ++- .../guided-demos/notebook-ex-outputs/0_basic_ray.ipynb | 3 ++- .../notebook-ex-outputs/1_cluster_job_client.ipynb | 4 ++-- .../notebook-ex-outputs/2_basic_interactive.ipynb | 3 ++- demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb | 3 ++- .../guided-demos/preview_nbs/1_cluster_job_client.ipynb | 4 ++-- .../guided-demos/preview_nbs/2_basic_interactive.ipynb | 3 ++- docs/cluster-configuration.md | 2 +- 13 files changed, 26 insertions(+), 17 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 9181f3aa..9a362321 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -93,12 +93,13 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(name='hfgputest', \n", " namespace=\"default\", # Update to your namespace\n", + " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=1,\n", " num_workers=1,\n", " min_cpus=8, \n", " max_cpus=8, \n", " min_memory=16, \n", " max_memory=16, \n", - " num_gpus=4,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 36adfb50..94e657a7 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -55,12 +55,13 @@ "\n", "cluster = Cluster(ClusterConfiguration(namespace=namespace,\n", " name=cluster_name,\n", + " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=0,\n", " num_workers=1,\n", " min_cpus=1,\n", " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index e3d90cd3..0182f1b3 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -46,12 +46,13 @@ "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", " namespace='default', # Update to your namespace\n", + " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " 
num_gpus=0,\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", "))" diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 6a3b3710..7dc9e8cd 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -61,14 +61,15 @@ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", - " name='raytest',\n", + " name='raytest', \n", " namespace='default', # Update to your namespace\n", + " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=0,\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 787bd315..608fbd2e 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -45,13 +45,13 @@ "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", " namespace='default', # Update to your namespace\n", + " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=1,\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=1,\n", - " head_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 943425a8..6cee82e1 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -62,12 +62,13 @@ "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", " namespace=namespace,\n", + " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=1,\n", " num_workers=2,\n", " min_cpus=2,\n", " max_cpus=2,\n", " min_memory=8,\n", " max_memory=8,\n", - " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 6771e9d7..569b4d34 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -71,12 +71,13 @@ "cluster = 
Cluster(ClusterConfiguration(\n", " name='raytest',\n", " namespace='default', # Update to your namespace\n", + " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=0,\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index 787bd315..608fbd2e 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -45,13 +45,13 @@ "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", " namespace='default', # Update to your namespace\n", + " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=1,\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=1,\n", - " head_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 62d34f3f..ff88d621 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -70,12 +70,13 @@ "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", " namespace=namespace,\n", + " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=1,\n", " num_workers=2,\n", " min_cpus=2,\n", " max_cpus=2,\n", " min_memory=8,\n", " max_memory=8,\n", - " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 6a3b3710..77b0e278 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -63,12 +63,13 @@ "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", " namespace='default', # Update to your namespace\n", + " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=0,\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=0,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index 
db11cada..a041ab5b 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -45,13 +45,13 @@ "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", " namespace='default', # Update to your namespace\n", + " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=1,\n", " num_workers=2,\n", " min_cpus=1,\n", " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " num_gpus=1,\n", - " head_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources\n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 28e05a26..44846f61 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -62,12 +62,13 @@ "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", " namespace=namespace,\n", + " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", + " num_gpus=1,\n", " num_workers=2,\n", " min_cpus=2,\n", " max_cpus=2,\n", " min_memory=8,\n", " max_memory=8,\n", - " num_gpus=1,\n", " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index b83600fe..c131b20c 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -12,12 +12,12 @@ cluster = Cluster(ClusterConfiguration( head_cpus=1, # Default 2 head_memory=1, # Default 8 head_gpus=0, # Default 0 + num_gpus=0, # Default 0 num_workers=1, # Default 1 min_cpus=1, # Default 1 max_cpus=1, # Default 1 min_memory=2, # Default 2 max_memory=2, # Default 2 - num_gpus=0, # Default 0 mcad=True, # Default True image="quay.io/project-codeflare/ray:latest-py39-cu118", # Mandatory Field machine_types=["m5.xlarge", "g4dn.xlarge"], From a057ac1b151a910f047785dee728e8cc4001be9c Mon Sep 17 00:00:00 2001 From: Srihari Venkataramaiah Date: Fri, 31 May 2024 18:08:38 +0530 Subject: [PATCH 248/496] Skip namespace deletion for upgrade testing (#550) --- tests/upgrade/raycluster_sdk_upgrade_test.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index c92d1963..8a17654a 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -80,9 +80,6 @@ def setup_method(self): if not self.cluster: raise RuntimeError("TestRayClusterUp needs to be run before this test") - def teardown_method(self): - delete_namespace(self) - def test_mnist_job_submission(self): auth = TokenAuthentication( token=run_oc_command(["whoami", "--show-token=true"]), From 56b4478fe49a043866edf56fcdb89598ea46412f Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Thu, 30 May 2024 11:03:41 +0200 Subject: [PATCH 249/496] Fix incorrect comment to stop job in 1_cluster_job_client.ipynb --- demo-notebooks/guided-demos/1_cluster_job_client.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 608fbd2e..9e626f55 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -179,7 +179,7 @@ "outputs": [], "source": [ "# Delete a job\n", - "# Can run client.cancel_job(submission_id) first if job is still running\n", + "# Can run client.stop_job(submission_id) first if job is still running\n", "client.delete_job(submission_id)" ] }, From c0f7d7fa36e5c03724aa2bfa62be12289b1108f6 Mon Sep 17 00:00:00 2001 From: David Grove Date: Wed, 5 Jun 2024 05:38:17 -0400 Subject: [PATCH 250/496] Migrate from MCAD to AppWrapper v1beta2 (#521) * rename mcad to appwrapper * remove dispatch_priority (not supported by v1beta2 AppWrapper) * remove instascale * remove priority/affinity from template -- not compatible with Kueue * make mocked objects easier to maintain by removing unnecessary metadata * port appwrapper status to v1beta2 names * prune mocked appwrappers * eliminate dependency on workload.codeflare.dev/appwrapper label * Finish converting AppWrappers to v1beta2 * fix incomplete rebase * rebase: remove instascale from new testcase * add e2e test for appwrapper containing a raycluster * Also must add local_queue label to appwrappers * user labels should also be added to ray cluster wrapped in appwrapper * fix more incorrect test cases that were assuming that appwrappers don't get a localqueue * sdk_user must have rbacs to create appwrappers for e2e test to succeed * elide AppWrappers from top-level documentation --- .github/workflows/e2e_tests.yaml | 2 + docs/cluster-configuration.md | 10 +- src/codeflare_sdk/cluster/awload.py | 4 +- src/codeflare_sdk/cluster/cluster.py | 100 +- src/codeflare_sdk/cluster/config.py | 4 +- src/codeflare_sdk/cluster/model.py | 15 +- .../templates/base-template.yaml | 451 +++--- src/codeflare_sdk/utils/generate_yaml.py | 215 +-- .../e2e/mnist_raycluster_sdk_aw_kind_test.py | 106 ++ tests/e2e/start_ray_cluster.py | 3 +- tests/test-case-bad.yaml | 263 ++-- tests/test-case-no-mcad.yamls | 18 - tests/test-case-prio.yaml | 205 --- tests/test-case.yaml | 343 ++--- tests/test-default-appwrapper.yaml | 321 ++--- tests/unit_test.py | 1215 +++++------------ tests/unit_test_support.py | 3 +- 17 files changed, 1166 insertions(+), 2112 deletions(-) create mode 100644 tests/e2e/mnist_raycluster_sdk_aw_kind_test.py delete mode 100644 tests/test-case-prio.yaml diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index c742e506..9ef73315 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -114,6 +114,8 @@ jobs: kubectl create clusterrolebinding sdk-user-namespace-creator --clusterrole=namespace-creator --user=sdk-user kubectl create clusterrole raycluster-creator --verb=get,list,create,delete,patch --resource=rayclusters kubectl create clusterrolebinding sdk-user-raycluster-creator --clusterrole=raycluster-creator --user=sdk-user + kubectl create clusterrole appwrapper-creator --verb=get,list,create,delete,patch --resource=appwrappers + kubectl create clusterrolebinding sdk-user-appwrapper-creator --clusterrole=appwrapper-creator --user=sdk-user kubectl create clusterrole resourceflavor-creator --verb=get,list,create,delete --resource=resourceflavors kubectl create clusterrolebinding sdk-user-resourceflavor-creator --clusterrole=resourceflavor-creator --user=sdk-user kubectl create clusterrole clusterqueue-creator 
--verb=get,list,create,delete,patch --resource=clusterqueues diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index c131b20c..ae6cd2ea 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -18,17 +18,13 @@ cluster = Cluster(ClusterConfiguration( max_cpus=1, # Default 1 min_memory=2, # Default 2 max_memory=2, # Default 2 - mcad=True, # Default True + num_gpus=0, # Default 0 image="quay.io/project-codeflare/ray:latest-py39-cu118", # Mandatory Field machine_types=["m5.xlarge", "g4dn.xlarge"], labels={"exampleLabel": "example", "secondLabel": "example"}, )) ``` -Upon creating a cluster configuration with `mcad=True` an appwrapper will be created featuring the Ray Cluster and any Routes, Ingresses or Secrets that are needed to be created along side it.
    -From there a user can call `cluster.up()` and `cluster.down()` to create and remove the appwrapper thus creating and removing the Ray Cluster. - -In cases where `mcad=False` a yaml file will be created with the individual Ray Cluster, Route/Ingress and Secret included.
    -The Ray Cluster and service will be created by KubeRay directly and the other components will be individually created. - The `labels={"exampleLabel": "example"}` parameter can be used to apply additional labels to the RayCluster resource. + +After creating their`cluster`, a user can call `cluster.up()` and `cluster.down()` to respectively create or remove the Ray Cluster. diff --git a/src/codeflare_sdk/cluster/awload.py b/src/codeflare_sdk/cluster/awload.py index 97d138d5..c622f877 100644 --- a/src/codeflare_sdk/cluster/awload.py +++ b/src/codeflare_sdk/cluster/awload.py @@ -62,7 +62,7 @@ def submit(self) -> None: api_instance = client.CustomObjectsApi(api_config_handler()) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=self.namespace, plural="appwrappers", body=self.awyaml, @@ -87,7 +87,7 @@ def remove(self) -> None: api_instance = client.CustomObjectsApi(api_config_handler()) api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=self.namespace, plural="appwrappers", name=self.name, diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 6ddd778c..35c26b0a 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -103,26 +103,6 @@ def job_client(self): ) return self._job_submission_client - def evaluate_dispatch_priority(self): - priority_class = self.config.dispatch_priority - - try: - config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - priority_classes = api_instance.list_cluster_custom_object( - group="scheduling.k8s.io", - version="v1", - plural="priorityclasses", - ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - - for pc in priority_classes["items"]: - if pc["metadata"]["name"] == priority_class: - return pc["value"] - print(f"Priority class {priority_class} is not available in the cluster") - return None - def validate_image_config(self): """ Validates that the image configuration is not empty. 
@@ -152,18 +132,6 @@ def create_app_wrapper(self): self.validate_image_config() # Before attempting to create the cluster AW, let's evaluate the ClusterConfig - if self.config.dispatch_priority: - if not self.config.mcad: - raise ValueError( - "Invalid Cluster Configuration, cannot have dispatch priority without MCAD" - ) - priority_val = self.evaluate_dispatch_priority() - if priority_val == None: - raise ValueError( - "Invalid Cluster Configuration, AppWrapper not generated" - ) - else: - priority_val = None name = self.config.name namespace = self.config.namespace @@ -178,12 +146,10 @@ def create_app_wrapper(self): workers = self.config.num_workers template = self.config.template image = self.config.image - instascale = self.config.instascale - mcad = self.config.mcad + appwrapper = self.config.appwrapper instance_types = self.config.machine_types env = self.config.envs image_pull_secrets = self.config.image_pull_secrets - dispatch_priority = self.config.dispatch_priority write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls local_queue = self.config.local_queue @@ -202,13 +168,10 @@ def create_app_wrapper(self): workers=workers, template=template, image=image, - instascale=instascale, - mcad=mcad, + appwrapper=appwrapper, instance_types=instance_types, env=env, image_pull_secrets=image_pull_secrets, - dispatch_priority=dispatch_priority, - priority_val=priority_val, write_to_file=write_to_file, verify_tls=verify_tls, local_queue=local_queue, @@ -230,13 +193,13 @@ def up(self): try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - if self.config.mcad: + if self.config.appwrapper: if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: aw = yaml.load(f, Loader=yaml.FullLoader) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", body=aw, @@ -245,7 +208,7 @@ def up(self): aw = yaml.safe_load(self.app_wrapper_yaml) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", body=aw, @@ -284,10 +247,10 @@ def down(self): try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - if self.config.mcad: + if self.config.appwrapper: api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", name=self.app_wrapper_name, @@ -306,30 +269,28 @@ def status( """ ready = False status = CodeFlareClusterStatus.UNKNOWN - if self.config.mcad: + if self.config.appwrapper: # check the app wrapper status appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) if appwrapper: if appwrapper.status in [ - AppWrapperStatus.RUNNING, - AppWrapperStatus.COMPLETED, - AppWrapperStatus.RUNNING_HOLD_COMPLETION, + AppWrapperStatus.RESUMING, + AppWrapperStatus.RESETTING, ]: ready = False status = CodeFlareClusterStatus.STARTING elif appwrapper.status in [ AppWrapperStatus.FAILED, - AppWrapperStatus.DELETED, ]: ready = False status = CodeFlareClusterStatus.FAILED # should deleted be separate return status, ready # exit early, no need to check ray status elif appwrapper.status in [ - AppWrapperStatus.PENDING, - AppWrapperStatus.QUEUEING, + AppWrapperStatus.SUSPENDED, + AppWrapperStatus.SUSPENDING, ]: ready = False - if appwrapper.status == AppWrapperStatus.PENDING: + if appwrapper.status == 
AppWrapperStatus.SUSPENDED: status = CodeFlareClusterStatus.QUEUED else: status = CodeFlareClusterStatus.QUEUEING @@ -501,7 +462,7 @@ def job_logs(self, job_id: str) -> str: def from_k8_cluster_object( rc, - mcad=True, + appwrapper=True, write_to_file=False, verify_tls=True, ): @@ -534,11 +495,10 @@ def from_k8_cluster_object( "resources" ]["limits"]["nvidia.com/gpu"] ), - instascale=True if machine_types else False, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], - mcad=mcad, + appwrapper=appwrapper, write_to_file=write_to_file, verify_tls=verify_tls, local_queue=rc["metadata"] @@ -597,15 +557,15 @@ def list_all_clusters(namespace: str, print_to_console: bool = True): return clusters -def list_all_queued(namespace: str, print_to_console: bool = True, mcad: bool = False): +def list_all_queued( + namespace: str, print_to_console: bool = True, appwrapper: bool = False +): """ Returns (and prints by default) a list of all currently queued-up Ray Clusters in a given namespace. """ - if mcad: - resources = _get_app_wrappers( - namespace, filter=[AppWrapperStatus.RUNNING, AppWrapperStatus.PENDING] - ) + if appwrapper: + resources = _get_app_wrappers(namespace, filter=[AppWrapperStatus.SUSPENDED]) if print_to_console: pretty_print.print_app_wrappers_status(resources) else: @@ -675,10 +635,10 @@ def get_cluster( for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: - mcad = _check_aw_exists(cluster_name, namespace) + appwrapper = _check_aw_exists(cluster_name, namespace) return Cluster.from_k8_cluster_object( rc, - mcad=mcad, + appwrapper=appwrapper, write_to_file=write_to_file, verify_tls=verify_tls, ) @@ -721,7 +681,7 @@ def _check_aw_exists(name: str, namespace: str) -> bool: api_instance = client.CustomObjectsApi(api_config_handler()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", ) @@ -781,7 +741,7 @@ def _app_wrapper_status(name, namespace="default") -> Optional[AppWrapper]: api_instance = client.CustomObjectsApi(api_config_handler()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", ) @@ -851,7 +811,7 @@ def _get_app_wrappers( api_instance = client.CustomObjectsApi(api_config_handler()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", ) @@ -945,18 +905,14 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: def _map_to_app_wrapper(aw) -> AppWrapper: - if "status" in aw and "canrun" in aw["status"]: + if "status" in aw: return AppWrapper( name=aw["metadata"]["name"], - status=AppWrapperStatus(aw["status"]["state"].lower()), - can_run=aw["status"]["canrun"], - job_state=aw["status"]["queuejobstate"], + status=AppWrapperStatus(aw["status"]["phase"].lower()), ) return AppWrapper( name=aw["metadata"]["name"], - status=AppWrapperStatus("queueing"), - can_run=False, - job_state="Still adding to queue", + status=AppWrapperStatus("suspended"), ) diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index f8010ea9..9e069c37 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -46,12 +46,10 @@ class ClusterConfiguration: max_memory: typing.Union[int, str] = 2 num_gpus: int = 0 template: str = 
f"{dir}/templates/base-template.yaml" - instascale: bool = False - mcad: bool = False + appwrapper: bool = False envs: dict = field(default_factory=dict) image: str = "" image_pull_secrets: list = field(default_factory=list) - dispatch_priority: str = None write_to_file: bool = False verify_tls: bool = True labels: dict = field(default_factory=dict) diff --git a/src/codeflare_sdk/cluster/model.py b/src/codeflare_sdk/cluster/model.py index e2dcb652..2547de25 100644 --- a/src/codeflare_sdk/cluster/model.py +++ b/src/codeflare_sdk/cluster/model.py @@ -37,16 +37,17 @@ class RayClusterStatus(Enum): class AppWrapperStatus(Enum): """ - Defines the possible reportable states of an AppWrapper. + Defines the possible reportable phases of an AppWrapper. """ - QUEUEING = "queueing" - PENDING = "pending" + SUSPENDED = "suspended" + RESUMING = "resuming" RUNNING = "running" + RESETTING = "resetting" + SUSPENDING = "suspending" + SUCCEEDED = "succeeded" FAILED = "failed" - DELETED = "deleted" - COMPLETED = "completed" - RUNNING_HOLD_COMPLETION = "runningholdcompletion" + TERMINATING = "terminating" class CodeFlareClusterStatus(Enum): @@ -91,5 +92,3 @@ class AppWrapper: name: str status: AppWrapperStatus - can_run: bool - job_state: str diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 356e3494..b6a70b2b 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -1,254 +1,207 @@ -apiVersion: workload.codeflare.dev/v1beta1 +apiVersion: workload.codeflare.dev/v1beta2 kind: AppWrapper metadata: name: aw-kuberay namespace: default - #new addition - labels: - orderedinstance: "m4.xlarge_g4dn.xlarge" spec: - priority: 9 - resources: - Items: [] - GenericItems: - - replicas: 1 - #new addition - custompodresources: - - replicas: 1 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - - replicas: 3 - requests: - cpu: 2 - memory: 12G - nvidia.com/gpu: 1 - limits: - cpu: 2 - memory: 12G - nvidia.com/gpu: 1 - generictemplate: - # This config demonstrates KubeRay's Ray autoscaler integration. - # The resource requests and limits in this config are too small for production! - # For an example with more realistic resource configuration, see - # ray-cluster.autoscaler.large.yaml. - apiVersion: ray.io/v1 - kind: RayCluster - metadata: - labels: - workload.codeflare.dev/appwrapper: "aw-kuberay" - controller-tools.k8s.io: "1.0" - # A unique identifier for the head node and workers of this cluster. - name: kuberay-cluster - # finalizers: - # - kubernetes - spec: - # The version of Ray you are using. Make sure all Ray containers are running this version of Ray. - rayVersion: '2.7.0' - # If enableInTreeAutoscaling is true, the autoscaler sidecar will be added to the Ray head pod. - # Ray autoscaler integration is supported only for Ray versions >= 1.11.0 - # Ray autoscaler integration is Beta with KubeRay >= 0.3.0 and Ray >= 2.0.0. - enableInTreeAutoscaling: false - # autoscalerOptions is an OPTIONAL field specifying configuration overrides for the Ray autoscaler. - # The example configuration shown below below represents the DEFAULT values. - # (You may delete autoscalerOptions if the defaults are suitable.) - autoscalerOptions: - # upscalingMode is "Default" or "Aggressive." - # Conservative: Upscaling is rate-limited; the number of pending worker pods is at most the size of the Ray cluster. - # Default: Upscaling is not rate-limited. 
- # Aggressive: An alias for Default; upscaling is not rate-limited. - upscalingMode: Default - # idleTimeoutSeconds is the number of seconds to wait before scaling down a worker pod which is not using Ray resources. - idleTimeoutSeconds: 60 - # image optionally overrides the autoscaler's container image. - # If instance.spec.rayVersion is at least "2.0.0", the autoscaler will default to the same image as - # the ray container. For older Ray versions, the autoscaler will default to using the Ray 2.0.0 image. - ## image: "my-repo/my-custom-autoscaler-image:tag" - # imagePullPolicy optionally overrides the autoscaler container's image pull policy. - imagePullPolicy: Always - # resources specifies optional resource request and limit overrides for the autoscaler container. - # For large Ray clusters, we recommend monitoring container resource usage to determine if overriding the defaults is required. - resources: - limits: - cpu: "500m" - memory: "512Mi" - requests: - cpu: "500m" - memory: "512Mi" - ######################headGroupSpec################################# - # head group template and specs, (perhaps 'group' is not needed in the name) - headGroupSpec: - # Kubernetes Service Type, valid values are 'ClusterIP', 'NodePort' and 'LoadBalancer' - serviceType: ClusterIP - enableIngress: false - # logical group name, for this called head-group, also can be functional - # pod type head or worker - # rayNodeType: head # Not needed since it is under the headgroup - # the following params are used to complete the ray start: ray start --head --block ... - rayStartParams: - # Flag "no-monitor" will be automatically set when autoscaling is enabled. - dashboard-host: '0.0.0.0' - block: 'true' - # num-cpus: '1' # can be auto-completed from the limits - # Use `resources` to optionally specify custom resource annotations for the Ray node. - # The value of `resources` is a string-integer mapping. 
- # Currently, `resources` must be provided in the specific format demonstrated below: - # resources: '"{\"Custom1\": 1, \"Custom2\": 5}"' - num-gpus: '0' - #pod template - template: - spec: - #new addition - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: aw-kuberay - operator: In - values: - - "aw-kuberay" - containers: - # The Ray head pod - - name: ray-head - image: quay.io/project-codeflare/ray:latest-py39-cu118 - imagePullPolicy: Always - ports: - - containerPort: 6379 - name: gcs - - containerPort: 8265 - name: dashboard - - containerPort: 10001 - name: client - lifecycle: - preStop: - exec: - command: ["/bin/sh","-c","ray stop"] - resources: - limits: - cpu: 2 - memory: "8G" - nvidia.com/gpu: 0 - requests: - cpu: 2 - memory: "8G" - nvidia.com/gpu: 0 - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - volumes: - - name: odh-trusted-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - optional: true - - name: odh-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - optional: true - workerGroupSpecs: - # the pod replicas in this group typed worker - - replicas: 3 - minReplicas: 3 - maxReplicas: 3 - # logical group name, for this called small-group, also can be functional - groupName: small-group - # if worker pods need to be added, we can simply increment the replicas - # if worker pods need to be removed, we decrement the replicas, and populate the podsToDelete list - # the operator will remove pods from the list until the number of replicas is satisfied - # when a pod is confirmed to be deleted, its name will be removed from the list below - #scaleStrategy: - # workersToDelete: - # - raycluster-complete-worker-small-group-bdtwh - # - raycluster-complete-worker-small-group-hv457 - # - raycluster-complete-worker-small-group-k8tj7 - # the following params are used to complete the ray start: ray start --block ... - rayStartParams: - block: 'true' - num-gpus: 1 - #pod template - template: - metadata: - labels: - key: value - # annotations for pod - annotations: - key: value - # finalizers: - # - kubernetes - spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: aw-kuberay - operator: In - values: - - "aw-kuberay" - containers: - - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - image: quay.io/project-codeflare/ray:latest-py39-cu118 - # environment variables to set in the container.Optional. 
- # Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ - lifecycle: - preStop: - exec: - command: ["/bin/sh","-c","ray stop"] - resources: - limits: - cpu: "2" - memory: "12G" - nvidia.com/gpu: "1" - requests: - cpu: "2" - memory: "12G" - nvidia.com/gpu: "1" - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - volumes: - - name: odh-trusted-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - optional: true - - name: odh-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - optional: true + components: + - template: + # This config demonstrates KubeRay's Ray autoscaler integration. + # The resource requests and limits in this config are too small for production! + # For an example with more realistic resource configuration, see + # ray-cluster.autoscaler.large.yaml. + apiVersion: ray.io/v1 + kind: RayCluster + metadata: + labels: + controller-tools.k8s.io: "1.0" + # A unique identifier for the head node and workers of this cluster. + name: kuberay-cluster + # finalizers: + # - kubernetes + spec: + # The version of Ray you are using. Make sure all Ray containers are running this version of Ray. + rayVersion: '2.7.0' + # If enableInTreeAutoscaling is true, the autoscaler sidecar will be added to the Ray head pod. + # Ray autoscaler integration is supported only for Ray versions >= 1.11.0 + # Ray autoscaler integration is Beta with KubeRay >= 0.3.0 and Ray >= 2.0.0. + enableInTreeAutoscaling: false + # autoscalerOptions is an OPTIONAL field specifying configuration overrides for the Ray autoscaler. + # The example configuration shown below below represents the DEFAULT values. + # (You may delete autoscalerOptions if the defaults are suitable.) + autoscalerOptions: + # upscalingMode is "Default" or "Aggressive." + # Conservative: Upscaling is rate-limited; the number of pending worker pods is at most the size of the Ray cluster. + # Default: Upscaling is not rate-limited. + # Aggressive: An alias for Default; upscaling is not rate-limited. + upscalingMode: Default + # idleTimeoutSeconds is the number of seconds to wait before scaling down a worker pod which is not using Ray resources. + idleTimeoutSeconds: 60 + # image optionally overrides the autoscaler's container image. + # If instance.spec.rayVersion is at least "2.0.0", the autoscaler will default to the same image as + # the ray container. For older Ray versions, the autoscaler will default to using the Ray 2.0.0 image. + ## image: "my-repo/my-custom-autoscaler-image:tag" + # imagePullPolicy optionally overrides the autoscaler container's image pull policy. + imagePullPolicy: Always + # resources specifies optional resource request and limit overrides for the autoscaler container. + # For large Ray clusters, we recommend monitoring container resource usage to determine if overriding the defaults is required. 
+ resources: + limits: + cpu: "500m" + memory: "512Mi" + requests: + cpu: "500m" + memory: "512Mi" + ######################headGroupSpec################################# + # head group template and specs, (perhaps 'group' is not needed in the name) + headGroupSpec: + # Kubernetes Service Type, valid values are 'ClusterIP', 'NodePort' and 'LoadBalancer' + serviceType: ClusterIP + enableIngress: false + # logical group name, for this called head-group, also can be functional + # pod type head or worker + # rayNodeType: head # Not needed since it is under the headgroup + # the following params are used to complete the ray start: ray start --head --block ... + rayStartParams: + # Flag "no-monitor" will be automatically set when autoscaling is enabled. + dashboard-host: '0.0.0.0' + block: 'true' + # num-cpus: '1' # can be auto-completed from the limits + # Use `resources` to optionally specify custom resource annotations for the Ray node. + # The value of `resources` is a string-integer mapping. + # Currently, `resources` must be provided in the specific format demonstrated below: + # resources: '"{\"Custom1\": 1, \"Custom2\": 5}"' + num-gpus: '0' + #pod template + template: + spec: + containers: + # The Ray head pod + - name: ray-head + image: quay.io/project-codeflare/ray:latest-py39-cu118 + imagePullPolicy: Always + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + lifecycle: + preStop: + exec: + command: ["/bin/sh","-c","ray stop"] + resources: + limits: + cpu: 2 + memory: "8G" + nvidia.com/gpu: 0 + requests: + cpu: 2 + memory: "8G" + nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + volumes: + - name: odh-trusted-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + optional: true + - name: odh-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + optional: true + workerGroupSpecs: + # the pod replicas in this group typed worker + - replicas: 3 + minReplicas: 3 + maxReplicas: 3 + # logical group name, for this called small-group, also can be functional + groupName: small-group + # if worker pods need to be added, we can simply increment the replicas + # if worker pods need to be removed, we decrement the replicas, and populate the podsToDelete list + # the operator will remove pods from the list until the number of replicas is satisfied + # when a pod is confirmed to be deleted, its name will be removed from the list below + #scaleStrategy: + # workersToDelete: + # - raycluster-complete-worker-small-group-bdtwh + # - raycluster-complete-worker-small-group-hv457 + # - raycluster-complete-worker-small-group-k8tj7 + # the following params are used to complete the ray start: ray start --block ... 
+ rayStartParams: + block: 'true' + num-gpus: 1 + #pod template + template: + metadata: + labels: + key: value + # annotations for pod + annotations: + key: value + # finalizers: + # - kubernetes + spec: + containers: + - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' + image: quay.io/project-codeflare/ray:latest-py39-cu118 + # environment variables to set in the container.Optional. + # Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ + lifecycle: + preStop: + exec: + command: ["/bin/sh","-c","ray stop"] + resources: + limits: + cpu: "2" + memory: "12G" + nvidia.com/gpu: "1" + requests: + cpu: "2" + memory: "12G" + nvidia.com/gpu: "1" + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + volumes: + - name: odh-trusted-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + optional: true + - name: odh-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + optional: true diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 2ea6dd78..dcd4a42c 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -81,109 +81,11 @@ def update_names(yaml, item, appwrapper_name, cluster_name, namespace): metadata = yaml.get("metadata") metadata["name"] = appwrapper_name metadata["namespace"] = namespace - lower_meta = item.get("generictemplate", {}).get("metadata") - lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name + lower_meta = item.get("template", {}).get("metadata") lower_meta["name"] = cluster_name lower_meta["namespace"] = namespace -def update_labels(yaml, instascale, instance_types): - metadata = yaml.get("metadata") - if instascale: - if not len(instance_types) > 0: - sys.exit( - "If instascale is set to true, must provide at least one instance type" - ) - type_str = "" - for type in instance_types: - type_str += type + "_" - type_str = type_str[:-1] - metadata["labels"]["orderedinstance"] = type_str - else: - metadata.pop("labels") - - -def update_priority(yaml, item, dispatch_priority, priority_val): - spec = yaml.get("spec") - if dispatch_priority is not None: - if priority_val: - spec["priority"] = priority_val - else: - raise ValueError( - "AW generation error: Priority value is None, while dispatch_priority is defined" - ) - head = item.get("generictemplate").get("spec").get("headGroupSpec") - worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] - head["template"]["spec"]["priorityClassName"] = dispatch_priority - worker["template"]["spec"]["priorityClassName"] = dispatch_priority - else: - spec.pop("priority") - - -def update_custompodresources( - item, - min_cpu, - max_cpu, - min_memory, - max_memory, - gpu, - workers, - head_cpus, - head_memory, - head_gpus, -): - if "custompodresources" in item.keys(): - custompodresources = 
item.get("custompodresources") - for i in range(len(custompodresources)): - resource = custompodresources[i] - if i == 0: - # Leave head node resources as template default - resource["requests"]["cpu"] = head_cpus - resource["limits"]["cpu"] = head_cpus - resource["requests"]["memory"] = head_memory - resource["limits"]["memory"] = head_memory - resource["requests"]["nvidia.com/gpu"] = head_gpus - resource["limits"]["nvidia.com/gpu"] = head_gpus - - else: - for k, v in resource.items(): - if k == "replicas" and i == 1: - resource[k] = workers - if k == "requests" or k == "limits": - for spec, _ in v.items(): - if spec == "cpu": - if k == "limits": - resource[k][spec] = max_cpu - else: - resource[k][spec] = min_cpu - if spec == "memory": - if k == "limits": - resource[k][spec] = max_memory - else: - resource[k][spec] = min_memory - if spec == "nvidia.com/gpu": - if i == 0: - resource[k][spec] = 0 - else: - resource[k][spec] = gpu - else: - sys.exit("Error: malformed template") - - -def update_affinity(spec, appwrapper_name, instascale): - if instascale: - node_selector_terms = ( - spec.get("affinity") - .get("nodeAffinity") - .get("requiredDuringSchedulingIgnoredDuringExecution") - .get("nodeSelectorTerms") - ) - node_selector_terms[0]["matchExpressions"][0]["values"][0] = appwrapper_name - node_selector_terms[0]["matchExpressions"][0]["key"] = appwrapper_name - else: - spec.pop("affinity") - - def update_image(spec, image): containers = spec.get("containers") for container in containers: @@ -232,18 +134,17 @@ def update_nodes( gpu, workers, image, - instascale, env, image_pull_secrets, head_cpus, head_memory, head_gpus, ): - if "generictemplate" in item.keys(): - head = item.get("generictemplate").get("spec").get("headGroupSpec") + if "template" in item.keys(): + head = item.get("template").get("spec").get("headGroupSpec") head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) - worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] + worker = item.get("template").get("spec").get("workerGroupSpecs")[0] # Head counts as first worker worker["replicas"] = workers worker["minReplicas"] = workers @@ -253,7 +154,6 @@ def update_nodes( for comp in [head, worker]: spec = comp.get("template").get("spec") - update_affinity(spec, appwrapper_name, instascale) update_image_pull_secrets(spec, image_pull_secrets) update_image(spec, image) update_env(spec, env) @@ -328,74 +228,52 @@ def local_queue_exists(namespace: str, local_queue_name: str): return False +def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]): + lq_name = local_queue or get_default_kueue_name(namespace) + if not local_queue_exists(namespace, lq_name): + raise ValueError( + "local_queue provided does not exist or is not in this namespace. 
Please provide the correct local_queue name in Cluster Configuration" + ) + if not "labels" in item["metadata"]: + item["metadata"]["labels"] = {} + item["metadata"]["labels"].update({"kueue.x-k8s.io/queue-name": lq_name}) + + +def augment_labels(item: dict, labels: dict): + if "template" in item: + if not "labels" in item["template"]["metadata"]: + item["template"]["metadata"]["labels"] = {} + item["template"]["metadata"]["labels"].update(labels) + + def write_components( user_yaml: dict, output_file_name: str, - namespace: str, - local_queue: Optional[str], - labels: dict, ): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) if not os.path.exists(directory_path): os.makedirs(directory_path) - components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") + components = user_yaml.get("spec", "resources").get("components") open(output_file_name, "w").close() - lq_name = local_queue or get_default_kueue_name(namespace) - cluster_labels = labels - if not local_queue_exists(namespace, lq_name): - raise ValueError( - "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" - ) with open(output_file_name, "a") as outfile: for component in components: - if "generictemplate" in component: - if ( - "workload.codeflare.dev/appwrapper" - in component["generictemplate"]["metadata"]["labels"] - ): - del component["generictemplate"]["metadata"]["labels"][ - "workload.codeflare.dev/appwrapper" - ] - labels = component["generictemplate"]["metadata"]["labels"] - labels.update({"kueue.x-k8s.io/queue-name": lq_name}) - labels.update(cluster_labels) + if "template" in component: outfile.write("---\n") - yaml.dump( - component["generictemplate"], outfile, default_flow_style=False - ) + yaml.dump(component["template"], outfile, default_flow_style=False) print(f"Written to: {output_file_name}") def load_components( user_yaml: dict, name: str, - namespace: str, - local_queue: Optional[str], - labels: dict, ): component_list = [] - components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") - lq_name = local_queue or get_default_kueue_name(namespace) - cluster_labels = labels - if not local_queue_exists(namespace, lq_name): - raise ValueError( - "local_queue provided does not exist or is not in this namespace. 
Please provide the correct local_queue name in Cluster Configuration" - ) + components = user_yaml.get("spec", "resources").get("components") for component in components: - if "generictemplate" in component: - if ( - "workload.codeflare.dev/appwrapper" - in component["generictemplate"]["metadata"]["labels"] - ): - del component["generictemplate"]["metadata"]["labels"][ - "workload.codeflare.dev/appwrapper" - ] - labels = component["generictemplate"]["metadata"]["labels"] - labels.update({"kueue.x-k8s.io/queue-name": lq_name}) - labels.update(cluster_labels) - component_list.append(component["generictemplate"]) + if "template" in component: + component_list.append(component["template"]) resources = "---\n" + "---\n".join( [yaml.dump(component) for component in component_list] @@ -425,13 +303,10 @@ def generate_appwrapper( workers: int, template: str, image: str, - instascale: bool, - mcad: bool, + appwrapper: bool, instance_types: list, env, image_pull_secrets: list, - dispatch_priority: str, - priority_val: int, write_to_file: bool, verify_tls: bool, local_queue: Optional[str], @@ -440,7 +315,7 @@ def generate_appwrapper( user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) resources = user_yaml.get("spec", "resources") - item = resources["resources"].get("GenericItems")[0] + item = resources.get("components")[0] update_names( user_yaml, item, @@ -448,20 +323,6 @@ def generate_appwrapper( cluster_name, namespace, ) - update_labels(user_yaml, instascale, instance_types) - update_priority(user_yaml, item, dispatch_priority, priority_val) - update_custompodresources( - item, - min_cpu, - max_cpu, - min_memory, - max_memory, - gpu, - workers, - head_cpus, - head_memory, - head_gpus, - ) update_nodes( item, appwrapper_name, @@ -472,7 +333,6 @@ def generate_appwrapper( gpu, workers, image, - instascale, env, image_pull_secrets, head_cpus, @@ -480,18 +340,25 @@ def generate_appwrapper( head_gpus, ) + augment_labels(item, labels) + + if appwrapper: + add_queue_label(user_yaml, namespace, local_queue) + else: + add_queue_label(item["template"], namespace, local_queue) + directory_path = os.path.expanduser("~/.codeflare/resources/") outfile = os.path.join(directory_path, appwrapper_name + ".yaml") if write_to_file: - if mcad: + if appwrapper: write_user_appwrapper(user_yaml, outfile) else: - write_components(user_yaml, outfile, namespace, local_queue, labels) + write_components(user_yaml, outfile) return outfile else: - if mcad: + if appwrapper: user_yaml = load_appwrapper(user_yaml, name) else: - user_yaml = load_components(user_yaml, name, namespace, local_queue, labels) + user_yaml = load_components(user_yaml, name) return user_yaml diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py new file mode 100644 index 00000000..2aa5da16 --- /dev/null +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -0,0 +1,106 @@ +import requests + +from time import sleep + +from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication +from codeflare_sdk.job import RayJobClient + +import pytest + +from support import * + +# This test creates an AppWrapper containing a Ray Cluster and covers the Ray Job submission functionality on Kind Cluster + + +@pytest.mark.kind +class TestRayClusterSDKAppWrapperKind: + def setup_method(self): + initialize_kubernetes_client(self) + + def teardown_method(self): + delete_namespace(self) + + def test_mnist_ray_cluster_sdk_kind(self): + self.setup_method() + 
create_namespace(self) + create_kueue_resources(self) + self.run_mnist_raycluster_sdk_kind() + + def run_mnist_raycluster_sdk_kind(self): + ray_image = get_ray_image() + + cluster = Cluster( + ClusterConfiguration( + name="mnist", + namespace=self.namespace, + num_workers=1, + head_cpus="500m", + head_memory=2, + min_cpus="500m", + max_cpus=1, + min_memory=1, + max_memory=2, + num_gpus=0, + image=ray_image, + write_to_file=True, + verify_tls=False, + appwrapper=True, + ) + ) + + cluster.up() + + cluster.status() + + cluster.wait_ready() + + cluster.status() + + cluster.details() + + self.assert_jobsubmit_withoutlogin_kind(cluster) + + # Assertions + + def assert_jobsubmit_withoutlogin_kind(self, cluster): + ray_dashboard = cluster.cluster_dashboard_uri() + client = RayJobClient(address=ray_dashboard, verify=False) + + submission_id = client.submit_job( + entrypoint="python mnist.py", + runtime_env={ + "working_dir": "./tests/e2e/", + "pip": "./tests/e2e/mnist_pip_requirements.txt", + }, + ) + print(f"Submitted job with ID: {submission_id}") + done = False + time = 0 + timeout = 900 + while not done: + status = client.get_job_status(submission_id) + if status.is_terminal(): + break + if not done: + print(status) + if timeout and time >= timeout: + raise TimeoutError(f"job has timed out after waiting {timeout}s") + sleep(5) + time += 5 + + logs = client.get_job_logs(submission_id) + print(logs) + + self.assert_job_completion(status) + + client.delete_job(submission_id) + + cluster.down() + + def assert_job_completion(self, status): + if status == "SUCCEEDED": + print(f"Job has completed: '{status}'") + assert True + else: + print(f"Job has completed: '{status}'") + assert False diff --git a/tests/e2e/start_ray_cluster.py b/tests/e2e/start_ray_cluster.py index 8bb18580..957d0c25 100644 --- a/tests/e2e/start_ray_cluster.py +++ b/tests/e2e/start_ray_cluster.py @@ -20,9 +20,8 @@ min_memory=1, max_memory=2, num_gpus=0, - instascale=False, image=ray_image, - mcad=True, + appwrapper=True, ) ) diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index 6e969e01..3c5bf076 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -1,4 +1,4 @@ -apiVersion: workload.codeflare.dev/v1beta1 +apiVersion: workload.codeflare.dev/v1beta2 kind: AppsWrapper metadata: labels: @@ -6,162 +6,105 @@ metadata: nam: unit-test-cluster namspace: ns spec: - priority: 9 - resources: - GenericItems: - - custompodresources: - - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - replicas: 1 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - - limits: - cpu: 4 - memory: 6G - nvidia.com/gpu: 7 - replicas: 2 - requests: - cpu: 3 - memory: 5G - nvidia.com/gpu: 7 - generictemplate: - apiVersion: ray.io/v1 - kind: RayCluster - metadata: - labels: - workload.codeflare.dev/appwrapper: unit-test-cluster - controller-tools.k8s.io: '1.0' - name: unit-test-cluster - namespace: ns - spec: - autoscalerOptions: - idleTimeoutSeconds: 60 - imagePullPolicy: Always - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 500m - memory: 512Mi - upscalingMode: Default - enableInTreeAutoscaling: false - headGroupSpec: - rayStartParams: - block: 'true' - dashboard-host: 0.0.0.0 - num-gpus: '0' - serviceType: ClusterIP - template: - spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: unit-test-cluster - operator: In - values: - - unit-test-cluster - containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - 
fieldPath: status.podIP - image: quay.io/project-codeflare/ray:latest-py39-cu118 - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: ray-head - ports: - - containerPort: 6379 - name: gcs - - containerPort: 8265 - name: dashboard - - containerPort: 10001 - name: client - resources: - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - rayVersion: 1.12.0 - workerGroupSpecs: - - groupName: small-group-unit-test-cluster - maxReplicas: 2 - minReplicas: 2 - rayStartParams: - block: 'true' - num-gpus: '7' - replicas: 2 - template: - metadata: - annotations: - key: value - labels: - key: value - spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: unit-test-cluster - operator: In - values: - - unit-test-cluster - containers: - - env: - - name: MY_POD_IP - valueFrom: - fieldRef: - fieldPath: status.podIP - image: quay.io/project-codeflare/ray:latest-py39-cu118 - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: machine-learning - resources: - limits: - cpu: 4 - memory: 6G - nvidia.com/gpu: 7 - requests: - cpu: 3 - memory: 5G - nvidia.com/gpu: 7 - replicas: 1 - - generictemplate: - apiVersion: route.openshift.io/v1 - kind: Route - metadata: - labels: - odh-ray-cluster-service: unit-test-cluster-head-svc - name: ray-dashboard-unit-test-cluster - namespace: ns - spec: - port: - targetPort: dashboard - to: - kind: Service - name: unit-test-cluster-head-svc - replicas: 1 - Items: [] + components: + - template: + apiVersion: ray.io/v1 + kind: RayCluster + metadata: + labels: + controller-tools.k8s.io: '1.0' + name: unit-test-cluster + namespace: ns + spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + serviceType: ClusterIP + template: + spec: + containers: + - env: + - name: MY_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + image: quay.io/project-codeflare/ray:latest-py39-cu118 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + requests: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + rayVersion: 1.12.0 + workerGroupSpecs: + - groupName: small-group-unit-test-cluster + maxReplicas: 2 + minReplicas: 2 + rayStartParams: + block: 'true' + num-gpus: '7' + replicas: 2 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - env: + - name: MY_POD_IP + valueFrom: + fieldRef: + fieldPath: status.podIP + image: quay.io/project-codeflare/ray:latest-py39-cu118 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 4 + memory: 6G + nvidia.com/gpu: 7 + requests: + cpu: 3 + memory: 5G + nvidia.com/gpu: 7 diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 7fcf1fdc..367703d6 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -31,15 +31,6 @@ spec: serviceType: ClusterIP template: spec: - 
affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: unit-test-cluster-ray - operator: In - values: - - unit-test-cluster-ray containers: - image: quay.io/project-codeflare/ray:latest-py39-cu118 imagePullPolicy: Always @@ -113,15 +104,6 @@ spec: labels: key: value spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: unit-test-cluster-ray - operator: In - values: - - unit-test-cluster-ray containers: - image: quay.io/project-codeflare/ray:latest-py39-cu118 lifecycle: diff --git a/tests/test-case-prio.yaml b/tests/test-case-prio.yaml deleted file mode 100644 index a4d6e68f..00000000 --- a/tests/test-case-prio.yaml +++ /dev/null @@ -1,205 +0,0 @@ -apiVersion: workload.codeflare.dev/v1beta1 -kind: AppWrapper -metadata: - labels: - orderedinstance: cpu.small_gpu.large - name: prio-test-cluster - namespace: ns -spec: - priority: 10 - resources: - GenericItems: - - custompodresources: - - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - replicas: 1 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - - limits: - cpu: 4 - memory: 6G - nvidia.com/gpu: 7 - replicas: 2 - requests: - cpu: 3 - memory: 5G - nvidia.com/gpu: 7 - generictemplate: - apiVersion: ray.io/v1 - kind: RayCluster - metadata: - labels: - controller-tools.k8s.io: '1.0' - workload.codeflare.dev/appwrapper: prio-test-cluster - name: prio-test-cluster - namespace: ns - spec: - autoscalerOptions: - idleTimeoutSeconds: 60 - imagePullPolicy: Always - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 500m - memory: 512Mi - upscalingMode: Default - enableInTreeAutoscaling: false - headGroupSpec: - enableIngress: false - rayStartParams: - block: 'true' - dashboard-host: 0.0.0.0 - num-gpus: '0' - serviceType: ClusterIP - template: - spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: prio-test-cluster - operator: In - values: - - prio-test-cluster - containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: ray-head - ports: - - containerPort: 6379 - name: gcs - - containerPort: 8265 - name: dashboard - - containerPort: 10001 - name: client - resources: - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - imagePullSecrets: - - name: unit-test-pull-secret - priorityClassName: default - volumes: - - configMap: - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true - name: odh-trusted-ca-cert - - configMap: - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true - name: odh-ca-cert - rayVersion: 2.7.0 - workerGroupSpecs: - - groupName: small-group-prio-test-cluster - maxReplicas: 2 - minReplicas: 2 - rayStartParams: - block: 'true' - num-gpus: '7' - 
replicas: 2 - template: - metadata: - annotations: - key: value - labels: - key: value - spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: prio-test-cluster - operator: In - values: - - prio-test-cluster - containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: machine-learning - resources: - limits: - cpu: 4 - memory: 6G - nvidia.com/gpu: 7 - requests: - cpu: 3 - memory: 5G - nvidia.com/gpu: 7 - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - imagePullSecrets: - - name: unit-test-pull-secret - priorityClassName: default - volumes: - - configMap: - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true - name: odh-trusted-ca-cert - - configMap: - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true - name: odh-ca-cert - replicas: 1 - Items: [] diff --git a/tests/test-case.yaml b/tests/test-case.yaml index b97d12a4..98166b37 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -1,202 +1,161 @@ -apiVersion: workload.codeflare.dev/v1beta1 +apiVersion: workload.codeflare.dev/v1beta2 kind: AppWrapper metadata: labels: - orderedinstance: cpu.small_gpu.large + kueue.x-k8s.io/queue-name: local-queue-default name: unit-test-cluster namespace: ns spec: - resources: - GenericItems: - - custompodresources: - - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - replicas: 1 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - - limits: - cpu: 4 - memory: 6G - nvidia.com/gpu: 7 - replicas: 2 - requests: - cpu: 3 - memory: 5G - nvidia.com/gpu: 7 - generictemplate: - apiVersion: ray.io/v1 - kind: RayCluster - metadata: - labels: - controller-tools.k8s.io: '1.0' - workload.codeflare.dev/appwrapper: unit-test-cluster - name: unit-test-cluster - namespace: ns - spec: - autoscalerOptions: - idleTimeoutSeconds: 60 - imagePullPolicy: Always - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 500m - memory: 512Mi - upscalingMode: Default - enableInTreeAutoscaling: false - headGroupSpec: - enableIngress: false - rayStartParams: - block: 'true' - dashboard-host: 0.0.0.0 - num-gpus: '0' - serviceType: ClusterIP - template: - spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: unit-test-cluster - operator: In - values: - - unit-test-cluster - containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: ray-head - ports: - - containerPort: 6379 - name: gcs - - containerPort: 8265 - name: dashboard - - containerPort: 10001 - name: client - resources: - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - 
- mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - imagePullSecrets: - - name: unit-test-pull-secret - volumes: - - configMap: - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true + components: + - template: + apiVersion: ray.io/v1 + kind: RayCluster + metadata: + labels: + controller-tools.k8s.io: '1.0' + name: unit-test-cluster + namespace: ns + spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + enableIngress: false + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + serviceType: ClusterIP + template: + spec: + containers: + - image: quay.io/project-codeflare/ray:latest-py39-cu118 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + requests: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert - - configMap: - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert - rayVersion: 2.7.0 - workerGroupSpecs: - - groupName: small-group-unit-test-cluster - maxReplicas: 2 - minReplicas: 2 - rayStartParams: - block: 'true' - num-gpus: '7' - replicas: 2 - template: - metadata: - annotations: - key: value - labels: - key: value - spec: - affinity: - nodeAffinity: - requiredDuringSchedulingIgnoredDuringExecution: - nodeSelectorTerms: - - matchExpressions: - - key: unit-test-cluster - operator: In - values: - - unit-test-cluster - containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: machine-learning - resources: - limits: - cpu: 4 - memory: 6G - nvidia.com/gpu: 7 - requests: - cpu: 3 - memory: 5G - nvidia.com/gpu: 7 - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - imagePullSecrets: - - name: unit-test-pull-secret - volumes: - - configMap: - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true + subPath: odh-ca-bundle.crt + imagePullSecrets: + - name: unit-test-pull-secret + volumes: + - 
configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + rayVersion: 2.7.0 + workerGroupSpecs: + - groupName: small-group-unit-test-cluster + maxReplicas: 2 + minReplicas: 2 + rayStartParams: + block: 'true' + num-gpus: '7' + replicas: 2 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - image: quay.io/project-codeflare/ray:latest-py39-cu118 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 4 + memory: 6G + nvidia.com/gpu: 7 + requests: + cpu: 3 + memory: 5G + nvidia.com/gpu: 7 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert - - configMap: - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert - replicas: 1 - Items: [] + subPath: odh-ca-bundle.crt + imagePullSecrets: + - name: unit-test-pull-secret + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index c390f619..f754c176 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -1,180 +1,159 @@ -apiVersion: workload.codeflare.dev/v1beta1 +apiVersion: workload.codeflare.dev/v1beta2 kind: AppWrapper metadata: + labels: + kueue.x-k8s.io/queue-name: local-queue-default name: unit-test-default-cluster namespace: opendatahub spec: - resources: - GenericItems: - - custompodresources: - - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - replicas: 1 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - - limits: - cpu: 1 - memory: 2G - nvidia.com/gpu: 0 - replicas: 1 - requests: - cpu: 1 - memory: 2G - nvidia.com/gpu: 0 - generictemplate: - apiVersion: ray.io/v1 - kind: RayCluster - metadata: - labels: - controller-tools.k8s.io: '1.0' - workload.codeflare.dev/appwrapper: unit-test-default-cluster - name: unit-test-default-cluster - namespace: opendatahub - spec: - autoscalerOptions: - idleTimeoutSeconds: 60 - imagePullPolicy: Always - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 500m - memory: 512Mi - upscalingMode: Default - enableInTreeAutoscaling: false - headGroupSpec: - enableIngress: false - rayStartParams: - block: 'true' - dashboard-host: 0.0.0.0 - num-gpus: '0' - serviceType: ClusterIP - template: - spec: - containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: ray-head - ports: - - containerPort: 6379 - name: gcs - - containerPort: 8265 - name: dashboard - - containerPort: 10001 - name: client - resources: - limits: - cpu: 2 - 
memory: 8G - nvidia.com/gpu: 0 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - imagePullSecrets: [] - volumes: - - configMap: - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true + components: + - template: + apiVersion: ray.io/v1 + kind: RayCluster + metadata: + labels: + controller-tools.k8s.io: '1.0' + name: unit-test-default-cluster + namespace: opendatahub + spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + enableIngress: false + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + serviceType: ClusterIP + template: + spec: + containers: + - image: quay.io/project-codeflare/ray:latest-py39-cu118 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + requests: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert - - configMap: - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert - rayVersion: 2.7.0 - workerGroupSpecs: - - groupName: small-group-unit-test-default-cluster - maxReplicas: 1 - minReplicas: 1 - rayStartParams: - block: 'true' - num-gpus: '0' - replicas: 1 - template: - metadata: - annotations: - key: value - labels: - key: value - spec: - containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: machine-learning - resources: - limits: - cpu: 1 - memory: 2G - nvidia.com/gpu: 0 - requests: - cpu: 1 - memory: 2G - nvidia.com/gpu: 0 - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - imagePullSecrets: [] - volumes: - - configMap: - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: 
ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + rayVersion: 2.7.0 + workerGroupSpecs: + - groupName: small-group-unit-test-default-cluster + maxReplicas: 1 + minReplicas: 1 + rayStartParams: + block: 'true' + num-gpus: '0' + replicas: 1 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - image: quay.io/project-codeflare/ray:latest-py39-cu118 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 1 + memory: 2G + nvidia.com/gpu: 0 + requests: + cpu: 1 + memory: 2G + nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert - - configMap: - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert - replicas: 1 - Items: [] + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert diff --git a/tests/unit_test.py b/tests/unit_test.py index 1fe139de..32d730c4 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -265,15 +265,18 @@ def test_config_creation(): assert config.num_gpus == 7 assert config.image == "quay.io/project-codeflare/ray:latest-py39-cu118" assert config.template == f"{parent}/src/codeflare_sdk/templates/base-template.yaml" - assert config.instascale assert config.machine_types == ["cpu.small", "gpu.large"] assert config.image_pull_secrets == ["unit-test-pull-secret"] - assert config.dispatch_priority == None - assert config.mcad == True + assert config.appwrapper == True def test_cluster_creation(mocker): + # Create AppWrapper containing a Ray Cluster with no local queue specified mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) cluster = createClusterWithConfig(mocker) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster.yaml" assert cluster.app_wrapper_name == "unit-test-cluster" @@ -345,8 +348,8 @@ def test_cluster_creation_no_mcad(mocker): config = createClusterConfig() config.name = "unit-test-cluster-ray" config.write_to_file = True - config.mcad = False config.labels = {"testlabel": "test", "testlabel2": "test"} + config.appwrapper = False cluster = Cluster(config) assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" @@ -372,7 +375,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): ) config = createClusterConfig() config.name = "unit-test-cluster-ray" - config.mcad = False + config.appwrapper = False config.write_to_file = True config.local_queue = "local-queue-default" 
config.labels = {"testlabel": "test", "testlabel2": "test"} @@ -394,12 +397,11 @@ def test_cluster_creation_no_mcad_local_queue(mocker): min_memory=5, max_memory=6, num_gpus=7, - instascale=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=True, - mcad=False, + appwrapper=False, local_queue="local-queue-default", labels={"testlabel": "test", "testlabel2": "test"}, ) @@ -413,40 +415,20 @@ def test_cluster_creation_no_mcad_local_queue(mocker): ) -def test_cluster_creation_priority(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", - return_value={"items": [{"metadata": {"name": "default"}, "value": 10}]}, - ) - config = createClusterConfig() - config.name = "prio-test-cluster" - config.dispatch_priority = "default" - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, - ) - cluster = Cluster(config) - assert cluster.app_wrapper_yaml == f"{aw_dir}prio-test-cluster.yaml" - assert cluster.app_wrapper_name == "prio-test-cluster" - assert filecmp.cmp( - f"{aw_dir}prio-test-cluster.yaml", - f"{parent}/tests/test-case-prio.yaml", - shallow=True, - ) - - def test_default_cluster_creation(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( "codeflare_sdk.cluster.cluster.get_current_namespace", return_value="opendatahub", ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) default_config = ClusterConfiguration( name="unit-test-default-cluster", image="quay.io/project-codeflare/ray:latest-py39-cu118", - mcad=True, + appwrapper=True, ) cluster = Cluster(default_config) test_aw = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) @@ -485,7 +467,7 @@ def arg_check_apply_effect(group, version, namespace, plural, body, *args): assert args == tuple() if plural == "appwrappers": assert group == "workload.codeflare.dev" - assert version == "v1beta1" + assert version == "v1beta2" with open(f"{aw_dir}unit-test-cluster.yaml") as f: aw = yaml.load(f, Loader=yaml.FullLoader) assert body == aw @@ -522,7 +504,7 @@ def arg_check_del_effect(group, version, namespace, plural, name, *args): assert args == tuple() if plural == "appwrappers": assert group == "workload.codeflare.dev" - assert version == "v1beta1" + assert version == "v1beta2" assert name == "unit-test-cluster" elif plural == "rayclusters": assert group == "ray.io" @@ -554,6 +536,10 @@ def test_cluster_up_down(mocker): "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", return_value={"items": []}, ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) cluster = cluster = createClusterWithConfig(mocker) cluster.up() cluster.down() @@ -587,7 +573,7 @@ def test_cluster_up_down_no_mcad(mocker): ) config = createClusterConfig() config.name = "unit-test-cluster-ray" - config.mcad = False + config.appwrapper = False cluster = Cluster(config) cluster.up() cluster.down() @@ -616,7 +602,7 @@ def test_get_ingress_domain(mocker): def aw_status_fields(group, version, namespace, plural, *args): assert 
group == "workload.codeflare.dev" - assert version == "v1beta1" + assert version == "v1beta2" assert namespace == "test-ns" assert plural == "appwrappers" assert args == tuple() @@ -659,6 +645,10 @@ def test_cluster_uris(mocker): "codeflare_sdk.cluster.cluster._get_ingress_domain", return_value="apps.cluster.awsroute.org", ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) cluster = cluster = createClusterWithConfig(mocker) mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", @@ -781,6 +771,10 @@ def ingress_retrieval( def test_ray_job_wrapping(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) cluster = cluster = createClusterWithConfig(mocker) cluster.config.image = "quay.io/project-codeflare/ray:latest-py39-cu118" mocker.patch( @@ -843,15 +837,11 @@ def test_print_no_cluster(capsys): def test_print_appwrappers(capsys): aw1 = AppWrapper( name="awtest1", - status=AppWrapperStatus.PENDING, - can_run=False, - job_state="queue-state", + status=AppWrapperStatus.SUSPENDED, ) aw2 = AppWrapper( name="awtest2", status=AppWrapperStatus.RUNNING, - can_run=False, - job_state="queue-state", ) try: print_app_wrappers_status([aw1, aw2]) @@ -859,18 +849,18 @@ def test_print_appwrappers(capsys): assert 1 == 0 captured = capsys.readouterr() assert captured.out == ( - "╭───────────────────────╮\n" - "│ 🚀 Cluster Queue │\n" - "│ Status 🚀 │\n" - "│ +---------+---------+ │\n" - "│ | Name | Status | │\n" - "│ +=========+=========+ │\n" - "│ | awtest1 | pending | │\n" - "│ | | | │\n" - "│ | awtest2 | running | │\n" - "│ | | | │\n" - "│ +---------+---------+ │\n" - "╰───────────────────────╯\n" + "╭─────────────────────────╮\n" + "│ 🚀 Cluster Queue │\n" + "│ Status 🚀 │\n" + "│ +---------+-----------+ │\n" + "│ | Name | Status | │\n" + "│ +=========+===========+ │\n" + "│ | awtest1 | suspended | │\n" + "│ | | | │\n" + "│ | awtest2 | running | │\n" + "│ | | | │\n" + "│ +---------+-----------+ │\n" + "╰─────────────────────────╯\n" ) @@ -898,13 +888,18 @@ def test_ray_details(mocker, capsys): "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", return_value="", ) + mocker.patch( + "codeflare_sdk.utils.generate_yaml.local_queue_exists", + return_value="true", + ) cf = Cluster( ClusterConfiguration( name="raytest2", namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=True, - mcad=True, + appwrapper=True, + local_queue="local_default_queue", ) ) captured = capsys.readouterr() @@ -1023,118 +1018,16 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "creationTimestamp": "2024-03-05T09:55:37Z", "generation": 1, "labels": { - "appwrapper.mcad.ibm.com": "quicktest", "controller-tools.k8s.io": "1.0", "resourceName": "quicktest", - "workload.codeflare.dev/appwrapper": "quicktest", "orderedinstance": "m4.xlarge_g4dn.xlarge", "kueue.x-k8s.io/queue-name": "team-a-queue", }, - "managedFields": [ - { - "apiVersion": "ray.io/v1", - "fieldsType": "FieldsV1", - "fieldsV1": { - "f:metadata": { - "f:labels": { - ".": {}, - "f:appwrapper.mcad.ibm.com": {}, - "f:controller-tools.k8s.io": {}, - "f:resourceName": {}, - "f:workload.codeflare.dev/appwrapper": {}, - }, - "f:ownerReferences": { - ".": {}, - 
'k:{"uid":"a29b1a7a-0992-4860-a8d5-a689a751a3e8"}': {}, - }, - }, - "f:spec": { - ".": {}, - "f:autoscalerOptions": { - ".": {}, - "f:idleTimeoutSeconds": {}, - "f:imagePullPolicy": {}, - "f:resources": { - ".": {}, - "f:limits": { - ".": {}, - "f:cpu": {}, - "f:memory": {}, - }, - "f:requests": { - ".": {}, - "f:cpu": {}, - "f:memory": {}, - }, - }, - "f:upscalingMode": {}, - }, - "f:enableInTreeAutoscaling": {}, - "f:headGroupSpec": { - ".": {}, - "f:rayStartParams": { - ".": {}, - "f:block": {}, - "f:dashboard-host": {}, - "f:num-gpus": {}, - }, - "f:serviceType": {}, - "f:template": { - ".": {}, - "f:spec": { - ".": {}, - "f:affinity": { - ".": {}, - "f:nodeAffinity": { - ".": {}, - "f:requiredDuringSchedulingIgnoredDuringExecution": {}, - }, - }, - "f:imagePullSecrets": {}, - "f:volumes": {}, - }, - }, - }, - "f:rayVersion": {}, - "f:workerGroupSpecs": {}, - }, - }, - "manager": "codeflare-operator", - "operation": "Update", - "time": "2024-03-05T09:55:37Z", - }, - { - "apiVersion": "ray.io/v1alpha1", - "fieldsType": "FieldsV1", - "fieldsV1": { - "f:status": { - ".": {}, - "f:desiredWorkerReplicas": {}, - "f:endpoints": { - ".": {}, - "f:client": {}, - "f:dashboard": {}, - "f:gcs": {}, - "f:metrics": {}, - }, - "f:head": {".": {}, "f:serviceIP": {}}, - "f:lastUpdateTime": {}, - "f:maxWorkerReplicas": {}, - "f:minWorkerReplicas": {}, - "f:observedGeneration": {}, - } - }, - "manager": "manager", - "operation": "Update", - "subresource": "status", - "time": "2024-03-05T09:55:37Z", - }, - ], "name": "quicktest", "namespace": "ns", "ownerReferences": [ { - "apiVersion": "workload.codeflare.dev/v1beta1", + "apiVersion": "workload.codeflare.dev/v1beta2", "blockOwnerDeletion": True, "controller": True, "kind": "AppWrapper", @@ -1166,23 +1059,6 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "template": { "metadata": {}, "spec": { - "affinity": { - "nodeAffinity": { - "requiredDuringSchedulingIgnoredDuringExecution": { - "nodeSelectorTerms": [ - { - "matchExpressions": [ - { - "key": "quicktest", - "operator": "In", - "values": ["quicktest"], - } - ] - } - ] - } - } - }, "containers": [ { "env": [ @@ -1321,23 +1197,6 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "labels": {"key": "value"}, }, "spec": { - "affinity": { - "nodeAffinity": { - "requiredDuringSchedulingIgnoredDuringExecution": { - "nodeSelectorTerms": [ - { - "matchExpressions": [ - { - "key": "quicktest", - "operator": "In", - "values": ["quicktest"], - } - ] - } - ] - } - } - }, "containers": [ { "env": [ @@ -1468,103 +1327,15 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "creationTimestamp": "2023-02-22T16:26:07Z", "generation": 1, "labels": { - "workload.codeflare.dev/appwrapper": "quicktest2", "controller-tools.k8s.io": "1.0", "resourceName": "quicktest2", "orderedinstance": "m4.xlarge_g4dn.xlarge", }, - "managedFields": [ - { - "apiVersion": "ray.io/v1", - "fieldsType": "FieldsV1", - "fieldsV1": { - "f:metadata": { - "f:labels": { - ".": {}, - "f:workload.codeflare.dev/appwrapper": {}, - "f:controller-tools.k8s.io": {}, - "f:resourceName": {}, - }, - "f:ownerReferences": { - ".": {}, - 'k:{"uid":"6334fc1b-471e-4876-8e7b-0b2277679235"}': {}, - }, - }, - "f:spec": { - ".": {}, - "f:autoscalerOptions": { - ".": {}, - "f:idleTimeoutSeconds": {}, - "f:imagePullPolicy": {}, - "f:resources": { - ".": {}, - "f:limits": { - ".": {}, - "f:cpu": {}, - "f:memory": {}, - }, - "f:requests": { - ".": {}, - "f:cpu": {}, - "f:memory": {}, - }, - }, - "f:upscalingMode": {}, - }, - 
"f:enableInTreeAutoscaling": {}, - "f:headGroupSpec": { - ".": {}, - "f:rayStartParams": { - ".": {}, - "f:block": {}, - "f:dashboard-host": {}, - "f:num-gpus": {}, - }, - "f:serviceType": {}, - "f:template": { - ".": {}, - "f:spec": {".": {}, "f:containers": {}}, - }, - }, - "f:rayVersion": {}, - "f:workerGroupSpecs": {}, - }, - }, - "manager": "mcad-controller", - "operation": "Update", - "time": "2023-02-22T16:26:07Z", - }, - { - "apiVersion": "ray.io/v1", - "fieldsType": "FieldsV1", - "fieldsV1": { - "f:status": { - ".": {}, - "f:availableWorkerReplicas": {}, - "f:desiredWorkerReplicas": {}, - "f:endpoints": { - ".": {}, - "f:client": {}, - "f:dashboard": {}, - "f:gcs": {}, - }, - "f:lastUpdateTime": {}, - "f:maxWorkerReplicas": {}, - "f:minWorkerReplicas": {}, - "f:state": {}, - } - }, - "manager": "manager", - "operation": "Update", - "subresource": "status", - "time": "2023-02-22T16:26:16Z", - }, - ], "name": "quicktest2", "namespace": "ns", "ownerReferences": [ { - "apiVersion": "workload.codeflare.dev/v1beta1", + "apiVersion": "workload.codeflare.dev/v1beta2", "blockOwnerDeletion": True, "controller": True, "kind": "AppWrapper", @@ -1723,7 +1494,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): def get_named_aw(group, version, namespace, plural, name): - aws = get_aw_obj("workload.codeflare.dev", "v1beta1", "ns", "appwrappers") + aws = get_aw_obj("workload.codeflare.dev", "v1beta2", "ns", "appwrappers") return aws["items"][0] @@ -1731,144 +1502,128 @@ def get_aw_obj(group, version, namespace, plural): api_obj1 = { "items": [ { - "apiVersion": "workload.codeflare.dev/v1beta1", + "apiVersion": "workload.codeflare.dev/v1beta2", "kind": "AppWrapper", "metadata": { - "annotations": { - "kubectl.kubernetes.io/last-applied-configuration": '{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest1","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest1","controller-tools.k8s.io":"1.0"},"name":"quicktest1","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray 
stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' - }, - "creationTimestamp": "2023-02-22T16:26:07Z", - "generation": 4, - "managedFields": [ - { - "apiVersion": "workload.codeflare.dev/v1beta1", - "fieldsType": "FieldsV1", - "fieldsV1": { - "f:spec": { - "f:resources": { - "f:GenericItems": {}, - "f:metadata": {}, - }, - "f:schedulingSpec": {}, - "f:service": {".": {}, "f:spec": {}}, - }, - "f:status": { - ".": {}, - "f:canrun": {}, - "f:conditions": {}, - "f:controllerfirsttimestamp": {}, - "f:filterignore": {}, - "f:queuejobstate": {}, - "f:sender": {}, - "f:state": {}, - "f:systempriority": {}, - }, - }, - "manager": "Go-http-client", - "operation": "Update", - "time": "2023-02-22T16:26:07Z", - }, - { - "apiVersion": "workload.codeflare.dev/v1beta1", - "fieldsType": "FieldsV1", - "fieldsV1": { - "f:metadata": { - "f:annotations": { - ".": {}, - "f:kubectl.kubernetes.io/last-applied-configuration": {}, - } - }, - "f:spec": { - ".": {}, - "f:priority": {}, - "f:resources": {".": {}, "f:Items": {}}, - }, - }, - "manager": "kubectl-client-side-apply", - "operation": "Update", - "time": "2023-02-22T16:26:07Z", - }, - ], "name": "quicktest1", "namespace": "ns", - "resourceVersion": "9482384", - "uid": "6334fc1b-471e-4876-8e7b-0b2277679235", }, "spec": { - "priority": 9, - "resources": { - "GenericItems": [ - { - "allocated": 0, - "custompodresources": [ - { - "limits": { - "cpu": "2", - "memory": "8G", - "nvidia.com/gpu": "0", - }, - "replicas": 1, - "requests": { - "cpu": "2", - "memory": "8G", - "nvidia.com/gpu": "0", - }, + "components": [ + { + "template": { + "apiVersion": "ray.io/v1", + "kind": "RayCluster", + "metadata": { + "labels": { + "controller-tools.k8s.io": "1.0", }, - { - "limits": { - "cpu": "1", - "memory": "2G", - "nvidia.com/gpu": "0", - }, - "replicas": 1, - "requests": { - "cpu": "1", - "memory": "2G", - "nvidia.com/gpu": "0", + "name": "quicktest1", + "namespace": "ns", + }, + "spec": { + "autoscalerOptions": { + "idleTimeoutSeconds": 60, + "imagePullPolicy": "Always", + "resources": { + "limits": { + "cpu": "500m", + "memory": "512Mi", + }, + "requests": { + "cpu": "500m", + "memory": "512Mi", + }, }, + "upscalingMode": "Default", }, - ], - "generictemplate": { - "apiVersion": "ray.io/v1", - "kind": "RayCluster", - "metadata": { - 
"labels": { - "workload.codeflare.dev/appwrapper": "quicktest1", - "controller-tools.k8s.io": "1.0", + "enableInTreeAutoscaling": False, + "headGroupSpec": { + "rayStartParams": { + "block": "true", + "dashboard-host": "0.0.0.0", + "num-gpus": "0", }, - "name": "quicktest1", - "namespace": "ns", - }, - "spec": { - "autoscalerOptions": { - "idleTimeoutSeconds": 60, - "imagePullPolicy": "Always", - "resources": { - "limits": { - "cpu": "500m", - "memory": "512Mi", - }, - "requests": { - "cpu": "500m", - "memory": "512Mi", - }, - }, - "upscalingMode": "Default", + "serviceType": "ClusterIP", + "template": { + "spec": { + "containers": [ + { + "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", + "imagePullPolicy": "Always", + "lifecycle": { + "preStop": { + "exec": { + "command": [ + "/bin/sh", + "-c", + "ray stop", + ] + } + } + }, + "name": "ray-head", + "ports": [ + { + "containerPort": 6379, + "name": "gcs", + }, + { + "containerPort": 8265, + "name": "dashboard", + }, + { + "containerPort": 10001, + "name": "client", + }, + ], + "resources": { + "limits": { + "cpu": 2, + "memory": "8G", + "nvidia.com/gpu": 0, + }, + "requests": { + "cpu": 2, + "memory": "8G", + "nvidia.com/gpu": 0, + }, + }, + } + ] + } }, - "enableInTreeAutoscaling": False, - "headGroupSpec": { + }, + "rayVersion": "1.12.0", + "workerGroupSpecs": [ + { + "groupName": "small-group-quicktest", + "maxReplicas": 1, + "minReplicas": 1, "rayStartParams": { "block": "true", - "dashboard-host": "0.0.0.0", "num-gpus": "0", }, - "serviceType": "ClusterIP", + "replicas": 1, "template": { + "metadata": { + "annotations": {"key": "value"}, + "labels": {"key": "value"}, + }, "spec": { "containers": [ { + "env": [ + { + "name": "MY_POD_IP", + "valueFrom": { + "fieldRef": { + "fieldPath": "status.podIP" + } + }, + } + ], "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "imagePullPolicy": "Always", "lifecycle": { "preStop": { "exec": { @@ -1880,322 +1635,190 @@ def get_aw_obj(group, version, namespace, plural): } } }, - "name": "ray-head", - "ports": [ - { - "containerPort": 6379, - "name": "gcs", - }, - { - "containerPort": 8265, - "name": "dashboard", - }, - { - "containerPort": 10001, - "name": "client", - }, - ], + "name": "machine-learning", "resources": { "limits": { - "cpu": 2, - "memory": "8G", + "cpu": 1, + "memory": "2G", "nvidia.com/gpu": 0, }, "requests": { - "cpu": 2, - "memory": "8G", + "cpu": 1, + "memory": "2G", "nvidia.com/gpu": 0, }, }, } - ] - } - }, - }, - "rayVersion": "1.12.0", - "workerGroupSpecs": [ - { - "groupName": "small-group-quicktest", - "maxReplicas": 1, - "minReplicas": 1, - "rayStartParams": { - "block": "true", - "num-gpus": "0", - }, - "replicas": 1, - "template": { - "metadata": { - "annotations": {"key": "value"}, - "labels": {"key": "value"}, - }, - "spec": { - "containers": [ - { - "env": [ - { - "name": "MY_POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - }, - } - ], - "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "lifecycle": { - "preStop": { - "exec": { - "command": [ - "/bin/sh", - "-c", - "ray stop", - ] - } - } - }, - "name": "machine-learning", - "resources": { - "limits": { - "cpu": 1, - "memory": "2G", - "nvidia.com/gpu": 0, - }, - "requests": { - "cpu": 1, - "memory": "2G", - "nvidia.com/gpu": 0, - }, - }, - } - ], - }, + ], }, - } - ], - }, + }, + } + ], }, - "metadata": {}, - "priority": 0, - 
"priorityslope": 0, - "replicas": 1, }, - { - "allocated": 0, - "generictemplate": { - "apiVersion": "networking.k8s.io/v1", - "kind": "Ingress", - "metadata": { - "labels": { - "ingress-owner": "appwrapper-name", - }, - "name": "ray-dashboard-quicktest", - "namespace": "default", + }, + { + "template": { + "apiVersion": "networking.k8s.io/v1", + "kind": "Ingress", + "metadata": { + "labels": { + "ingress-owner": "appwrapper-name", }, - "spec": { - "ingressClassName": "nginx", - "rules": [ - { - "http": { - "paths": { - "backend": { - "service": { - "name": "quicktest-head-svc", - "port": { - "number": 8265 - }, - }, + "name": "ray-dashboard-quicktest", + "namespace": "default", + }, + "spec": { + "ingressClassName": "nginx", + "rules": [ + { + "http": { + "paths": { + "backend": { + "service": { + "name": "quicktest-head-svc", + "port": {"number": 8265}, }, - "pathType": "Prefix", - "path": "/", }, + "pathType": "Prefix", + "path": "/", }, - "host": "quicktest.awsroute.com", - } - ], - }, + }, + "host": "quicktest.awsroute.com", + } + ], }, - "metadata": {}, - "priority": 0, - "priorityslope": 0, }, - ], - "Items": [], - "metadata": {}, - }, - "schedulingSpec": {}, - "service": {"spec": {}}, - }, - "status": { - "canrun": True, - "conditions": [ - { - "lastTransitionMicroTime": "2023-02-22T16:26:07.559447Z", - "lastUpdateMicroTime": "2023-02-22T16:26:07.559447Z", - "status": "True", - "type": "Init", - }, - { - "lastTransitionMicroTime": "2023-02-22T16:26:07.559551Z", - "lastUpdateMicroTime": "2023-02-22T16:26:07.559551Z", - "reason": "AwaitingHeadOfLine", - "status": "True", - "type": "Queueing", - }, - { - "lastTransitionMicroTime": "2023-02-22T16:26:13.220564Z", - "lastUpdateMicroTime": "2023-02-22T16:26:13.220564Z", - "reason": "AppWrapperRunnable", - "status": "True", - "type": "Dispatched", }, ], - "controllerfirsttimestamp": "2023-02-22T16:26:07.559447Z", - "filterignore": True, - "queuejobstate": "Dispatched", - "sender": "before manageQueueJob - afterEtcdDispatching", - "state": "Running", - "systempriority": 9, + }, + "status": { + "phase": "Running", }, }, { - "apiVersion": "workload.codeflare.dev/v1beta1", + "apiVersion": "workload.codeflare.dev/v1beta2", "kind": "AppWrapper", "metadata": { - "annotations": { - "kubectl.kubernetes.io/last-applied-configuration": '{"apiVersion":"codeflare.dev/v1beta1","kind":"AppWrapper","metadata":{"annotations":{},"name":"quicktest2","namespace":"ns"},"spec":{"priority":9,"resources":{"GenericItems":[{"custompodresources":[{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}},{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"replicas":1,"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}],"generictemplate":{"apiVersion":"ray.io/v1","kind":"RayCluster","metadata":{"labels":{"appwrapper.codeflare.dev":"quicktest2","controller-tools.k8s.io":"1.0"},"name":"quicktest2","namespace":"ns"},"spec":{"autoscalerOptions":{"idleTimeoutSeconds":60,"imagePullPolicy":"Always","resources":{"limits":{"cpu":"500m","memory":"512Mi"},"requests":{"cpu":"500m","memory":"512Mi"}},"upscalingMode":"Default"},"enableInTreeAutoscaling":false,"headGroupSpec":{"rayStartParams":{"block":"true","dashboard-host":"0.0.0.0","num-gpus":"0"},"serviceType":"ClusterIP","template":{"spec":{"containers":[{"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","imagePullPolicy":"Always","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray 
stop"]}}},"name":"ray-head","ports":[{"containerPort":6379,"name":"gcs"},{"containerPort":8265,"name":"dashboard"},{"containerPort":10001,"name":"client"}],"resources":{"limits":{"cpu":2,"memory":"8G","nvidia.com/gpu":0},"requests":{"cpu":2,"memory":"8G","nvidia.com/gpu":0}}}]}}},"rayVersion":"1.12.0","workerGroupSpecs":[{"groupName":"small-group-quicktest","maxReplicas":1,"minReplicas":1,"rayStartParams":{"block":"true","num-gpus":"0"},"replicas":1,"template":{"metadata":{"annotations":{"key":"value"},"labels":{"key":"value"}},"spec":{"containers":[{"env":[{"name":"MY_POD_IP","valueFrom":{"fieldRef":{"fieldPath":"status.podIP"}}}],"image":"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103","lifecycle":{"preStop":{"exec":{"command":["/bin/sh","-c","ray stop"]}}},"name":"machine-learning","resources":{"limits":{"cpu":1,"memory":"2G","nvidia.com/gpu":0},"requests":{"cpu":1,"memory":"2G","nvidia.com/gpu":0}}}],}}}]}},"replicas":1},{"generictemplate":{"apiVersion":"route.openshift.io/v1","kind":"Route","metadata":{"labels":{"odh-ray-cluster-service":"quicktest-head-svc"},"name":"ray-dashboard-quicktest","namespace":"default"},"spec":{"port":{"targetPort":"dashboard"},"to":{"kind":"Service","name":"quicktest-head-svc"}}},"replica":1}],"Items":[]}}}\n' - }, - "creationTimestamp": "2023-02-22T16:26:07Z", - "generation": 4, - "managedFields": [ - { - "apiVersion": "workload.codeflare.dev/v1beta1", - "fieldsType": "FieldsV1", - "fieldsV1": { - "f:spec": { - "f:resources": { - "f:GenericItems": {}, - "f:metadata": {}, - }, - "f:schedulingSpec": {}, - "f:service": {".": {}, "f:spec": {}}, - }, - "f:status": { - ".": {}, - "f:canrun": {}, - "f:conditions": {}, - "f:controllerfirsttimestamp": {}, - "f:filterignore": {}, - "f:queuejobstate": {}, - "f:sender": {}, - "f:state": {}, - "f:systempriority": {}, - }, - }, - "manager": "Go-http-client", - "operation": "Update", - "time": "2023-02-22T16:26:07Z", - }, - { - "apiVersion": "workload.codeflare.dev/v1beta1", - "fieldsType": "FieldsV1", - "fieldsV1": { - "f:metadata": { - "f:annotations": { - ".": {}, - "f:kubectl.kubernetes.io/last-applied-configuration": {}, - } - }, - "f:spec": { - ".": {}, - "f:priority": {}, - "f:resources": {".": {}, "f:Items": {}}, - }, - }, - "manager": "kubectl-client-side-apply", - "operation": "Update", - "time": "2023-02-22T16:26:07Z", - }, - ], "name": "quicktest2", "namespace": "ns", - "resourceVersion": "9482384", - "uid": "6334fc1b-471e-4876-8e7b-0b2277679235", }, "spec": { - "priority": 9, - "resources": { - "GenericItems": [ - { - "allocated": 0, - "custompodresources": [ - { - "limits": { - "cpu": "2", - "memory": "8G", - "nvidia.com/gpu": "0", - }, - "replicas": 1, - "requests": { - "cpu": "2", - "memory": "8G", - "nvidia.com/gpu": "0", - }, + "components": [ + { + "template": { + "apiVersion": "ray.io/v1", + "kind": "RayCluster", + "metadata": { + "labels": { + "controller-tools.k8s.io": "1.0", }, - { - "limits": { - "cpu": "1", - "memory": "2G", - "nvidia.com/gpu": "0", - }, - "replicas": 1, - "requests": { - "cpu": "1", - "memory": "2G", - "nvidia.com/gpu": "0", + "name": "quicktest2", + "namespace": "ns", + }, + "spec": { + "autoscalerOptions": { + "idleTimeoutSeconds": 60, + "imagePullPolicy": "Always", + "resources": { + "limits": { + "cpu": "500m", + "memory": "512Mi", + }, + "requests": { + "cpu": "500m", + "memory": "512Mi", + }, }, + "upscalingMode": "Default", }, - ], - "generictemplate": { - "apiVersion": "ray.io/v1", - "kind": "RayCluster", - "metadata": { - 
"labels": { - "workload.codeflare.dev/appwrapper": "quicktest2", - "controller-tools.k8s.io": "1.0", + "enableInTreeAutoscaling": False, + "headGroupSpec": { + "rayStartParams": { + "block": "true", + "dashboard-host": "0.0.0.0", + "num-gpus": "0", }, - "name": "quicktest2", - "namespace": "ns", - }, - "spec": { - "autoscalerOptions": { - "idleTimeoutSeconds": 60, - "imagePullPolicy": "Always", - "resources": { - "limits": { - "cpu": "500m", - "memory": "512Mi", - }, - "requests": { - "cpu": "500m", - "memory": "512Mi", - }, - }, - "upscalingMode": "Default", + "serviceType": "ClusterIP", + "template": { + "spec": { + "containers": [ + { + "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", + "imagePullPolicy": "Always", + "lifecycle": { + "preStop": { + "exec": { + "command": [ + "/bin/sh", + "-c", + "ray stop", + ] + } + } + }, + "name": "ray-head", + "ports": [ + { + "containerPort": 6379, + "name": "gcs", + }, + { + "containerPort": 8265, + "name": "dashboard", + }, + { + "containerPort": 10001, + "name": "client", + }, + ], + "resources": { + "limits": { + "cpu": 2, + "memory": "8G", + "nvidia.com/gpu": 0, + }, + "requests": { + "cpu": 2, + "memory": "8G", + "nvidia.com/gpu": 0, + }, + }, + } + ] + } }, - "enableInTreeAutoscaling": False, - "headGroupSpec": { + }, + "rayVersion": "1.12.0", + "workerGroupSpecs": [ + { + "groupName": "small-group-quicktest", + "maxReplicas": 1, + "minReplicas": 1, "rayStartParams": { "block": "true", - "dashboard-host": "0.0.0.0", "num-gpus": "0", }, - "serviceType": "ClusterIP", + "replicas": 1, "template": { + "metadata": { + "annotations": {"key": "value"}, + "labels": {"key": "value"}, + }, "spec": { "containers": [ { + "env": [ + { + "name": "MY_POD_IP", + "valueFrom": { + "fieldRef": { + "fieldPath": "status.podIP" + } + }, + } + ], "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "imagePullPolicy": "Always", "lifecycle": { "preStop": { "exec": { @@ -2207,166 +1830,52 @@ def get_aw_obj(group, version, namespace, plural): } } }, - "name": "ray-head", - "ports": [ - { - "containerPort": 6379, - "name": "gcs", - }, - { - "containerPort": 8265, - "name": "dashboard", - }, - { - "containerPort": 10001, - "name": "client", - }, - ], + "name": "machine-learning", "resources": { "limits": { - "cpu": 2, - "memory": "8G", + "cpu": 1, + "memory": "2G", "nvidia.com/gpu": 0, }, "requests": { - "cpu": 2, - "memory": "8G", + "cpu": 1, + "memory": "2G", "nvidia.com/gpu": 0, }, }, } - ] - } - }, - }, - "rayVersion": "1.12.0", - "workerGroupSpecs": [ - { - "groupName": "small-group-quicktest", - "maxReplicas": 1, - "minReplicas": 1, - "rayStartParams": { - "block": "true", - "num-gpus": "0", + ], }, - "replicas": 1, - "template": { - "metadata": { - "annotations": {"key": "value"}, - "labels": {"key": "value"}, - }, - "spec": { - "containers": [ - { - "env": [ - { - "name": "MY_POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - }, - } - ], - "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "lifecycle": { - "preStop": { - "exec": { - "command": [ - "/bin/sh", - "-c", - "ray stop", - ] - } - } - }, - "name": "machine-learning", - "resources": { - "limits": { - "cpu": 1, - "memory": "2G", - "nvidia.com/gpu": 0, - }, - "requests": { - "cpu": 1, - "memory": "2G", - "nvidia.com/gpu": 0, - }, - }, - } - ], - }, - }, - } - ], - }, + }, + } + ], }, - "metadata": {}, - "priority": 0, - 
"priorityslope": 0, - "replicas": 1, }, - { - "allocated": 0, - "generictemplate": { - "apiVersion": "route.openshift.io/v1", - "kind": "Route", - "metadata": { - "labels": { - "odh-ray-cluster-service": "quicktest-head-svc" - }, - "name": "ray-dashboard-quicktest", - "namespace": "default", + }, + { + "template": { + "apiVersion": "route.openshift.io/v1", + "kind": "Route", + "metadata": { + "labels": { + "odh-ray-cluster-service": "quicktest-head-svc" }, - "spec": { - "port": {"targetPort": "dashboard"}, - "to": { - "kind": "Service", - "name": "quicktest-head-svc", - }, + "name": "ray-dashboard-quicktest", + "namespace": "default", + }, + "spec": { + "port": {"targetPort": "dashboard"}, + "to": { + "kind": "Service", + "name": "quicktest-head-svc", }, }, - "metadata": {}, - "priority": 0, - "priorityslope": 0, }, - ], - "Items": [], - "metadata": {}, - }, - "schedulingSpec": {}, - "service": {"spec": {}}, - }, - "status": { - "canrun": True, - "conditions": [ - { - "lastTransitionMicroTime": "2023-02-22T16:26:07.559447Z", - "lastUpdateMicroTime": "2023-02-22T16:26:07.559447Z", - "status": "True", - "type": "Init", - }, - { - "lastTransitionMicroTime": "2023-02-22T16:26:07.559551Z", - "lastUpdateMicroTime": "2023-02-22T16:26:07.559551Z", - "reason": "AwaitingHeadOfLine", - "status": "True", - "type": "Queueing", - }, - { - "lastTransitionMicroTime": "2023-02-22T16:26:13.220564Z", - "lastUpdateMicroTime": "2023-02-22T16:26:13.220564Z", - "reason": "AppWrapperRunnable", - "status": "True", - "type": "Dispatched", }, ], - "controllerfirsttimestamp": "2023-02-22T16:26:07.559447Z", - "filterignore": True, - "queuejobstate": "Dispatched", - "sender": "before manageQueueJob - afterEtcdDispatching", - "state": "Pending", - "systempriority": 9, + }, + "status": { + "phase": "Suspended", }, }, ] @@ -2428,6 +1937,10 @@ def test_get_cluster_openshift(mocker): MagicMock(versions=[MagicMock(group_version="route.openshift.io/v1")]) ] mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) + mocker.patch( + "codeflare_sdk.utils.generate_yaml.local_queue_exists", + return_value="true", + ) assert is_openshift_cluster() @@ -2437,7 +1950,7 @@ def custom_side_effect(group, version, namespace, plural, **kwargs): elif plural == "rayclusters": return get_ray_obj("ray.io", "v1", "ns", "rayclusters") elif plural == "appwrappers": - return get_aw_obj("workload.codeflare.dev", "v1beta1", "ns", "appwrappers") + return get_aw_obj("workload.codeflare.dev", "v1beta2", "ns", "appwrappers") elif plural == "localqueues": return get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues") @@ -2492,6 +2005,10 @@ def test_get_cluster(mocker): "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), ) + mocker.patch( + "codeflare_sdk.utils.generate_yaml.local_queue_exists", + return_value="true", + ) cluster = get_cluster("quicktest") cluster_config = cluster.config assert cluster_config.name == "quicktest" and cluster_config.namespace == "ns" @@ -2502,7 +2019,6 @@ def test_get_cluster(mocker): assert cluster_config.min_cpus == 1 and cluster_config.max_cpus == 1 assert cluster_config.min_memory == "2G" and cluster_config.max_memory == "2G" assert cluster_config.num_gpus == 0 - assert cluster_config.instascale assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -2521,6 +2037,10 @@ def test_get_cluster_no_mcad(mocker): 
"kubernetes.client.NetworkingV1Api.list_namespaced_ingress", return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), ) + mocker.patch( + "codeflare_sdk.utils.generate_yaml.local_queue_exists", + return_value="true", + ) cluster = get_cluster("quicktest") cluster_config = cluster.config assert cluster_config.name == "quicktest" and cluster_config.namespace == "ns" @@ -2531,7 +2051,6 @@ def test_get_cluster_no_mcad(mocker): assert cluster_config.min_cpus == 1 and cluster_config.max_cpus == 1 assert cluster_config.min_memory == "2G" and cluster_config.max_memory == "2G" assert cluster_config.num_gpus == 0 - assert cluster_config.instascale assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -2672,7 +2191,7 @@ def test_list_queue(mocker, capsys): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", side_effect=get_obj_none, ) - list_all_queued("ns", mcad=True) + list_all_queued("ns", appwrapper=True) captured = capsys.readouterr() assert captured.out == ( "╭──────────────────────────────────────────────────────────────────────────────╮\n" @@ -2683,21 +2202,21 @@ def test_list_queue(mocker, capsys): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", side_effect=get_aw_obj, ) - list_all_queued("ns", mcad=True) + list_all_queued("ns", appwrapper=True) captured = capsys.readouterr() assert captured.out == ( - "╭──────────────────────────╮\n" - "│ 🚀 Cluster Queue Status │\n" - "│ 🚀 │\n" - "│ +------------+---------+ │\n" - "│ | Name | Status | │\n" - "│ +============+=========+ │\n" - "│ | quicktest1 | running | │\n" - "│ | | | │\n" - "│ | quicktest2 | pending | │\n" - "│ | | | │\n" - "│ +------------+---------+ │\n" - "╰──────────────────────────╯\n" + "╭────────────────────────────╮\n" + "│ 🚀 Cluster Queue Status │\n" + "│ 🚀 │\n" + "│ +------------+-----------+ │\n" + "│ | Name | Status | │\n" + "│ +============+===========+ │\n" + "│ | quicktest1 | running | │\n" + "│ | | | │\n" + "│ | quicktest2 | suspended | │\n" + "│ | | | │\n" + "│ +------------+-----------+ │\n" + "╰────────────────────────────╯\n" ) @@ -2747,9 +2266,11 @@ def test_list_queue_rayclusters(mocker, capsys): def test_cluster_status(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - fake_aw = AppWrapper( - "test", AppWrapperStatus.FAILED, can_run=True, job_state="unused" + mocker.patch( + "codeflare_sdk.utils.generate_yaml.local_queue_exists", + return_value="true", ) + fake_aw = AppWrapper("test", AppWrapperStatus.FAILED) fake_ray = RayCluster( name="test", status=RayClusterStatus.UNKNOWN, @@ -2770,7 +2291,8 @@ def test_cluster_status(mocker): namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=True, - mcad=True, + appwrapper=True, + local_queue="local_default_queue", ) ) mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) @@ -2786,29 +2308,24 @@ def test_cluster_status(mocker): assert status == CodeFlareClusterStatus.FAILED assert ready == False - fake_aw.status = AppWrapperStatus.DELETED - status, ready = cf.status() - assert status == CodeFlareClusterStatus.FAILED - assert ready == False - - fake_aw.status = AppWrapperStatus.PENDING + fake_aw.status = AppWrapperStatus.SUSPENDED status, ready = cf.status() assert status == CodeFlareClusterStatus.QUEUED assert ready == False - fake_aw.status = AppWrapperStatus.COMPLETED + fake_aw.status = 
AppWrapperStatus.RESUMING status, ready = cf.status() assert status == CodeFlareClusterStatus.STARTING assert ready == False - fake_aw.status = AppWrapperStatus.RUNNING_HOLD_COMPLETION + fake_aw.status = AppWrapperStatus.RESETTING status, ready = cf.status() assert status == CodeFlareClusterStatus.STARTING assert ready == False fake_aw.status = AppWrapperStatus.RUNNING status, ready = cf.status() - assert status == CodeFlareClusterStatus.STARTING + assert status == CodeFlareClusterStatus.UNKNOWN assert ready == False mocker.patch( @@ -2844,6 +2361,10 @@ def test_wait_ready(mocker, capsys): mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) mocker.patch("codeflare_sdk.cluster.cluster._ray_cluster_status", return_value=None) + mocker.patch( + "codeflare_sdk.utils.generate_yaml.local_queue_exists", + return_value="true", + ) mocker.patch.object( client.CustomObjectsApi, "list_namespaced_custom_object", @@ -2865,7 +2386,8 @@ def test_wait_ready(mocker, capsys): namespace="ns", image="quay.io/project-codeflare/ray:latest-py39-cu118", write_to_file=True, - mcad=True, + appwrapper=True, + local_queue="local-queue-default", ) ) try: @@ -2937,7 +2459,7 @@ def test_AWManager_creation(mocker): def arg_check_aw_apply_effect(group, version, namespace, plural, body, *args): assert group == "workload.codeflare.dev" - assert version == "v1beta1" + assert version == "v1beta2" assert namespace == "ns" assert plural == "appwrappers" with open(f"{aw_dir}test.yaml") as f: @@ -2948,7 +2470,7 @@ def arg_check_aw_apply_effect(group, version, namespace, plural, body, *args): def arg_check_aw_del_effect(group, version, namespace, plural, name, *args): assert group == "workload.codeflare.dev" - assert version == "v1beta1" + assert version == "v1beta2" assert namespace == "ns" assert plural == "appwrappers" assert name == "test" @@ -3255,7 +2777,6 @@ def test_rjc_list_jobs(ray_job_client, mocker): # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}unit-test-cluster.yaml") - os.remove(f"{aw_dir}prio-test-cluster.yaml") os.remove(f"{aw_dir}test.yaml") os.remove(f"{aw_dir}raytest2.yaml") os.remove(f"{aw_dir}unit-test-cluster-ray.yaml") diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 329df45e..baa14aac 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -14,8 +14,7 @@ def createClusterConfig(): min_memory=5, max_memory=6, num_gpus=7, - mcad=True, - instascale=True, + appwrapper=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], image="quay.io/project-codeflare/ray:latest-py39-cu118", From 23e30e335a7a9cf077252ab9d258b253a75429ac Mon Sep 17 00:00:00 2001 From: codeflare-machine-account <138894154+codeflare-machine-account@users.noreply.github.com> Date: Thu, 6 Jun 2024 09:24:49 -0400 Subject: [PATCH 251/496] Changes in docs for release: v0.16.3 (#555) Co-authored-by: codeflare-machine-account --- .../cluster/awload.html | 12 +- .../cluster/cluster.html | 269 +++------- .../cluster/config.html | 24 +- .../detailed-documentation/cluster/model.html | 79 ++- .../utils/generate_yaml.html | 498 ++++-------------- 5 files changed, 220 insertions(+), 662 deletions(-) diff --git a/docs/detailed-documentation/cluster/awload.html b/docs/detailed-documentation/cluster/awload.html index 57b407e8..297ebc02 100644 --- a/docs/detailed-documentation/cluster/awload.html +++ 
b/docs/detailed-documentation/cluster/awload.html
@@ -93,7 +93,7 @@

    Module codeflare_sdk.cluster.awload

            api_instance = client.CustomObjectsApi(api_config_handler())
            api_instance.create_namespaced_custom_object(
                group="workload.codeflare.dev",
-               version="v1beta1",
+               version="v1beta2",
                namespace=self.namespace,
                plural="appwrappers",
                body=self.awyaml,
@@ -118,7 +118,7 @@

    Module codeflare_sdk.cluster.awload

            api_instance = client.CustomObjectsApi(api_config_handler())
            api_instance.delete_namespaced_custom_object(
                group="workload.codeflare.dev",
-               version="v1beta1",
+               version="v1beta2",
                namespace=self.namespace,
                plural="appwrappers",
                name=self.name,
@@ -187,7 +187,7 @@

    Classes

            api_instance = client.CustomObjectsApi(api_config_handler())
            api_instance.create_namespaced_custom_object(
                group="workload.codeflare.dev",
-               version="v1beta1",
+               version="v1beta2",
                namespace=self.namespace,
                plural="appwrappers",
                body=self.awyaml,
@@ -212,7 +212,7 @@

    Classes

            api_instance = client.CustomObjectsApi(api_config_handler())
            api_instance.delete_namespaced_custom_object(
                group="workload.codeflare.dev",
-               version="v1beta1",
+               version="v1beta2",
                namespace=self.namespace,
                plural="appwrappers",
                name=self.name,
@@ -249,7 +249,7 @@

    Methods

            api_instance = client.CustomObjectsApi(api_config_handler())
            api_instance.delete_namespaced_custom_object(
                group="workload.codeflare.dev",
-               version="v1beta1",
+               version="v1beta2",
                namespace=self.namespace,
                plural="appwrappers",
                name=self.name,
@@ -279,7 +279,7 @@

    Methods

            api_instance = client.CustomObjectsApi(api_config_handler())
            api_instance.create_namespaced_custom_object(
                group="workload.codeflare.dev",
-               version="v1beta1",
+               version="v1beta2",
                namespace=self.namespace,
                plural="appwrappers",
                body=self.awyaml,
diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html
index 07aea536..7544b4d5 100644
--- a/docs/detailed-documentation/cluster/cluster.html
+++ b/docs/detailed-documentation/cluster/cluster.html
@@ -135,26 +135,6 @@

    Module codeflare_sdk.cluster.cluster

            )
        return self._job_submission_client

-    def evaluate_dispatch_priority(self):
-        priority_class = self.config.dispatch_priority
-
-        try:
-            config_check()
-            api_instance = client.CustomObjectsApi(api_config_handler())
-            priority_classes = api_instance.list_cluster_custom_object(
-                group="scheduling.k8s.io",
-                version="v1",
-                plural="priorityclasses",
-            )
-        except Exception as e:  # pragma: no cover
-            return _kube_api_error_handling(e)
-
-        for pc in priority_classes["items"]:
-            if pc["metadata"]["name"] == priority_class:
-                return pc["value"]
-        print(f"Priority class {priority_class} is not available in the cluster")
-        return None
-
    def validate_image_config(self):
        """
        Validates that the image configuration is not empty.
@@ -184,18 +164,6 @@

    Module codeflare_sdk.cluster.cluster

    self.validate_image_config() # Before attempting to create the cluster AW, let's evaluate the ClusterConfig - if self.config.dispatch_priority: - if not self.config.mcad: - raise ValueError( - "Invalid Cluster Configuration, cannot have dispatch priority without MCAD" - ) - priority_val = self.evaluate_dispatch_priority() - if priority_val == None: - raise ValueError( - "Invalid Cluster Configuration, AppWrapper not generated" - ) - else: - priority_val = None name = self.config.name namespace = self.config.namespace @@ -210,12 +178,10 @@

    Module codeflare_sdk.cluster.cluster

    workers = self.config.num_workers template = self.config.template image = self.config.image - instascale = self.config.instascale - mcad = self.config.mcad + appwrapper = self.config.appwrapper instance_types = self.config.machine_types env = self.config.envs image_pull_secrets = self.config.image_pull_secrets - dispatch_priority = self.config.dispatch_priority write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls local_queue = self.config.local_queue @@ -234,13 +200,10 @@

    Module codeflare_sdk.cluster.cluster

    workers=workers, template=template, image=image, - instascale=instascale, - mcad=mcad, + appwrapper=appwrapper, instance_types=instance_types, env=env, image_pull_secrets=image_pull_secrets, - dispatch_priority=dispatch_priority, - priority_val=priority_val, write_to_file=write_to_file, verify_tls=verify_tls, local_queue=local_queue, @@ -262,13 +225,13 @@

    Module codeflare_sdk.cluster.cluster

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - if self.config.mcad: + if self.config.appwrapper: if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: aw = yaml.load(f, Loader=yaml.FullLoader) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", body=aw, @@ -277,7 +240,7 @@

    Module codeflare_sdk.cluster.cluster

    aw = yaml.safe_load(self.app_wrapper_yaml) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", body=aw, @@ -316,10 +279,10 @@

    Module codeflare_sdk.cluster.cluster

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - if self.config.mcad: + if self.config.appwrapper: api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", name=self.app_wrapper_name, @@ -338,30 +301,28 @@

    Module codeflare_sdk.cluster.cluster

    """ ready = False status = CodeFlareClusterStatus.UNKNOWN - if self.config.mcad: + if self.config.appwrapper: # check the app wrapper status appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) if appwrapper: if appwrapper.status in [ - AppWrapperStatus.RUNNING, - AppWrapperStatus.COMPLETED, - AppWrapperStatus.RUNNING_HOLD_COMPLETION, + AppWrapperStatus.RESUMING, + AppWrapperStatus.RESETTING, ]: ready = False status = CodeFlareClusterStatus.STARTING elif appwrapper.status in [ AppWrapperStatus.FAILED, - AppWrapperStatus.DELETED, ]: ready = False status = CodeFlareClusterStatus.FAILED # should deleted be separate return status, ready # exit early, no need to check ray status elif appwrapper.status in [ - AppWrapperStatus.PENDING, - AppWrapperStatus.QUEUEING, + AppWrapperStatus.SUSPENDED, + AppWrapperStatus.SUSPENDING, ]: ready = False - if appwrapper.status == AppWrapperStatus.PENDING: + if appwrapper.status == AppWrapperStatus.SUSPENDED: status = CodeFlareClusterStatus.QUEUED else: status = CodeFlareClusterStatus.QUEUEING @@ -533,7 +494,7 @@

    Module codeflare_sdk.cluster.cluster

    def from_k8_cluster_object( rc, - mcad=True, + appwrapper=True, write_to_file=False, verify_tls=True, ): @@ -566,11 +527,10 @@

    Module codeflare_sdk.cluster.cluster

    "resources" ]["limits"]["nvidia.com/gpu"] ), - instascale=True if machine_types else False, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], - mcad=mcad, + appwrapper=appwrapper, write_to_file=write_to_file, verify_tls=verify_tls, local_queue=rc["metadata"] @@ -629,15 +589,15 @@

    Module codeflare_sdk.cluster.cluster

    return clusters -def list_all_queued(namespace: str, print_to_console: bool = True, mcad: bool = False): +def list_all_queued( + namespace: str, print_to_console: bool = True, appwrapper: bool = False +): """ Returns (and prints by default) a list of all currently queued-up Ray Clusters in a given namespace. """ - if mcad: - resources = _get_app_wrappers( - namespace, filter=[AppWrapperStatus.RUNNING, AppWrapperStatus.PENDING] - ) + if appwrapper: + resources = _get_app_wrappers(namespace, filter=[AppWrapperStatus.SUSPENDED]) if print_to_console: pretty_print.print_app_wrappers_status(resources) else: @@ -707,10 +667,10 @@

    Module codeflare_sdk.cluster.cluster

    for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: - mcad = _check_aw_exists(cluster_name, namespace) + appwrapper = _check_aw_exists(cluster_name, namespace) return Cluster.from_k8_cluster_object( rc, - mcad=mcad, + appwrapper=appwrapper, write_to_file=write_to_file, verify_tls=verify_tls, ) @@ -753,7 +713,7 @@

    Module codeflare_sdk.cluster.cluster

    api_instance = client.CustomObjectsApi(api_config_handler()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", ) @@ -813,7 +773,7 @@

    Module codeflare_sdk.cluster.cluster

    api_instance = client.CustomObjectsApi(api_config_handler()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", ) @@ -883,7 +843,7 @@

    Module codeflare_sdk.cluster.cluster

    api_instance = client.CustomObjectsApi(api_config_handler()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", ) @@ -977,18 +937,14 @@

    Module codeflare_sdk.cluster.cluster

    def _map_to_app_wrapper(aw) -> AppWrapper: - if "status" in aw and "canrun" in aw["status"]: + if "status" in aw: return AppWrapper( name=aw["metadata"]["name"], - status=AppWrapperStatus(aw["status"]["state"].lower()), - can_run=aw["status"]["canrun"], - job_state=aw["status"]["queuejobstate"], + status=AppWrapperStatus(aw["status"]["phase"].lower()), ) return AppWrapper( name=aw["metadata"]["name"], - status=AppWrapperStatus("queueing"), - can_run=False, - job_state="Still adding to queue", + status=AppWrapperStatus("suspended"), ) @@ -1048,10 +1004,10 @@

    Functions

    for rc in rcs["items"]: if rc["metadata"]["name"] == cluster_name: - mcad = _check_aw_exists(cluster_name, namespace) + appwrapper = _check_aw_exists(cluster_name, namespace) return Cluster.from_k8_cluster_object( rc, - mcad=mcad, + appwrapper=appwrapper, write_to_file=write_to_file, verify_tls=verify_tls, ) @@ -1127,7 +1083,7 @@

    Functions

    -def list_all_queued(namespace: str, print_to_console: bool = True, mcad: bool = False) +def list_all_queued(namespace: str, print_to_console: bool = True, appwrapper: bool = False)

    Returns (and prints by default) a list of all currently queued-up Ray Clusters @@ -1136,15 +1092,15 @@

    Functions

    Expand source code -
    def list_all_queued(namespace: str, print_to_console: bool = True, mcad: bool = False):
    +
    def list_all_queued(
    +    namespace: str, print_to_console: bool = True, appwrapper: bool = False
    +):
         """
         Returns (and prints by default) a list of all currently queued-up Ray Clusters
         in a given namespace.
         """
    -    if mcad:
    -        resources = _get_app_wrappers(
    -            namespace, filter=[AppWrapperStatus.RUNNING, AppWrapperStatus.PENDING]
    -        )
    +    if appwrapper:
    +        resources = _get_app_wrappers(namespace, filter=[AppWrapperStatus.SUSPENDED])
             if print_to_console:
                 pretty_print.print_app_wrappers_status(resources)
         else:
    @@ -1229,26 +1185,6 @@ 

    Classes

    ) return self._job_submission_client - def evaluate_dispatch_priority(self): - priority_class = self.config.dispatch_priority - - try: - config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) - priority_classes = api_instance.list_cluster_custom_object( - group="scheduling.k8s.io", - version="v1", - plural="priorityclasses", - ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - - for pc in priority_classes["items"]: - if pc["metadata"]["name"] == priority_class: - return pc["value"] - print(f"Priority class {priority_class} is not available in the cluster") - return None - def validate_image_config(self): """ Validates that the image configuration is not empty. @@ -1278,18 +1214,6 @@

    Classes

    self.validate_image_config() # Before attempting to create the cluster AW, let's evaluate the ClusterConfig - if self.config.dispatch_priority: - if not self.config.mcad: - raise ValueError( - "Invalid Cluster Configuration, cannot have dispatch priority without MCAD" - ) - priority_val = self.evaluate_dispatch_priority() - if priority_val == None: - raise ValueError( - "Invalid Cluster Configuration, AppWrapper not generated" - ) - else: - priority_val = None name = self.config.name namespace = self.config.namespace @@ -1304,12 +1228,10 @@

    Classes

    workers = self.config.num_workers template = self.config.template image = self.config.image - instascale = self.config.instascale - mcad = self.config.mcad + appwrapper = self.config.appwrapper instance_types = self.config.machine_types env = self.config.envs image_pull_secrets = self.config.image_pull_secrets - dispatch_priority = self.config.dispatch_priority write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls local_queue = self.config.local_queue @@ -1328,13 +1250,10 @@

    Classes

    workers=workers, template=template, image=image, - instascale=instascale, - mcad=mcad, + appwrapper=appwrapper, instance_types=instance_types, env=env, image_pull_secrets=image_pull_secrets, - dispatch_priority=dispatch_priority, - priority_val=priority_val, write_to_file=write_to_file, verify_tls=verify_tls, local_queue=local_queue, @@ -1356,13 +1275,13 @@

    Classes

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - if self.config.mcad: + if self.config.appwrapper: if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: aw = yaml.load(f, Loader=yaml.FullLoader) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", body=aw, @@ -1371,7 +1290,7 @@

    Classes

    aw = yaml.safe_load(self.app_wrapper_yaml) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", body=aw, @@ -1410,10 +1329,10 @@

    Classes

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - if self.config.mcad: + if self.config.appwrapper: api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", name=self.app_wrapper_name, @@ -1432,30 +1351,28 @@

    Classes

    """ ready = False status = CodeFlareClusterStatus.UNKNOWN - if self.config.mcad: + if self.config.appwrapper: # check the app wrapper status appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) if appwrapper: if appwrapper.status in [ - AppWrapperStatus.RUNNING, - AppWrapperStatus.COMPLETED, - AppWrapperStatus.RUNNING_HOLD_COMPLETION, + AppWrapperStatus.RESUMING, + AppWrapperStatus.RESETTING, ]: ready = False status = CodeFlareClusterStatus.STARTING elif appwrapper.status in [ AppWrapperStatus.FAILED, - AppWrapperStatus.DELETED, ]: ready = False status = CodeFlareClusterStatus.FAILED # should deleted be separate return status, ready # exit early, no need to check ray status elif appwrapper.status in [ - AppWrapperStatus.PENDING, - AppWrapperStatus.QUEUEING, + AppWrapperStatus.SUSPENDED, + AppWrapperStatus.SUSPENDING, ]: ready = False - if appwrapper.status == AppWrapperStatus.PENDING: + if appwrapper.status == AppWrapperStatus.SUSPENDED: status = CodeFlareClusterStatus.QUEUED else: status = CodeFlareClusterStatus.QUEUEING @@ -1627,7 +1544,7 @@

    Classes

    def from_k8_cluster_object( rc, - mcad=True, + appwrapper=True, write_to_file=False, verify_tls=True, ): @@ -1660,11 +1577,10 @@

    Classes

    "resources" ]["limits"]["nvidia.com/gpu"] ), - instascale=True if machine_types else False, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], - mcad=mcad, + appwrapper=appwrapper, write_to_file=write_to_file, verify_tls=verify_tls, local_queue=rc["metadata"] @@ -1845,18 +1761,6 @@

    Methods

    self.validate_image_config() # Before attempting to create the cluster AW, let's evaluate the ClusterConfig - if self.config.dispatch_priority: - if not self.config.mcad: - raise ValueError( - "Invalid Cluster Configuration, cannot have dispatch priority without MCAD" - ) - priority_val = self.evaluate_dispatch_priority() - if priority_val == None: - raise ValueError( - "Invalid Cluster Configuration, AppWrapper not generated" - ) - else: - priority_val = None name = self.config.name namespace = self.config.namespace @@ -1871,12 +1775,10 @@

    Methods

    workers = self.config.num_workers template = self.config.template image = self.config.image - instascale = self.config.instascale - mcad = self.config.mcad + appwrapper = self.config.appwrapper instance_types = self.config.machine_types env = self.config.envs image_pull_secrets = self.config.image_pull_secrets - dispatch_priority = self.config.dispatch_priority write_to_file = self.config.write_to_file verify_tls = self.config.verify_tls local_queue = self.config.local_queue @@ -1895,13 +1797,10 @@

    Methods

    workers=workers, template=template, image=image, - instascale=instascale, - mcad=mcad, + appwrapper=appwrapper, instance_types=instance_types, env=env, image_pull_secrets=image_pull_secrets, - dispatch_priority=dispatch_priority, - priority_val=priority_val, write_to_file=write_to_file, verify_tls=verify_tls, local_queue=local_queue, @@ -1945,10 +1844,10 @@

    Methods

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - if self.config.mcad: + if self.config.appwrapper: api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", name=self.app_wrapper_name, @@ -1959,38 +1858,8 @@

    Methods

    return _kube_api_error_handling(e)
    -
    -def evaluate_dispatch_priority(self) -
    -
    -
    -
    - -Expand source code - -
    def evaluate_dispatch_priority(self):
    -    priority_class = self.config.dispatch_priority
    -
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(api_config_handler())
    -        priority_classes = api_instance.list_cluster_custom_object(
    -            group="scheduling.k8s.io",
    -            version="v1",
    -            plural="priorityclasses",
    -        )
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -    for pc in priority_classes["items"]:
    -        if pc["metadata"]["name"] == priority_class:
    -            return pc["value"]
    -    print(f"Priority class {priority_class} is not available in the cluster")
    -    return None
    -
    -
    -def from_k8_cluster_object(rc, mcad=True, write_to_file=False, verify_tls=True) +def from_k8_cluster_object(rc, appwrapper=True, write_to_file=False, verify_tls=True)
    @@ -2000,7 +1869,7 @@

    Methods

    def from_k8_cluster_object(
         rc,
    -    mcad=True,
    +    appwrapper=True,
         write_to_file=False,
         verify_tls=True,
     ):
    @@ -2033,11 +1902,10 @@ 

    Methods

    "resources" ]["limits"]["nvidia.com/gpu"] ), - instascale=True if machine_types else False, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], - mcad=mcad, + appwrapper=appwrapper, write_to_file=write_to_file, verify_tls=verify_tls, local_queue=rc["metadata"] @@ -2154,30 +2022,28 @@

    Methods

    """ ready = False status = CodeFlareClusterStatus.UNKNOWN - if self.config.mcad: + if self.config.appwrapper: # check the app wrapper status appwrapper = _app_wrapper_status(self.config.name, self.config.namespace) if appwrapper: if appwrapper.status in [ - AppWrapperStatus.RUNNING, - AppWrapperStatus.COMPLETED, - AppWrapperStatus.RUNNING_HOLD_COMPLETION, + AppWrapperStatus.RESUMING, + AppWrapperStatus.RESETTING, ]: ready = False status = CodeFlareClusterStatus.STARTING elif appwrapper.status in [ AppWrapperStatus.FAILED, - AppWrapperStatus.DELETED, ]: ready = False status = CodeFlareClusterStatus.FAILED # should deleted be separate return status, ready # exit early, no need to check ray status elif appwrapper.status in [ - AppWrapperStatus.PENDING, - AppWrapperStatus.QUEUEING, + AppWrapperStatus.SUSPENDED, + AppWrapperStatus.SUSPENDING, ]: ready = False - if appwrapper.status == AppWrapperStatus.PENDING: + if appwrapper.status == AppWrapperStatus.SUSPENDED: status = CodeFlareClusterStatus.QUEUED else: status = CodeFlareClusterStatus.QUEUEING @@ -2244,13 +2110,13 @@

    Methods

    try: config_check() api_instance = client.CustomObjectsApi(api_config_handler()) - if self.config.mcad: + if self.config.appwrapper: if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: aw = yaml.load(f, Loader=yaml.FullLoader) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", body=aw, @@ -2259,7 +2125,7 @@

    Methods

    aw = yaml.safe_load(self.app_wrapper_yaml) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", - version="v1beta1", + version="v1beta2", namespace=namespace, plural="appwrappers", body=aw, @@ -2371,7 +2237,6 @@

    create_app_wrapper
  • details
  • down
  • -
  • evaluate_dispatch_priority
  • from_k8_cluster_object
  • is_dashboard_ready
  • job_client
  • diff --git a/docs/detailed-documentation/cluster/config.html b/docs/detailed-documentation/cluster/config.html index 1879afaf..4ce9dcdd 100644 --- a/docs/detailed-documentation/cluster/config.html +++ b/docs/detailed-documentation/cluster/config.html @@ -78,12 +78,10 @@

    Module codeflare_sdk.cluster.config

    max_memory: typing.Union[int, str] = 2 num_gpus: int = 0 template: str = f"{dir}/templates/base-template.yaml" - instascale: bool = False - mcad: bool = False + appwrapper: bool = False envs: dict = field(default_factory=dict) image: str = "" image_pull_secrets: list = field(default_factory=list) - dispatch_priority: str = None write_to_file: bool = False verify_tls: bool = True labels: dict = field(default_factory=dict) @@ -126,7 +124,7 @@

    Classes

    class ClusterConfiguration -(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: Union[int, str] = 2, head_memory: Union[int, str] = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: Union[int, str] = 1, max_cpus: Union[int, str] = 1, num_workers: int = 1, min_memory: Union[int, str] = 2, max_memory: Union[int, str] = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', instascale: bool = False, mcad: bool = False, envs: dict = <factory>, image: str = '', image_pull_secrets: list = <factory>, dispatch_priority: str = None, write_to_file: bool = False, verify_tls: bool = True, labels: dict = <factory>, local_queue: str = None) +(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: Union[int, str] = 2, head_memory: Union[int, str] = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: Union[int, str] = 1, max_cpus: Union[int, str] = 1, num_workers: int = 1, min_memory: Union[int, str] = 2, max_memory: Union[int, str] = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', appwrapper: bool = False, envs: dict = <factory>, image: str = '', image_pull_secrets: list = <factory>, write_to_file: bool = False, verify_tls: bool = True, labels: dict = <factory>, local_queue: str = None)

    This dataclass is used to specify resource requirements and other details, and @@ -155,12 +153,10 @@

    Classes

    max_memory: typing.Union[int, str] = 2 num_gpus: int = 0 template: str = f"{dir}/templates/base-template.yaml" - instascale: bool = False - mcad: bool = False + appwrapper: bool = False envs: dict = field(default_factory=dict) image: str = "" image_pull_secrets: list = field(default_factory=list) - dispatch_priority: str = None write_to_file: bool = False verify_tls: bool = True labels: dict = field(default_factory=dict) @@ -193,7 +189,7 @@
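
Taken together, the old mcad, instascale, and dispatch_priority knobs collapse into a single appwrapper switch plus an optional local_queue. A hedged sketch of a configuration using the new fields; the image, sizes, and queue name are illustrative only.

from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration

cluster = Cluster(ClusterConfiguration(
    name="raytest",
    namespace="default",
    num_workers=2,
    min_cpus=1,
    max_cpus=1,
    min_memory=4,
    max_memory=4,
    num_gpus=0,
    image="quay.io/project-codeflare/ray:latest-py39-cu118",  # illustrative image
    appwrapper=True,                  # wrap the RayCluster in a v1beta2 AppWrapper
    local_queue="local-queue-mnist",  # optional; if omitted, the namespace default LocalQueue is used
))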

    Classes

    Class variables

    -
    var dispatch_priority : str
    +
    var appwrapper : bool
    @@ -225,10 +221,6 @@

    Class variables

    -
    var instascale : bool
    -
    -
    -
    var labels : dict
    @@ -249,10 +241,6 @@

    Class variables

    -
    var mcad : bool
    -
    -
    -
    var min_cpus : Union[int, str]
    @@ -310,7 +298,7 @@

    Index

  • ClusterConfiguration

      -
    • dispatch_priority
    • +
    • appwrapper
    • envs
    • head_cpus
    • head_gpus
    • @@ -318,13 +306,11 @@

      head_memory
    • image
    • image_pull_secrets
    • -
    • instascale
    • labels
    • local_queue
    • machine_types
    • max_cpus
    • max_memory
    • -
    • mcad
    • min_cpus
    • min_memory
    • name
    • diff --git a/docs/detailed-documentation/cluster/model.html b/docs/detailed-documentation/cluster/model.html index 3843f545..d69357e1 100644 --- a/docs/detailed-documentation/cluster/model.html +++ b/docs/detailed-documentation/cluster/model.html @@ -70,16 +70,17 @@

      Module codeflare_sdk.cluster.model

      class AppWrapperStatus(Enum): """ - Defines the possible reportable states of an AppWrapper. + Defines the possible reportable phases of an AppWrapper. """ - QUEUEING = "queueing" - PENDING = "pending" + SUSPENDED = "suspended" + RESUMING = "resuming" RUNNING = "running" + RESETTING = "resetting" + SUSPENDING = "suspending" + SUCCEEDED = "succeeded" FAILED = "failed" - DELETED = "deleted" - COMPLETED = "completed" - RUNNING_HOLD_COMPLETION = "runningholdcompletion" + TERMINATING = "terminating" class CodeFlareClusterStatus(Enum): @@ -123,9 +124,7 @@

      Module codeflare_sdk.cluster.model

      """ name: str - status: AppWrapperStatus - can_run: bool - job_state: str
  • + status: AppWrapperStatus
    @@ -139,7 +138,7 @@

    Classes

    class AppWrapper -(name: str, status: AppWrapperStatus, can_run: bool, job_state: str) +(name: str, status: AppWrapperStatus)

    For storing information about an AppWrapper.

    @@ -153,20 +152,10 @@

    Classes

    """ name: str - status: AppWrapperStatus - can_run: bool - job_state: str + status: AppWrapperStatus

    Class variables

    -
    var can_run : bool
    -
    -
    -
    -
    var job_state : str
    -
    -
    -
    var name : str
    @@ -182,23 +171,24 @@

    Class variables

    (value, names=None, *, module=None, qualname=None, type=None, start=1)
    -

    Defines the possible reportable states of an AppWrapper.

    +

    Defines the possible reportable phases of an AppWrapper.

    Expand source code
    class AppWrapperStatus(Enum):
         """
    -    Defines the possible reportable states of an AppWrapper.
    +    Defines the possible reportable phases of an AppWrapper.
         """
     
    -    QUEUEING = "queueing"
    -    PENDING = "pending"
    +    SUSPENDED = "suspended"
    +    RESUMING = "resuming"
         RUNNING = "running"
    +    RESETTING = "resetting"
    +    SUSPENDING = "suspending"
    +    SUCCEEDED = "succeeded"
         FAILED = "failed"
    -    DELETED = "deleted"
    -    COMPLETED = "completed"
    -    RUNNING_HOLD_COMPLETION = "runningholdcompletion"
+    TERMINATING = "terminating"
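
A short illustration of working with the new phase values; the phase string is made up and no cluster access is needed.

from codeflare_sdk.cluster.model import AppWrapperStatus

phase = "Suspended"  # as reported in an AppWrapper's status.phase
status = AppWrapperStatus(phase.lower())

# Suspended/suspending wrappers are still waiting for Kueue admission.
queued = status in (AppWrapperStatus.SUSPENDED, AppWrapperStatus.SUSPENDING)
print(status, "waiting for admission" if queued else "admitted or finished")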

    Ancestors

      @@ -206,31 +196,35 @@

      Ancestors

    Class variables

    -
    var COMPLETED
    +
    var FAILED
    -
    var DELETED
    +
    var RESETTING
    -
    var FAILED
    +
    var RESUMING
    -
    var PENDING
    +
    var RUNNING
    -
    var QUEUEING
    +
    var SUCCEEDED
    -
    var RUNNING
    +
    var SUSPENDED
    +
    +
    +
    +
    var SUSPENDING
    -
    var RUNNING_HOLD_COMPLETION
    +
    var TERMINATING
    @@ -444,22 +438,21 @@

    Index

  • AppWrapper

  • AppWrapperStatus

    -
      -
    • COMPLETED
    • -
    • DELETED
    • +
    • diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index b2a3af84..5ee410ce 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -112,109 +112,11 @@

      Module codeflare_sdk.utils.generate_yaml

      metadata = yaml.get("metadata") metadata["name"] = appwrapper_name metadata["namespace"] = namespace - lower_meta = item.get("generictemplate", {}).get("metadata") - lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name + lower_meta = item.get("template", {}).get("metadata") lower_meta["name"] = cluster_name lower_meta["namespace"] = namespace -def update_labels(yaml, instascale, instance_types): - metadata = yaml.get("metadata") - if instascale: - if not len(instance_types) > 0: - sys.exit( - "If instascale is set to true, must provide at least one instance type" - ) - type_str = "" - for type in instance_types: - type_str += type + "_" - type_str = type_str[:-1] - metadata["labels"]["orderedinstance"] = type_str - else: - metadata.pop("labels") - - -def update_priority(yaml, item, dispatch_priority, priority_val): - spec = yaml.get("spec") - if dispatch_priority is not None: - if priority_val: - spec["priority"] = priority_val - else: - raise ValueError( - "AW generation error: Priority value is None, while dispatch_priority is defined" - ) - head = item.get("generictemplate").get("spec").get("headGroupSpec") - worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] - head["template"]["spec"]["priorityClassName"] = dispatch_priority - worker["template"]["spec"]["priorityClassName"] = dispatch_priority - else: - spec.pop("priority") - - -def update_custompodresources( - item, - min_cpu, - max_cpu, - min_memory, - max_memory, - gpu, - workers, - head_cpus, - head_memory, - head_gpus, -): - if "custompodresources" in item.keys(): - custompodresources = item.get("custompodresources") - for i in range(len(custompodresources)): - resource = custompodresources[i] - if i == 0: - # Leave head node resources as template default - resource["requests"]["cpu"] = head_cpus - resource["limits"]["cpu"] = head_cpus - resource["requests"]["memory"] = head_memory - resource["limits"]["memory"] = head_memory - resource["requests"]["nvidia.com/gpu"] = head_gpus - resource["limits"]["nvidia.com/gpu"] = head_gpus - - else: - for k, v in resource.items(): - if k == "replicas" and i == 1: - resource[k] = workers - if k == "requests" or k == "limits": - for spec, _ in v.items(): - if spec == "cpu": - if k == "limits": - resource[k][spec] = max_cpu - else: - resource[k][spec] = min_cpu - if spec == "memory": - if k == "limits": - resource[k][spec] = max_memory - else: - resource[k][spec] = min_memory - if spec == "nvidia.com/gpu": - if i == 0: - resource[k][spec] = 0 - else: - resource[k][spec] = gpu - else: - sys.exit("Error: malformed template") - - -def update_affinity(spec, appwrapper_name, instascale): - if instascale: - node_selector_terms = ( - spec.get("affinity") - .get("nodeAffinity") - .get("requiredDuringSchedulingIgnoredDuringExecution") - .get("nodeSelectorTerms") - ) - node_selector_terms[0]["matchExpressions"][0]["values"][0] = appwrapper_name - node_selector_terms[0]["matchExpressions"][0]["key"] = appwrapper_name - else: - spec.pop("affinity") - - def update_image(spec, image): containers = spec.get("containers") for container in containers: @@ -263,18 +165,17 @@

      Module codeflare_sdk.utils.generate_yaml

      gpu, workers, image, - instascale, env, image_pull_secrets, head_cpus, head_memory, head_gpus, ): - if "generictemplate" in item.keys(): - head = item.get("generictemplate").get("spec").get("headGroupSpec") + if "template" in item.keys(): + head = item.get("template").get("spec").get("headGroupSpec") head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) - worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] + worker = item.get("template").get("spec").get("workerGroupSpecs")[0] # Head counts as first worker worker["replicas"] = workers worker["minReplicas"] = workers @@ -284,7 +185,6 @@

      Module codeflare_sdk.utils.generate_yaml

      for comp in [head, worker]: spec = comp.get("template").get("spec") - update_affinity(spec, appwrapper_name, instascale) update_image_pull_secrets(spec, image_pull_secrets) update_image(spec, image) update_env(spec, env) @@ -359,74 +259,52 @@

      Module codeflare_sdk.utils.generate_yaml

      return False +def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]): + lq_name = local_queue or get_default_kueue_name(namespace) + if not local_queue_exists(namespace, lq_name): + raise ValueError( + "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" + ) + if not "labels" in item["metadata"]: + item["metadata"]["labels"] = {} + item["metadata"]["labels"].update({"kueue.x-k8s.io/queue-name": lq_name}) + + +def augment_labels(item: dict, labels: dict): + if "template" in item: + if not "labels" in item["template"]["metadata"]: + item["template"]["metadata"]["labels"] = {} + item["template"]["metadata"]["labels"].update(labels) + + def write_components( user_yaml: dict, output_file_name: str, - namespace: str, - local_queue: Optional[str], - labels: dict, ): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) if not os.path.exists(directory_path): os.makedirs(directory_path) - components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") + components = user_yaml.get("spec", "resources").get("components") open(output_file_name, "w").close() - lq_name = local_queue or get_default_kueue_name(namespace) - cluster_labels = labels - if not local_queue_exists(namespace, lq_name): - raise ValueError( - "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" - ) with open(output_file_name, "a") as outfile: for component in components: - if "generictemplate" in component: - if ( - "workload.codeflare.dev/appwrapper" - in component["generictemplate"]["metadata"]["labels"] - ): - del component["generictemplate"]["metadata"]["labels"][ - "workload.codeflare.dev/appwrapper" - ] - labels = component["generictemplate"]["metadata"]["labels"] - labels.update({"kueue.x-k8s.io/queue-name": lq_name}) - labels.update(cluster_labels) + if "template" in component: outfile.write("---\n") - yaml.dump( - component["generictemplate"], outfile, default_flow_style=False - ) + yaml.dump(component["template"], outfile, default_flow_style=False) print(f"Written to: {output_file_name}") def load_components( user_yaml: dict, name: str, - namespace: str, - local_queue: Optional[str], - labels: dict, ): component_list = [] - components = user_yaml.get("spec", "resources")["resources"].get("GenericItems") - lq_name = local_queue or get_default_kueue_name(namespace) - cluster_labels = labels - if not local_queue_exists(namespace, lq_name): - raise ValueError( - "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" - ) + components = user_yaml.get("spec", "resources").get("components") for component in components: - if "generictemplate" in component: - if ( - "workload.codeflare.dev/appwrapper" - in component["generictemplate"]["metadata"]["labels"] - ): - del component["generictemplate"]["metadata"]["labels"][ - "workload.codeflare.dev/appwrapper" - ] - labels = component["generictemplate"]["metadata"]["labels"] - labels.update({"kueue.x-k8s.io/queue-name": lq_name}) - labels.update(cluster_labels) - component_list.append(component["generictemplate"]) + if "template" in component: + component_list.append(component["template"]) resources = "---\n" + "---\n".join( [yaml.dump(component) for component in component_list] @@ -456,13 +334,10 @@

      Module codeflare_sdk.utils.generate_yaml

      workers: int, template: str, image: str, - instascale: bool, - mcad: bool, + appwrapper: bool, instance_types: list, env, image_pull_secrets: list, - dispatch_priority: str, - priority_val: int, write_to_file: bool, verify_tls: bool, local_queue: Optional[str], @@ -471,7 +346,7 @@

      Module codeflare_sdk.utils.generate_yaml

      user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) resources = user_yaml.get("spec", "resources") - item = resources["resources"].get("GenericItems")[0] + item = resources.get("components")[0] update_names( user_yaml, item, @@ -479,20 +354,6 @@

      Module codeflare_sdk.utils.generate_yaml

      cluster_name, namespace, ) - update_labels(user_yaml, instascale, instance_types) - update_priority(user_yaml, item, dispatch_priority, priority_val) - update_custompodresources( - item, - min_cpu, - max_cpu, - min_memory, - max_memory, - gpu, - workers, - head_cpus, - head_memory, - head_gpus, - ) update_nodes( item, appwrapper_name, @@ -503,7 +364,6 @@

      Module codeflare_sdk.utils.generate_yaml

      gpu, workers, image, - instascale, env, image_pull_secrets, head_cpus, @@ -511,20 +371,27 @@

      Module codeflare_sdk.utils.generate_yaml

      head_gpus, ) + augment_labels(item, labels) + + if appwrapper: + add_queue_label(user_yaml, namespace, local_queue) + else: + add_queue_label(item["template"], namespace, local_queue) + directory_path = os.path.expanduser("~/.codeflare/resources/") outfile = os.path.join(directory_path, appwrapper_name + ".yaml") if write_to_file: - if mcad: + if appwrapper: write_user_appwrapper(user_yaml, outfile) else: - write_components(user_yaml, outfile, namespace, local_queue, labels) + write_components(user_yaml, outfile) return outfile else: - if mcad: + if appwrapper: user_yaml = load_appwrapper(user_yaml, name) else: - user_yaml = load_components(user_yaml, name, namespace, local_queue, labels) + user_yaml = load_components(user_yaml, name) return user_yaml
  • @@ -535,6 +402,42 @@

    Module codeflare_sdk.utils.generate_yaml

    Functions

    +
    +def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]) +
    +
    +
    +
    + +Expand source code + +
    def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]):
    +    lq_name = local_queue or get_default_kueue_name(namespace)
    +    if not local_queue_exists(namespace, lq_name):
    +        raise ValueError(
    +            "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration"
    +        )
    +    if not "labels" in item["metadata"]:
    +        item["metadata"]["labels"] = {}
    +    item["metadata"]["labels"].update({"kueue.x-k8s.io/queue-name": lq_name})
    +
    +
    +
    +def augment_labels(item: dict, labels: dict) +
    +
    +
    +
    + +Expand source code + +
    def augment_labels(item: dict, labels: dict):
    +    if "template" in item:
    +        if not "labels" in item["template"]["metadata"]:
    +            item["template"]["metadata"]["labels"] = {}
    +    item["template"]["metadata"]["labels"].update(labels)
    +
    +
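
A hedged sketch of the two helpers above applied to a hand-written component dict. The names are illustrative, and add_queue_label needs an authenticated context because it resolves and validates the LocalQueue in the target namespace.

from codeflare_sdk.utils.generate_yaml import add_queue_label, augment_labels

component = {
    "template": {
        "metadata": {"name": "raytest", "namespace": "default"},
        "spec": {},
    }
}

augment_labels(component, {"team": "ml"})                # merge user labels into the RayCluster metadata
add_queue_label(component["template"], "default", None)  # None falls back to the namespace default LocalQueue
print(component["template"]["metadata"]["labels"])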
    def del_from_list_by_name(l: list, target: List[str]) ‑> list
    @@ -568,7 +471,7 @@

    Functions

    -def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, instascale: bool, mcad: bool, instance_types: list, env, image_pull_secrets: list, dispatch_priority: str, priority_val: int, write_to_file: bool, verify_tls: bool, local_queue: Optional[str], labels) +def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, appwrapper: bool, instance_types: list, env, image_pull_secrets: list, write_to_file: bool, verify_tls: bool, local_queue: Optional[str], labels)
    @@ -590,13 +493,10 @@

    Functions

    workers: int, template: str, image: str, - instascale: bool, - mcad: bool, + appwrapper: bool, instance_types: list, env, image_pull_secrets: list, - dispatch_priority: str, - priority_val: int, write_to_file: bool, verify_tls: bool, local_queue: Optional[str], @@ -605,7 +505,7 @@

    Functions

    user_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) resources = user_yaml.get("spec", "resources") - item = resources["resources"].get("GenericItems")[0] + item = resources.get("components")[0] update_names( user_yaml, item, @@ -613,20 +513,6 @@

    Functions

    cluster_name, namespace, ) - update_labels(user_yaml, instascale, instance_types) - update_priority(user_yaml, item, dispatch_priority, priority_val) - update_custompodresources( - item, - min_cpu, - max_cpu, - min_memory, - max_memory, - gpu, - workers, - head_cpus, - head_memory, - head_gpus, - ) update_nodes( item, appwrapper_name, @@ -637,7 +523,6 @@

    Functions

    gpu, workers, image, - instascale, env, image_pull_secrets, head_cpus, @@ -645,20 +530,27 @@

    Functions

    head_gpus, ) + augment_labels(item, labels) + + if appwrapper: + add_queue_label(user_yaml, namespace, local_queue) + else: + add_queue_label(item["template"], namespace, local_queue) + directory_path = os.path.expanduser("~/.codeflare/resources/") outfile = os.path.join(directory_path, appwrapper_name + ".yaml") if write_to_file: - if mcad: + if appwrapper: write_user_appwrapper(user_yaml, outfile) else: - write_components(user_yaml, outfile, namespace, local_queue, labels) + write_components(user_yaml, outfile) return outfile else: - if mcad: + if appwrapper: user_yaml = load_appwrapper(user_yaml, name) else: - user_yaml = load_components(user_yaml, name, namespace, local_queue, labels) + user_yaml = load_components(user_yaml, name) return user_yaml
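
generate_appwrapper is normally driven by Cluster.create_app_wrapper(), but it can also be called directly with the documented signature. A sketch with illustrative values; it needs cluster access for the LocalQueue lookup, and with write_to_file=False it returns the generated YAML instead of a path under ~/.codeflare/resources/.

from codeflare_sdk.utils.generate_yaml import generate_appwrapper

resources = generate_appwrapper(
    name="raytest",
    namespace="default",
    head_cpus=2, head_memory=8, head_gpus=0,
    min_cpu=1, max_cpu=1, min_memory=4, max_memory=4, gpu=0,
    workers=2,
    template="src/codeflare_sdk/templates/base-template.yaml",  # illustrative path
    image="quay.io/project-codeflare/ray:latest-py39-cu118",    # illustrative image
    appwrapper=True,      # emit an AppWrapper; False emits a labeled RayCluster
    instance_types=[], env={}, image_pull_secrets=[],
    write_to_file=False,  # return the YAML string in memory
    verify_tls=True,
    local_queue=None,     # None resolves to the namespace default LocalQueue
    labels={},
)
print(resources)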
    @@ -757,7 +649,7 @@

    Functions

    -def load_components(user_yaml: dict, name: str, namespace: str, local_queue: Optional[str], labels: dict) +def load_components(user_yaml: dict, name: str)
    @@ -768,31 +660,12 @@

    Functions

    def load_components(
         user_yaml: dict,
         name: str,
    -    namespace: str,
    -    local_queue: Optional[str],
    -    labels: dict,
     ):
         component_list = []
    -    components = user_yaml.get("spec", "resources")["resources"].get("GenericItems")
    -    lq_name = local_queue or get_default_kueue_name(namespace)
    -    cluster_labels = labels
    -    if not local_queue_exists(namespace, lq_name):
    -        raise ValueError(
    -            "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration"
    -        )
    +    components = user_yaml.get("spec", "resources").get("components")
         for component in components:
    -        if "generictemplate" in component:
    -            if (
    -                "workload.codeflare.dev/appwrapper"
    -                in component["generictemplate"]["metadata"]["labels"]
    -            ):
    -                del component["generictemplate"]["metadata"]["labels"][
    -                    "workload.codeflare.dev/appwrapper"
    -                ]
    -                labels = component["generictemplate"]["metadata"]["labels"]
    -                labels.update({"kueue.x-k8s.io/queue-name": lq_name})
    -                labels.update(cluster_labels)
    -            component_list.append(component["generictemplate"])
    +        if "template" in component:
    +            component_list.append(component["template"])
     
         resources = "---\n" + "---\n".join(
             [yaml.dump(component) for component in component_list]
    @@ -848,88 +721,6 @@ 

    Functions

    print(exc)
    -
    -def update_affinity(spec, appwrapper_name, instascale) -
    -
    -
    -
    - -Expand source code - -
    def update_affinity(spec, appwrapper_name, instascale):
    -    if instascale:
    -        node_selector_terms = (
    -            spec.get("affinity")
    -            .get("nodeAffinity")
    -            .get("requiredDuringSchedulingIgnoredDuringExecution")
    -            .get("nodeSelectorTerms")
    -        )
    -        node_selector_terms[0]["matchExpressions"][0]["values"][0] = appwrapper_name
    -        node_selector_terms[0]["matchExpressions"][0]["key"] = appwrapper_name
    -    else:
    -        spec.pop("affinity")
    -
    -
    -
    -def update_custompodresources(item, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, head_cpus, head_memory, head_gpus) -
    -
    -
    -
    - -Expand source code - -
    def update_custompodresources(
    -    item,
    -    min_cpu,
    -    max_cpu,
    -    min_memory,
    -    max_memory,
    -    gpu,
    -    workers,
    -    head_cpus,
    -    head_memory,
    -    head_gpus,
    -):
    -    if "custompodresources" in item.keys():
    -        custompodresources = item.get("custompodresources")
    -        for i in range(len(custompodresources)):
    -            resource = custompodresources[i]
    -            if i == 0:
    -                # Leave head node resources as template default
    -                resource["requests"]["cpu"] = head_cpus
    -                resource["limits"]["cpu"] = head_cpus
    -                resource["requests"]["memory"] = head_memory
    -                resource["limits"]["memory"] = head_memory
    -                resource["requests"]["nvidia.com/gpu"] = head_gpus
    -                resource["limits"]["nvidia.com/gpu"] = head_gpus
    -
    -            else:
    -                for k, v in resource.items():
    -                    if k == "replicas" and i == 1:
    -                        resource[k] = workers
    -                    if k == "requests" or k == "limits":
    -                        for spec, _ in v.items():
    -                            if spec == "cpu":
    -                                if k == "limits":
    -                                    resource[k][spec] = max_cpu
    -                                else:
    -                                    resource[k][spec] = min_cpu
    -                            if spec == "memory":
    -                                if k == "limits":
    -                                    resource[k][spec] = max_memory
    -                                else:
    -                                    resource[k][spec] = min_memory
    -                            if spec == "nvidia.com/gpu":
    -                                if i == 0:
    -                                    resource[k][spec] = 0
    -                                else:
    -                                    resource[k][spec] = gpu
    -    else:
    -        sys.exit("Error: malformed template")
    -
    -
    def update_env(spec, env)
    @@ -980,31 +771,6 @@

    Functions

    ] -
    -def update_labels(yaml, instascale, instance_types) -
    -
    -
    -
    - -Expand source code - -
    def update_labels(yaml, instascale, instance_types):
    -    metadata = yaml.get("metadata")
    -    if instascale:
    -        if not len(instance_types) > 0:
    -            sys.exit(
    -                "If instascale is set to true, must provide at least one instance type"
    -            )
    -        type_str = ""
    -        for type in instance_types:
    -            type_str += type + "_"
    -        type_str = type_str[:-1]
    -        metadata["labels"]["orderedinstance"] = type_str
    -    else:
    -        metadata.pop("labels")
    -
    -
    def update_names(yaml, item, appwrapper_name, cluster_name, namespace)
    @@ -1018,14 +784,13 @@

    Functions

    metadata = yaml.get("metadata") metadata["name"] = appwrapper_name metadata["namespace"] = namespace - lower_meta = item.get("generictemplate", {}).get("metadata") - lower_meta["labels"]["workload.codeflare.dev/appwrapper"] = appwrapper_name + lower_meta = item.get("template", {}).get("metadata") lower_meta["name"] = cluster_name lower_meta["namespace"] = namespace
    -def update_nodes(item, appwrapper_name, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, image, instascale, env, image_pull_secrets, head_cpus, head_memory, head_gpus) +def update_nodes(item, appwrapper_name, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, image, env, image_pull_secrets, head_cpus, head_memory, head_gpus)
    @@ -1043,18 +808,17 @@

    Functions

    gpu, workers, image, - instascale, env, image_pull_secrets, head_cpus, head_memory, head_gpus, ): - if "generictemplate" in item.keys(): - head = item.get("generictemplate").get("spec").get("headGroupSpec") + if "template" in item.keys(): + head = item.get("template").get("spec").get("headGroupSpec") head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) - worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0] + worker = item.get("template").get("spec").get("workerGroupSpecs")[0] # Head counts as first worker worker["replicas"] = workers worker["minReplicas"] = workers @@ -1064,7 +828,6 @@

    Functions

    for comp in [head, worker]: spec = comp.get("template").get("spec") - update_affinity(spec, appwrapper_name, instascale) update_image_pull_secrets(spec, image_pull_secrets) update_image(spec, image) update_env(spec, env) @@ -1077,32 +840,6 @@

    Functions

    update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu)
    -
    -def update_priority(yaml, item, dispatch_priority, priority_val) -
    -
    -
    -
    - -Expand source code - -
    def update_priority(yaml, item, dispatch_priority, priority_val):
    -    spec = yaml.get("spec")
    -    if dispatch_priority is not None:
    -        if priority_val:
    -            spec["priority"] = priority_val
    -        else:
    -            raise ValueError(
    -                "AW generation error: Priority value is None, while dispatch_priority is defined"
    -            )
    -        head = item.get("generictemplate").get("spec").get("headGroupSpec")
    -        worker = item.get("generictemplate").get("spec").get("workerGroupSpecs")[0]
    -        head["template"]["spec"]["priorityClassName"] = dispatch_priority
    -        worker["template"]["spec"]["priorityClassName"] = dispatch_priority
    -    else:
    -        spec.pop("priority")
    -
    -
    def update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu)
    @@ -1128,7 +865,7 @@

    Functions

    -def write_components(user_yaml: dict, output_file_name: str, namespace: str, local_queue: Optional[str], labels: dict) +def write_components(user_yaml: dict, output_file_name: str)
    @@ -1139,40 +876,19 @@

    Functions

    def write_components(
         user_yaml: dict,
         output_file_name: str,
    -    namespace: str,
    -    local_queue: Optional[str],
    -    labels: dict,
     ):
         # Create the directory if it doesn't exist
         directory_path = os.path.dirname(output_file_name)
         if not os.path.exists(directory_path):
             os.makedirs(directory_path)
     
    -    components = user_yaml.get("spec", "resources")["resources"].get("GenericItems")
    +    components = user_yaml.get("spec", "resources").get("components")
         open(output_file_name, "w").close()
    -    lq_name = local_queue or get_default_kueue_name(namespace)
    -    cluster_labels = labels
    -    if not local_queue_exists(namespace, lq_name):
    -        raise ValueError(
    -            "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration"
    -        )
         with open(output_file_name, "a") as outfile:
             for component in components:
    -            if "generictemplate" in component:
    -                if (
    -                    "workload.codeflare.dev/appwrapper"
    -                    in component["generictemplate"]["metadata"]["labels"]
    -                ):
    -                    del component["generictemplate"]["metadata"]["labels"][
    -                        "workload.codeflare.dev/appwrapper"
    -                    ]
    -                    labels = component["generictemplate"]["metadata"]["labels"]
    -                    labels.update({"kueue.x-k8s.io/queue-name": lq_name})
    -                    labels.update(cluster_labels)
    +            if "template" in component:
                     outfile.write("---\n")
    -                yaml.dump(
    -                    component["generictemplate"], outfile, default_flow_style=False
    -                )
    +                yaml.dump(component["template"], outfile, default_flow_style=False)
         print(f"Written to: {output_file_name}")
    @@ -1215,6 +931,8 @@

    Index

  • Functions

      +
    • add_queue_label
    • +
    • augment_labels
    • del_from_list_by_name
    • gen_names
    • generate_appwrapper
    • @@ -1225,15 +943,11 @@

      Index

    • load_components
    • local_queue_exists
    • read_template
    • -
    • update_affinity
    • -
    • update_custompodresources
    • update_env
    • update_image
    • update_image_pull_secrets
    • -
    • update_labels
    • update_names
    • update_nodes
    • -
    • update_priority
    • update_resources
    • write_components
    • write_user_appwrapper
    • From f3cae1adeb70bf009cdbcf040455fcdb50511a5b Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Wed, 5 Jun 2024 12:48:30 +0530 Subject: [PATCH 252/496] add provision to delete kueue resources created during test execution --- tests/e2e/local_interactive_sdk_kind_test.py | 2 +- tests/e2e/local_interactive_sdk_oauth_test.py | 2 +- .../e2e/mnist_raycluster_sdk_aw_kind_test.py | 1 + tests/e2e/mnist_raycluster_sdk_kind_test.py | 1 + tests/e2e/mnist_raycluster_sdk_oauth_test.py | 2 +- tests/e2e/support.py | 38 +++++++++++++++++-- tests/upgrade/raycluster_sdk_upgrade_test.py | 1 + 7 files changed, 41 insertions(+), 6 deletions(-) diff --git a/tests/e2e/local_interactive_sdk_kind_test.py b/tests/e2e/local_interactive_sdk_kind_test.py index 1cbc5f62..647ac472 100644 --- a/tests/e2e/local_interactive_sdk_kind_test.py +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -19,13 +19,13 @@ def setup_method(self): def teardown_method(self): delete_namespace(self) + delete_kueue_resources(self) def test_local_interactives(self): self.setup_method() create_namespace(self) create_kueue_resources(self) self.run_local_interactives() - self.teardown_method() def run_local_interactives(self): ray_image = get_ray_image() diff --git a/tests/e2e/local_interactive_sdk_oauth_test.py b/tests/e2e/local_interactive_sdk_oauth_test.py index 5210fe25..c3fd1d85 100644 --- a/tests/e2e/local_interactive_sdk_oauth_test.py +++ b/tests/e2e/local_interactive_sdk_oauth_test.py @@ -19,13 +19,13 @@ def setup_method(self): def teardown_method(self): delete_namespace(self) + delete_kueue_resources(self) def test_local_interactives(self): self.setup_method() create_namespace(self) create_kueue_resources(self) self.run_local_interactives() - self.teardown_method() def run_local_interactives(self): ray_image = get_ray_image() diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py index 2aa5da16..3ca3498d 100644 --- a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -25,6 +25,7 @@ def test_mnist_ray_cluster_sdk_kind(self): create_namespace(self) create_kueue_resources(self) self.run_mnist_raycluster_sdk_kind() + self.teardown_method() def run_mnist_raycluster_sdk_kind(self): ray_image = get_ray_image() diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index ece7069f..b486efc8 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -19,6 +19,7 @@ def setup_method(self): def teardown_method(self): delete_namespace(self) + delete_kueue_resources(self) def test_mnist_ray_cluster_sdk_kind(self): self.setup_method() diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 548c80ad..e489c39f 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -19,13 +19,13 @@ def setup_method(self): def teardown_method(self): delete_namespace(self) + delete_kueue_resources(self) def test_mnist_ray_cluster_sdk_auth(self): self.setup_method() create_namespace(self) create_kueue_resources(self) self.run_mnist_raycluster_sdk_oauth() - self.teardown_method() def run_mnist_raycluster_sdk_oauth(self): ray_image = get_ray_image() diff --git a/tests/e2e/support.py b/tests/e2e/support.py index ab9f5c69..8503b95e 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -59,11 +59,17 @@ def run_oc_command(args): return None +# Global 
variables for kueue resources +cluster_queue = "cluster-queue-mnist" +flavor = "default-flavor-mnist" +local_queue = "local-queue-mnist" + + def create_kueue_resources( self, - cluster_queue="cluster-queue-mnist", - flavor="default-flavor-mnist", - local_queue="local-queue-mnist", + cluster_queue=cluster_queue, + flavor=flavor, + local_queue=local_queue, ): print("creating Kueue resources ...") resource_flavor_json = { @@ -163,3 +169,29 @@ def create_kueue_resources( body=local_queue_json, ) print(f"'{local_queue}' created in namespace '{self.namespace}'") + + +def delete_kueue_resources(self, cluster_queue=cluster_queue, flavor=flavor): + # Delete if given cluster-queue exists + try: + self.custom_api.delete_cluster_custom_object( + group="kueue.x-k8s.io", + plural="clusterqueues", + version="v1beta1", + name=cluster_queue, + ) + print(f"\n'{cluster_queue}' cluster-queue deleted") + except Exception as e: + print(f"\nError deleting cluster-queue '{cluster_queue}' : {e}") + + # Delete if given resource-flavor exists + try: + self.custom_api.delete_cluster_custom_object( + group="kueue.x-k8s.io", + plural="resourceflavors", + version="v1beta1", + name=flavor, + ) + print(f"'{flavor}' resource-flavor deleted") + except Exception as e: + print(f"\nError deleting resource-flavor '{flavor}' : {e}") diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index 8a17654a..ef09d2e7 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -21,6 +21,7 @@ def setup_method(self): create_kueue_resources(self) except Exception as e: delete_namespace(self) + delete_kueue_resources(self) return _kube_api_error_handling(e) def test_mnist_ray_cluster_sdk_auth(self): From 9295a8e87e6c802887c171ea6402e7abe607fbd2 Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Thu, 6 Jun 2024 20:04:47 +0530 Subject: [PATCH 253/496] update kueue resource creation flow to create resources using unique name for avoiding naming conflicts and to execute tests parallely --- .../e2e/mnist_raycluster_sdk_aw_kind_test.py | 1 - tests/e2e/mnist_raycluster_sdk_kind_test.py | 1 - tests/e2e/support.py | 124 +++++++++++------- 3 files changed, 73 insertions(+), 53 deletions(-) diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py index 3ca3498d..2aa5da16 100644 --- a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -25,7 +25,6 @@ def test_mnist_ray_cluster_sdk_kind(self): create_namespace(self) create_kueue_resources(self) self.run_mnist_raycluster_sdk_kind() - self.teardown_method() def run_mnist_raycluster_sdk_kind(self): ray_image = get_ray_image() diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index b486efc8..630bc5df 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -26,7 +26,6 @@ def test_mnist_ray_cluster_sdk_kind(self): create_namespace(self) create_kueue_resources(self) self.run_mnist_raycluster_sdk_kind() - self.teardown_method() def run_mnist_raycluster_sdk_kind(self): ray_image = get_ray_image() diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 8503b95e..a3dfd89b 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -25,6 +25,21 @@ def create_namespace(self): self.api_instance.create_namespace(namespace_body) +def create_new_resource_flavor(self): + self.resource_flavor = 
f"test-resource-flavor-{random_choice()}" + create_resource_flavor(self, self.resource_flavor) + + +def create_new_cluster_queue(self): + self.cluster_queue = f"test-cluster-queue-{random_choice()}" + create_cluster_queue(self, self.cluster_queue, self.resource_flavor) + + +def create_new_local_queue(self): + self.local_queue = f"test-local-queue-{random_choice()}" + create_local_queue(self, self.cluster_queue, self.local_queue) + + def create_namespace_with_name(self, namespace_name): self.namespace = namespace_name try: @@ -59,24 +74,7 @@ def run_oc_command(args): return None -# Global variables for kueue resources -cluster_queue = "cluster-queue-mnist" -flavor = "default-flavor-mnist" -local_queue = "local-queue-mnist" - - -def create_kueue_resources( - self, - cluster_queue=cluster_queue, - flavor=flavor, - local_queue=local_queue, -): - print("creating Kueue resources ...") - resource_flavor_json = { - "apiVersion": "kueue.x-k8s.io/v1beta1", - "kind": "ResourceFlavor", - "metadata": {"name": flavor}, - } +def create_cluster_queue(self, cluster_queue, flavor): cluster_queue_json = { "apiVersion": "kueue.x-k8s.io/v1beta1", "kind": "ClusterQueue", @@ -100,54 +98,69 @@ def create_kueue_resources( ], }, } - local_queue_json = { - "apiVersion": "kueue.x-k8s.io/v1beta1", - "kind": "LocalQueue", - "metadata": { - "namespace": self.namespace, - "name": local_queue, - "annotations": {"kueue.x-k8s.io/default-queue": "true"}, - }, - "spec": {"clusterQueue": cluster_queue}, - } try: - # Check if resource flavor exists + # Check if cluster-queue exists self.custom_api.get_cluster_custom_object( group="kueue.x-k8s.io", - plural="resourceflavors", + plural="clusterqueues", version="v1beta1", - name=flavor, + name=cluster_queue, ) - print(f"'{flavor}' already exists") + print(f"'{cluster_queue}' already exists") except: - # create kueue resource flavor + # create cluster-queue self.custom_api.create_cluster_custom_object( group="kueue.x-k8s.io", - plural="resourceflavors", + plural="clusterqueues", version="v1beta1", - body=resource_flavor_json, + body=cluster_queue_json, ) - print(f"'{flavor}' created!") + print(f"'{cluster_queue}' created") + + self.cluster_queue = cluster_queue + + +def create_resource_flavor(self, flavor): + resource_flavor_json = { + "apiVersion": "kueue.x-k8s.io/v1beta1", + "kind": "ResourceFlavor", + "metadata": {"name": flavor}, + } try: - # Check if cluster-queue exists + # Check if resource flavor exists self.custom_api.get_cluster_custom_object( group="kueue.x-k8s.io", - plural="clusterqueues", + plural="resourceflavors", version="v1beta1", - name=cluster_queue, + name=flavor, ) - print(f"'{cluster_queue}' already exists") + print(f"'{flavor}' already exists") except: - # create cluster-queue + # create kueue resource flavor self.custom_api.create_cluster_custom_object( group="kueue.x-k8s.io", - plural="clusterqueues", + plural="resourceflavors", version="v1beta1", - body=cluster_queue_json, + body=resource_flavor_json, ) - print(f"'{cluster_queue}' created") + print(f"'{flavor}' created!") + + self.resource_flavor = flavor + + +def create_local_queue(self, cluster_queue, local_queue): + local_queue_json = { + "apiVersion": "kueue.x-k8s.io/v1beta1", + "kind": "LocalQueue", + "metadata": { + "namespace": self.namespace, + "name": local_queue, + "annotations": {"kueue.x-k8s.io/default-queue": "true"}, + }, + "spec": {"clusterQueue": cluster_queue}, + } try: # Check if local-queue exists in given namespace @@ -170,19 +183,28 @@ def create_kueue_resources( ) 
print(f"'{local_queue}' created in namespace '{self.namespace}'") + self.local_queue = local_queue + -def delete_kueue_resources(self, cluster_queue=cluster_queue, flavor=flavor): +def create_kueue_resources(self): + print("creating Kueue resources ...") + create_new_resource_flavor(self) + create_new_cluster_queue(self) + create_new_local_queue(self) + + +def delete_kueue_resources(self): # Delete if given cluster-queue exists try: self.custom_api.delete_cluster_custom_object( group="kueue.x-k8s.io", plural="clusterqueues", version="v1beta1", - name=cluster_queue, + name=self.cluster_queue, ) - print(f"\n'{cluster_queue}' cluster-queue deleted") + print(f"\n'{self.cluster_queue}' cluster-queue deleted") except Exception as e: - print(f"\nError deleting cluster-queue '{cluster_queue}' : {e}") + print(f"\nError deleting cluster-queue '{self.cluster_queue}' : {e}") # Delete if given resource-flavor exists try: @@ -190,8 +212,8 @@ def delete_kueue_resources(self, cluster_queue=cluster_queue, flavor=flavor): group="kueue.x-k8s.io", plural="resourceflavors", version="v1beta1", - name=flavor, + name=self.resource_flavor, ) - print(f"'{flavor}' resource-flavor deleted") + print(f"'{self.resource_flavor}' resource-flavor deleted") except Exception as e: - print(f"\nError deleting resource-flavor '{flavor}' : {e}") + print(f"\nError deleting resource-flavor '{self.resource_flavor}' : {e}") From 8cfcbde934bca2cc566e8858154ad0c44a9ce80c Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Fri, 7 Jun 2024 11:58:00 +0530 Subject: [PATCH 254/496] Update upgrade test to create kueue resources with specfic pre-defined names --- tests/upgrade/raycluster_sdk_upgrade_test.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index ef09d2e7..d83e6289 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -10,6 +10,10 @@ from codeflare_sdk.utils.kube_api_helpers import _kube_api_error_handling namespace = "test-ns-rayupgrade" +# Global variables for kueue resources +cluster_queue = "cluster-queue-mnist" +flavor = "default-flavor-mnist" +local_queue = "local-queue-mnist" # Creates a Ray cluster @@ -18,7 +22,9 @@ def setup_method(self): initialize_kubernetes_client(self) create_namespace_with_name(self, namespace) try: - create_kueue_resources(self) + create_cluster_queue(self, cluster_queue, flavor) + create_resource_flavor(self, flavor) + create_local_queue(self, cluster_queue, local_queue) except Exception as e: delete_namespace(self) delete_kueue_resources(self) From eb643ca0a4d8d66dba5d8571efbf03a16ad2fa71 Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Fri, 7 Jun 2024 09:20:25 +0200 Subject: [PATCH 255/496] Print Pytest output logs for easier test failure analysis --- .github/workflows/e2e_tests.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 9ef73315..b83afb4b 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -143,6 +143,12 @@ jobs: if: always() && steps.deploy.outcome == 'success' run: kubectl config use-context kind-cluster + - name: Print Pytest output log + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing Pytest output logs" + cat ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log + - name: Print CodeFlare operator logs if: always() && steps.deploy.outcome == 'success' run: | From 
4cef42e195343175e0514066774cd369af505a31 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Thu, 30 May 2024 16:55:01 +0100 Subject: [PATCH 256/496] Add notebook prefix annotation to RayCluster --- src/codeflare_sdk/utils/generate_yaml.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index dcd4a42c..d6290c5c 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -246,6 +246,17 @@ def augment_labels(item: dict, labels: dict): item["template"]["metadata"]["labels"].update(labels) +def notebook_annotations(item: dict): + nb_prefix = os.environ.get("NB_PREFIX") + if nb_prefix: + if "template" in item: + if not "annotations" in item["template"]["metadata"]: + item["template"]["metadata"]["annotations"] = {} + item["template"]["metadata"]["annotations"].update( + {"app.kubernetes.io/managed-by": nb_prefix} + ) + + def write_components( user_yaml: dict, output_file_name: str, @@ -341,6 +352,7 @@ def generate_appwrapper( ) augment_labels(item, labels) + notebook_annotations(item) if appwrapper: add_queue_label(user_yaml, namespace, local_queue) From 2408b7a9834f665e329acfcd2d7515596040e06c Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Mon, 10 Jun 2024 09:35:00 +0100 Subject: [PATCH 257/496] Tests for the annotation prefix to RayCluster --- tests/test-case-no-mcad.yamls | 2 ++ tests/unit_test.py | 3 +++ 2 files changed, 5 insertions(+) diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 367703d6..b69c0d60 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -2,6 +2,8 @@ apiVersion: ray.io/v1 kind: RayCluster metadata: + annotations: + app.kubernetes.io/managed-by: test-prefix labels: controller-tools.k8s.io: '1.0' kueue.x-k8s.io/queue-name: local-queue-default diff --git a/tests/unit_test.py b/tests/unit_test.py index 32d730c4..b54d6549 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -345,6 +345,8 @@ def test_cluster_creation_no_mcad(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) + mocker.patch("os.environ.get", return_value="test-prefix") + config = createClusterConfig() config.name = "unit-test-cluster-ray" config.write_to_file = True @@ -373,6 +375,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) + mocker.patch("os.environ.get", return_value="test-prefix") config = createClusterConfig() config.name = "unit-test-cluster-ray" config.appwrapper = False From a3d138d22283126102d0bd9808f8cd164ea5972a Mon Sep 17 00:00:00 2001 From: Srihari Date: Tue, 11 Jun 2024 18:24:44 +0530 Subject: [PATCH 258/496] Update TestMnistJobSubmit to skip cluster down for UI verification --- tests/upgrade/raycluster_sdk_upgrade_test.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index d83e6289..fb160be1 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -97,7 +97,6 @@ def test_mnist_job_submission(self): self.assert_jobsubmit_withoutLogin(self.cluster) self.assert_jobsubmit_withlogin(self.cluster) - self.cluster.down() # Assertions def 
assert_jobsubmit_withoutLogin(self, cluster): @@ -147,7 +146,6 @@ def assert_jobsubmit_withlogin(self, cluster): self.assert_job_completion(status) client.delete_job(submission_id) - cluster.down() def assert_job_completion(self, status): if status == "SUCCEEDED": From 6798b74953d36cbe7968459b4be72ef38e98744c Mon Sep 17 00:00:00 2001 From: Drew Ripberger Date: Tue, 4 Jun 2024 16:42:24 -0400 Subject: [PATCH 259/496] Update requirements.txt to new library versions --- demo-notebooks/guided-demos/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/demo-notebooks/guided-demos/requirements.txt b/demo-notebooks/guided-demos/requirements.txt index 7266b064..af1bb36b 100644 --- a/demo-notebooks/guided-demos/requirements.txt +++ b/demo-notebooks/guided-demos/requirements.txt @@ -1,4 +1,4 @@ -pytorch_lightning==1.5.10 +pytorch_lightning==2.2.5 ray_lightning -torchmetrics==0.9.1 -torchvision==0.12.0 +torchmetrics==1.4.0 +torchvision==0.18.0 From 3864bcfc61e17612b0c726b9b9b3e25fcd3569b2 Mon Sep 17 00:00:00 2001 From: Srihari Venkataramaiah Date: Fri, 14 Jun 2024 15:24:56 +0530 Subject: [PATCH 260/496] update Token Authentication for TestMnistJobSubmit before step get_cluster (#561) --- tests/upgrade/raycluster_sdk_upgrade_test.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index fb160be1..235383f6 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -82,19 +82,18 @@ def run_mnist_raycluster_sdk_oauth(self): class TestMnistJobSubmit: def setup_method(self): initialize_kubernetes_client(self) - self.namespace = namespace - self.cluster = get_cluster("mnist", self.namespace) - if not self.cluster: - raise RuntimeError("TestRayClusterUp needs to be run before this test") - - def test_mnist_job_submission(self): auth = TokenAuthentication( token=run_oc_command(["whoami", "--show-token=true"]), server=run_oc_command(["whoami", "--show-server=true"]), skip_tls=True, ) auth.login() + self.namespace = namespace + self.cluster = get_cluster("mnist", self.namespace) + if not self.cluster: + raise RuntimeError("TestRayClusterUp needs to be run before this test") + def test_mnist_job_submission(self): self.assert_jobsubmit_withoutLogin(self.cluster) self.assert_jobsubmit_withlogin(self.cluster) From 3d9ebc9bd1e359f88fc3f6dce3d26ad5fd2a5966 Mon Sep 17 00:00:00 2001 From: David Grove Date: Fri, 7 Jun 2024 14:10:02 -0400 Subject: [PATCH 261/496] Simplify yaml generation by starting from non-AppWrapper template The v1/beta2 AppWrapper can be wrapped around a RayCluster as a simple prefix to the "real" yaml. So we can simplify yaml generation/manipulation by starting with a vanilla RayCluster and just optionally put it inside an AppWrapper at the end. --- src/codeflare_sdk/cluster/awload.py | 2 +- src/codeflare_sdk/cluster/cluster.py | 8 +- .../templates/base-template.yaml | 383 +++++++++--------- src/codeflare_sdk/utils/generate_yaml.py | 180 +++----- tests/test-case-no-mcad.yamls | 1 - tests/unit_test.py | 2 - 6 files changed, 253 insertions(+), 323 deletions(-) diff --git a/src/codeflare_sdk/cluster/awload.py b/src/codeflare_sdk/cluster/awload.py index c622f877..7455b216 100644 --- a/src/codeflare_sdk/cluster/awload.py +++ b/src/codeflare_sdk/cluster/awload.py @@ -30,7 +30,7 @@ class AWManager: """ An object for submitting and removing existing AppWrapper yamls - to be added to the MCAD queue. 
+ to be added to the Kueue localqueue. """ def __init__(self, filename: str) -> None: diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 35c26b0a..e5bbcd86 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -147,11 +147,9 @@ def create_app_wrapper(self): template = self.config.template image = self.config.image appwrapper = self.config.appwrapper - instance_types = self.config.machine_types env = self.config.envs image_pull_secrets = self.config.image_pull_secrets write_to_file = self.config.write_to_file - verify_tls = self.config.verify_tls local_queue = self.config.local_queue labels = self.config.labels return generate_appwrapper( @@ -169,11 +167,9 @@ def create_app_wrapper(self): template=template, image=image, appwrapper=appwrapper, - instance_types=instance_types, env=env, image_pull_secrets=image_pull_secrets, write_to_file=write_to_file, - verify_tls=verify_tls, local_queue=local_queue, labels=labels, ) @@ -181,8 +177,8 @@ def create_app_wrapper(self): # creates a new cluster with the provided or default spec def up(self): """ - Applies the AppWrapper yaml, pushing the resource request onto - the MCAD queue. + Applies the Cluster yaml, pushing the resource request onto + the Kueue localqueue. """ # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index b6a70b2b..0e071961 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -1,207 +1,198 @@ -apiVersion: workload.codeflare.dev/v1beta2 -kind: AppWrapper +# This config demonstrates KubeRay's Ray autoscaler integration. +# The resource requests and limits in this config are too small for production! +# For an example with more realistic resource configuration, see +# ray-cluster.autoscaler.large.yaml. +apiVersion: ray.io/v1 +kind: RayCluster metadata: - name: aw-kuberay + labels: + controller-tools.k8s.io: "1.0" + # A unique identifier for the head node and workers of this cluster. + name: kuberay-cluster namespace: default spec: - components: - - template: - # This config demonstrates KubeRay's Ray autoscaler integration. - # The resource requests and limits in this config are too small for production! - # For an example with more realistic resource configuration, see - # ray-cluster.autoscaler.large.yaml. - apiVersion: ray.io/v1 - kind: RayCluster + # The version of Ray you are using. Make sure all Ray containers are running this version of Ray. + rayVersion: '2.7.0' + # If enableInTreeAutoscaling is true, the autoscaler sidecar will be added to the Ray head pod. + # Ray autoscaler integration is supported only for Ray versions >= 1.11.0 + # Ray autoscaler integration is Beta with KubeRay >= 0.3.0 and Ray >= 2.0.0. + enableInTreeAutoscaling: false + # autoscalerOptions is an OPTIONAL field specifying configuration overrides for the Ray autoscaler. + # The example configuration shown below below represents the DEFAULT values. + # (You may delete autoscalerOptions if the defaults are suitable.) + autoscalerOptions: + # upscalingMode is "Default" or "Aggressive." + # Conservative: Upscaling is rate-limited; the number of pending worker pods is at most the size of the Ray cluster. + # Default: Upscaling is not rate-limited. + # Aggressive: An alias for Default; upscaling is not rate-limited. 
+ upscalingMode: Default + # idleTimeoutSeconds is the number of seconds to wait before scaling down a worker pod which is not using Ray resources. + idleTimeoutSeconds: 60 + # image optionally overrides the autoscaler's container image. + # If instance.spec.rayVersion is at least "2.0.0", the autoscaler will default to the same image as + # the ray container. For older Ray versions, the autoscaler will default to using the Ray 2.0.0 image. + ## image: "my-repo/my-custom-autoscaler-image:tag" + # imagePullPolicy optionally overrides the autoscaler container's image pull policy. + imagePullPolicy: Always + # resources specifies optional resource request and limit overrides for the autoscaler container. + # For large Ray clusters, we recommend monitoring container resource usage to determine if overriding the defaults is required. + resources: + limits: + cpu: "500m" + memory: "512Mi" + requests: + cpu: "500m" + memory: "512Mi" + ######################headGroupSpec################################# + # head group template and specs, (perhaps 'group' is not needed in the name) + headGroupSpec: + # Kubernetes Service Type, valid values are 'ClusterIP', 'NodePort' and 'LoadBalancer' + serviceType: ClusterIP + enableIngress: false + # logical group name, for this called head-group, also can be functional + # pod type head or worker + # rayNodeType: head # Not needed since it is under the headgroup + # the following params are used to complete the ray start: ray start --head --block ... + rayStartParams: + # Flag "no-monitor" will be automatically set when autoscaling is enabled. + dashboard-host: '0.0.0.0' + block: 'true' + # num-cpus: '1' # can be auto-completed from the limits + # Use `resources` to optionally specify custom resource annotations for the Ray node. + # The value of `resources` is a string-integer mapping. 
+ # Currently, `resources` must be provided in the specific format demonstrated below: + # resources: '"{\"Custom1\": 1, \"Custom2\": 5}"' + num-gpus: '0' + #pod template + template: + spec: + containers: + # The Ray head pod + - name: ray-head + image: quay.io/project-codeflare/ray:latest-py39-cu118 + imagePullPolicy: Always + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + lifecycle: + preStop: + exec: + command: ["/bin/sh","-c","ray stop"] + resources: + limits: + cpu: 2 + memory: "8G" + nvidia.com/gpu: 0 + requests: + cpu: 2 + memory: "8G" + nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + volumes: + - name: odh-trusted-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + optional: true + - name: odh-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + optional: true + workerGroupSpecs: + # the pod replicas in this group typed worker + - replicas: 3 + minReplicas: 3 + maxReplicas: 3 + # logical group name, for this called small-group, also can be functional + groupName: small-group + # if worker pods need to be added, we can simply increment the replicas + # if worker pods need to be removed, we decrement the replicas, and populate the podsToDelete list + # the operator will remove pods from the list until the number of replicas is satisfied + # when a pod is confirmed to be deleted, its name will be removed from the list below + #scaleStrategy: + # workersToDelete: + # - raycluster-complete-worker-small-group-bdtwh + # - raycluster-complete-worker-small-group-hv457 + # - raycluster-complete-worker-small-group-k8tj7 + # the following params are used to complete the ray start: ray start --block ... + rayStartParams: + block: 'true' + num-gpus: 1 + #pod template + template: metadata: labels: - controller-tools.k8s.io: "1.0" - # A unique identifier for the head node and workers of this cluster. - name: kuberay-cluster + key: value + # annotations for pod + annotations: + key: value # finalizers: # - kubernetes spec: - # The version of Ray you are using. Make sure all Ray containers are running this version of Ray. - rayVersion: '2.7.0' - # If enableInTreeAutoscaling is true, the autoscaler sidecar will be added to the Ray head pod. - # Ray autoscaler integration is supported only for Ray versions >= 1.11.0 - # Ray autoscaler integration is Beta with KubeRay >= 0.3.0 and Ray >= 2.0.0. - enableInTreeAutoscaling: false - # autoscalerOptions is an OPTIONAL field specifying configuration overrides for the Ray autoscaler. - # The example configuration shown below below represents the DEFAULT values. - # (You may delete autoscalerOptions if the defaults are suitable.) - autoscalerOptions: - # upscalingMode is "Default" or "Aggressive." - # Conservative: Upscaling is rate-limited; the number of pending worker pods is at most the size of the Ray cluster. - # Default: Upscaling is not rate-limited. - # Aggressive: An alias for Default; upscaling is not rate-limited. 
- upscalingMode: Default - # idleTimeoutSeconds is the number of seconds to wait before scaling down a worker pod which is not using Ray resources. - idleTimeoutSeconds: 60 - # image optionally overrides the autoscaler's container image. - # If instance.spec.rayVersion is at least "2.0.0", the autoscaler will default to the same image as - # the ray container. For older Ray versions, the autoscaler will default to using the Ray 2.0.0 image. - ## image: "my-repo/my-custom-autoscaler-image:tag" - # imagePullPolicy optionally overrides the autoscaler container's image pull policy. - imagePullPolicy: Always - # resources specifies optional resource request and limit overrides for the autoscaler container. - # For large Ray clusters, we recommend monitoring container resource usage to determine if overriding the defaults is required. + containers: + - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' + image: quay.io/project-codeflare/ray:latest-py39-cu118 + # environment variables to set in the container.Optional. + # Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ + lifecycle: + preStop: + exec: + command: ["/bin/sh","-c","ray stop"] resources: limits: - cpu: "500m" - memory: "512Mi" + cpu: "2" + memory: "12G" + nvidia.com/gpu: "1" requests: - cpu: "500m" - memory: "512Mi" - ######################headGroupSpec################################# - # head group template and specs, (perhaps 'group' is not needed in the name) - headGroupSpec: - # Kubernetes Service Type, valid values are 'ClusterIP', 'NodePort' and 'LoadBalancer' - serviceType: ClusterIP - enableIngress: false - # logical group name, for this called head-group, also can be functional - # pod type head or worker - # rayNodeType: head # Not needed since it is under the headgroup - # the following params are used to complete the ray start: ray start --head --block ... - rayStartParams: - # Flag "no-monitor" will be automatically set when autoscaling is enabled. - dashboard-host: '0.0.0.0' - block: 'true' - # num-cpus: '1' # can be auto-completed from the limits - # Use `resources` to optionally specify custom resource annotations for the Ray node. - # The value of `resources` is a string-integer mapping. 
- # Currently, `resources` must be provided in the specific format demonstrated below: - # resources: '"{\"Custom1\": 1, \"Custom2\": 5}"' - num-gpus: '0' - #pod template - template: - spec: - containers: - # The Ray head pod - - name: ray-head - image: quay.io/project-codeflare/ray:latest-py39-cu118 - imagePullPolicy: Always - ports: - - containerPort: 6379 - name: gcs - - containerPort: 8265 - name: dashboard - - containerPort: 10001 - name: client - lifecycle: - preStop: - exec: - command: ["/bin/sh","-c","ray stop"] - resources: - limits: - cpu: 2 - memory: "8G" - nvidia.com/gpu: 0 - requests: - cpu: 2 - memory: "8G" - nvidia.com/gpu: 0 - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - volumes: - - name: odh-trusted-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - optional: true - - name: odh-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - optional: true - workerGroupSpecs: - # the pod replicas in this group typed worker - - replicas: 3 - minReplicas: 3 - maxReplicas: 3 - # logical group name, for this called small-group, also can be functional - groupName: small-group - # if worker pods need to be added, we can simply increment the replicas - # if worker pods need to be removed, we decrement the replicas, and populate the podsToDelete list - # the operator will remove pods from the list until the number of replicas is satisfied - # when a pod is confirmed to be deleted, its name will be removed from the list below - #scaleStrategy: - # workersToDelete: - # - raycluster-complete-worker-small-group-bdtwh - # - raycluster-complete-worker-small-group-hv457 - # - raycluster-complete-worker-small-group-k8tj7 - # the following params are used to complete the ray start: ray start --block ... - rayStartParams: - block: 'true' - num-gpus: 1 - #pod template - template: - metadata: - labels: - key: value - # annotations for pod - annotations: - key: value - # finalizers: - # - kubernetes - spec: - containers: - - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - image: quay.io/project-codeflare/ray:latest-py39-cu118 - # environment variables to set in the container.Optional. 
- # Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ - lifecycle: - preStop: - exec: - command: ["/bin/sh","-c","ray stop"] - resources: - limits: - cpu: "2" - memory: "12G" - nvidia.com/gpu: "1" - requests: - cpu: "2" - memory: "12G" - nvidia.com/gpu: "1" - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - volumes: - - name: odh-trusted-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - optional: true - - name: odh-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - optional: true + cpu: "2" + memory: "12G" + nvidia.com/gpu: "1" + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + volumes: + - name: odh-trusted-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + optional: true + - name: odh-ca-cert + configMap: + name: odh-trusted-ca-bundle + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + optional: true diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index d6290c5c..30edcd91 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -77,13 +77,10 @@ def is_kind_cluster(): return False -def update_names(yaml, item, appwrapper_name, cluster_name, namespace): - metadata = yaml.get("metadata") - metadata["name"] = appwrapper_name - metadata["namespace"] = namespace - lower_meta = item.get("template", {}).get("metadata") - lower_meta["name"] = cluster_name - lower_meta["namespace"] = namespace +def update_names(cluster_yaml, cluster_name, namespace): + meta = cluster_yaml.get("metadata") + meta["name"] = cluster_name + meta["namespace"] = namespace def update_image(spec, image): @@ -125,7 +122,7 @@ def update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu): def update_nodes( - item, + cluster_yaml, appwrapper_name, min_cpu, max_cpu, @@ -140,48 +137,35 @@ def update_nodes( head_memory, head_gpus, ): - if "template" in item.keys(): - head = item.get("template").get("spec").get("headGroupSpec") - head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) - - worker = item.get("template").get("spec").get("workerGroupSpecs")[0] - # Head counts as first worker - worker["replicas"] = workers - worker["minReplicas"] = workers - worker["maxReplicas"] = workers - worker["groupName"] = "small-group-" + appwrapper_name - worker["rayStartParams"]["num-gpus"] = str(int(gpu)) - - for comp in [head, worker]: - spec = comp.get("template").get("spec") - update_image_pull_secrets(spec, image_pull_secrets) - update_image(spec, image) - 
update_env(spec, env) - if comp == head: - # TODO: Eventually add head node configuration outside of template - update_resources( - spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus - ) - else: - update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) + head = cluster_yaml.get("spec").get("headGroupSpec") + head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) + + worker = cluster_yaml.get("spec").get("workerGroupSpecs")[0] + # Head counts as first worker + worker["replicas"] = workers + worker["minReplicas"] = workers + worker["maxReplicas"] = workers + worker["groupName"] = "small-group-" + appwrapper_name + worker["rayStartParams"]["num-gpus"] = str(int(gpu)) + + for comp in [head, worker]: + spec = comp.get("template").get("spec") + update_image_pull_secrets(spec, image_pull_secrets) + update_image(spec, image) + update_env(spec, env) + if comp == head: + # TODO: Eventually add head node configuration outside of template + update_resources( + spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus + ) + else: + update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) def del_from_list_by_name(l: list, target: typing.List[str]) -> list: return [x for x in l if x["name"] not in target] -def write_user_appwrapper(user_yaml, output_file_name): - # Create the directory if it doesn't exist - directory_path = os.path.dirname(output_file_name) - if not os.path.exists(directory_path): - os.makedirs(directory_path) - - with open(output_file_name, "w") as outfile: - yaml.dump(user_yaml, outfile, default_flow_style=False) - - print(f"Written to: {output_file_name}") - - def get_default_kueue_name(namespace: str): # If the local queue is set, use it. Otherwise, try to use the default queue. try: @@ -240,64 +224,40 @@ def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]): def augment_labels(item: dict, labels: dict): - if "template" in item: - if not "labels" in item["template"]["metadata"]: - item["template"]["metadata"]["labels"] = {} - item["template"]["metadata"]["labels"].update(labels) + if not "labels" in item["metadata"]: + item["metadata"]["labels"] = {} + item["metadata"]["labels"].update(labels) def notebook_annotations(item: dict): nb_prefix = os.environ.get("NB_PREFIX") if nb_prefix: - if "template" in item: - if not "annotations" in item["template"]["metadata"]: - item["template"]["metadata"]["annotations"] = {} - item["template"]["metadata"]["annotations"].update( + if not "annotations" in item["metadata"]: + item["metadata"]["annotations"] = {} + item["metadata"]["annotations"].update( {"app.kubernetes.io/managed-by": nb_prefix} ) -def write_components( - user_yaml: dict, - output_file_name: str, -): +def wrap_cluster(cluster_yaml: dict, appwrapper_name: str, namespace: str): + return { + "apiVersion": "workload.codeflare.dev/v1beta2", + "kind": "AppWrapper", + "metadata": {"name": appwrapper_name, "namespace": namespace}, + "spec": {"components": [{"template": cluster_yaml}]}, + } + + +def write_user_yaml(user_yaml, output_file_name): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) if not os.path.exists(directory_path): os.makedirs(directory_path) - components = user_yaml.get("spec", "resources").get("components") - open(output_file_name, "w").close() - with open(output_file_name, "a") as outfile: - for component in components: - if "template" in component: - outfile.write("---\n") - yaml.dump(component["template"], outfile, default_flow_style=False) - 
print(f"Written to: {output_file_name}") - - -def load_components( - user_yaml: dict, - name: str, -): - component_list = [] - components = user_yaml.get("spec", "resources").get("components") - for component in components: - if "template" in component: - component_list.append(component["template"]) - - resources = "---\n" + "---\n".join( - [yaml.dump(component) for component in component_list] - ) - user_yaml = resources - print(f"Yaml resources loaded for {name}") - return user_yaml - + with open(output_file_name, "w") as outfile: + yaml.dump(user_yaml, outfile, default_flow_style=False) -def load_appwrapper(user_yaml: dict, name: str): - user_yaml = yaml.dump(user_yaml) - print(f"Yaml resources loaded for {name}") - return user_yaml + print(f"Written to: {output_file_name}") def generate_appwrapper( @@ -315,27 +275,17 @@ def generate_appwrapper( template: str, image: str, appwrapper: bool, - instance_types: list, env, image_pull_secrets: list, write_to_file: bool, - verify_tls: bool, local_queue: Optional[str], labels, ): - user_yaml = read_template(template) + cluster_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) - resources = user_yaml.get("spec", "resources") - item = resources.get("components")[0] - update_names( - user_yaml, - item, - appwrapper_name, - cluster_name, - namespace, - ) + update_names(cluster_yaml, cluster_name, namespace) update_nodes( - item, + cluster_yaml, appwrapper_name, min_cpu, max_cpu, @@ -350,27 +300,23 @@ def generate_appwrapper( head_memory, head_gpus, ) + augment_labels(cluster_yaml, labels) + notebook_annotations(cluster_yaml) - augment_labels(item, labels) - notebook_annotations(item) - - if appwrapper: - add_queue_label(user_yaml, namespace, local_queue) - else: - add_queue_label(item["template"], namespace, local_queue) + user_yaml = ( + wrap_cluster(cluster_yaml, appwrapper_name, namespace) + if appwrapper + else cluster_yaml + ) - directory_path = os.path.expanduser("~/.codeflare/resources/") - outfile = os.path.join(directory_path, appwrapper_name + ".yaml") + add_queue_label(user_yaml, namespace, local_queue) if write_to_file: - if appwrapper: - write_user_appwrapper(user_yaml, outfile) - else: - write_components(user_yaml, outfile) + directory_path = os.path.expanduser("~/.codeflare/resources/") + outfile = os.path.join(directory_path, appwrapper_name + ".yaml") + write_user_yaml(user_yaml, outfile) return outfile else: - if appwrapper: - user_yaml = load_appwrapper(user_yaml, name) - else: - user_yaml = load_components(user_yaml, name) + user_yaml = yaml.dump(user_yaml) + print(f"Yaml resources loaded for {name}") return user_yaml diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index b69c0d60..37d5d4db 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -1,4 +1,3 @@ ---- apiVersion: ray.io/v1 kind: RayCluster metadata: diff --git a/tests/unit_test.py b/tests/unit_test.py index b54d6549..bdd5ffad 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -73,8 +73,6 @@ from codeflare_sdk.utils.generate_yaml import ( gen_names, is_openshift_cluster, - read_template, - write_components, ) import openshift From c68d425377105a30ec591364f73b993991afaa47 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 18 Jun 2024 09:29:52 +0100 Subject: [PATCH 262/496] Added custom Volumes and Volume Mounts support --- src/codeflare_sdk/cluster/cluster.py | 4 ++++ src/codeflare_sdk/cluster/config.py | 2 ++ src/codeflare_sdk/utils/generate_yaml.py | 22 ++++++++++++++++++++++ 
3 files changed, 28 insertions(+) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index e5bbcd86..015f15ed 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -152,6 +152,8 @@ def create_app_wrapper(self): write_to_file = self.config.write_to_file local_queue = self.config.local_queue labels = self.config.labels + volumes = self.config.volumes + volume_mounts = self.config.volume_mounts return generate_appwrapper( name=name, namespace=namespace, @@ -172,6 +174,8 @@ def create_app_wrapper(self): write_to_file=write_to_file, local_queue=local_queue, labels=labels, + volumes=volumes, + volume_mounts=volume_mounts, ) # creates a new cluster with the provided or default spec diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 9e069c37..97067365 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -53,6 +53,8 @@ class ClusterConfiguration: write_to_file: bool = False verify_tls: bool = True labels: dict = field(default_factory=dict) + volumes: list = field(default_factory=list) + volume_mounts: list = field(default_factory=list) def __post_init__(self): if not self.verify_tls: diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 30edcd91..3192ae1b 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -96,6 +96,20 @@ def update_image_pull_secrets(spec, image_pull_secrets): ] +def update_volume_mounts(spec, volume_mounts: list): + containers = spec.get("containers") + for volume_mount in volume_mounts: + for container in containers: + volumeMount = client.ApiClient().sanitize_for_serialization(volume_mount) + container["volumeMounts"].append(volumeMount) + + +def update_volumes(spec, volumes: list): + for volume in volumes: + new_volume = client.ApiClient().sanitize_for_serialization(volume) + spec["volumes"].append(new_volume) + + def update_env(spec, env): containers = spec.get("containers") for container in containers: @@ -136,6 +150,8 @@ def update_nodes( head_cpus, head_memory, head_gpus, + volumes, + volume_mounts, ): head = cluster_yaml.get("spec").get("headGroupSpec") head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) @@ -150,6 +166,8 @@ def update_nodes( for comp in [head, worker]: spec = comp.get("template").get("spec") + update_volume_mounts(spec, volume_mounts) + update_volumes(spec, volumes) update_image_pull_secrets(spec, image_pull_secrets) update_image(spec, image) update_env(spec, env) @@ -280,6 +298,8 @@ def generate_appwrapper( write_to_file: bool, local_queue: Optional[str], labels, + volumes: list[client.V1Volume], + volume_mounts: list[client.V1VolumeMount], ): cluster_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -299,6 +319,8 @@ def generate_appwrapper( head_cpus, head_memory, head_gpus, + volumes, + volume_mounts, ) augment_labels(cluster_yaml, labels) notebook_annotations(cluster_yaml) From 20476aa81f30c2faf6781c581bed3b1907ac590e Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 13 Jun 2024 15:25:59 +0100 Subject: [PATCH 263/496] Added Unit tests for custom volumes and volume mounts --- tests/unit-test-volume-cluster.yaml | 186 ++++++++++++++++++++++++++++ tests/unit_test.py | 62 ++++++++++ 2 files changed, 248 insertions(+) create mode 100644 tests/unit-test-volume-cluster.yaml diff --git a/tests/unit-test-volume-cluster.yaml b/tests/unit-test-volume-cluster.yaml 
new file mode 100644 index 00000000..542f1183 --- /dev/null +++ b/tests/unit-test-volume-cluster.yaml @@ -0,0 +1,186 @@ +apiVersion: ray.io/v1 +kind: RayCluster +metadata: + labels: + controller-tools.k8s.io: '1.0' + kueue.x-k8s.io/queue-name: local-queue-default + name: unit-test-volume-cluster + namespace: opendatahub +spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + enableIngress: false + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + serviceType: ClusterIP + template: + spec: + containers: + - image: quay.io/project-codeflare/ray:latest-py39-cu118 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + requests: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /home/ray/test1 + name: test + - mountPath: /home/ray/test2 + name: test2 + - mountPath: /home/ray/test2 + name: test3 + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + - emptyDir: + sizeLimit: 500Gi + name: test + - configMap: + items: + - key: test + path: /home/ray/test2/data.txt + name: config-map-test + name: test2 + - name: test3 + secret: + secretName: test-secret + rayVersion: 2.7.0 + workerGroupSpecs: + - groupName: small-group-unit-test-volume-cluster + maxReplicas: 1 + minReplicas: 1 + rayStartParams: + block: 'true' + num-gpus: '0' + replicas: 1 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - image: quay.io/project-codeflare/ray:latest-py39-cu118 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 1 + memory: 2G + nvidia.com/gpu: 0 + requests: + cpu: 1 + memory: 2G + nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /home/ray/test1 + name: test + - mountPath: /home/ray/test2 + name: test2 + - mountPath: /home/ray/test2 + name: test3 + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: 
odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + - emptyDir: + sizeLimit: 500Gi + name: test + - configMap: + items: + - key: test + path: /home/ray/test2/data.txt + name: config-map-test + name: test2 + - name: test3 + secret: + secretName: test-secret diff --git a/tests/unit_test.py b/tests/unit_test.py index bdd5ffad..892d8fe2 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -444,6 +444,68 @@ def test_default_cluster_creation(mocker): assert cluster.config.namespace == "opendatahub" +def test_cluster_with_custom_volumes(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "codeflare_sdk.cluster.cluster.get_current_namespace", + return_value="opendatahub", + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + + from kubernetes.client import ( + V1Volume, + V1VolumeMount, + V1EmptyDirVolumeSource, + V1ConfigMapVolumeSource, + V1KeyToPath, + V1SecretVolumeSource, + ) + + volume_mounts = [ + V1VolumeMount(mount_path="/home/ray/test1", name="test"), + V1VolumeMount( + mount_path="/home/ray/test2", + name="test2", + ), + V1VolumeMount( + mount_path="/home/ray/test2", + name="test3", + ), + ] + + volumes = [ + V1Volume( + name="test", + empty_dir=V1EmptyDirVolumeSource(size_limit="500Gi"), + ), + V1Volume( + name="test2", + config_map=V1ConfigMapVolumeSource( + name="config-map-test", + items=[V1KeyToPath(key="test", path="/home/ray/test2/data.txt")], + ), + ), + V1Volume(name="test3", secret=V1SecretVolumeSource(secret_name="test-secret")), + ] + + test_config = ClusterConfiguration( + name="unit-test-volume-cluster", + image="quay.io/project-codeflare/ray:latest-py39-cu118", + volume_mounts=volume_mounts, + volumes=volumes, + ) + cluster = Cluster(test_config) + test_rc = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) + with open( + f"{parent}/tests/unit-test-volume-cluster.yaml", + ) as f: + volume_rc = yaml.load(f, Loader=yaml.FullLoader) + assert test_rc == volume_rc + + def test_gen_names_with_name(mocker): mocker.patch.object( uuid, "uuid4", return_value=uuid.UUID("00000000-0000-0000-0000-000000000001") From f9e2ee8357a08838847c1400c73aa371bce5d024 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 8 May 2024 14:17:50 +0100 Subject: [PATCH 264/496] Updated Ray version to 2.20.0 --- .github/workflows/e2e_tests.yaml | 1 + poetry.lock | 279 +++++++----------- pyproject.toml | 4 +- .../templates/base-template.yaml | 2 +- tests/test-case-bad.yaml | 2 +- tests/test-case-no-mcad.yamls | 2 +- tests/test-case.yaml | 2 +- tests/test-default-appwrapper.yaml | 2 +- tests/unit_test.py | 6 +- 9 files changed, 114 insertions(+), 186 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index b83afb4b..08aeef11 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -43,6 +43,7 @@ jobs: sudo rm -rf /opt/ghc sudo rm -rf "/usr/local/share/boost" sudo rm -rf "$AGENT_TOOLSDIRECTORY" + sudo apt install python3.9 docker rmi $(docker image ls -aq) echo "Final status:" df -h diff --git a/poetry.lock b/poetry.lock index e0fa9acb..2bcedf96 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should 
not be changed by hand. +# This file is automatically @generated by Poetry 1.8.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -124,17 +124,6 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" -[[package]] -name = "ansicon" -version = "1.89.0" -description = "Python wrapper for loading Jason Hood's ANSICON" -optional = false -python-versions = "*" -files = [ - {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, - {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, -] - [[package]] name = "appnope" version = "0.1.4" @@ -238,22 +227,6 @@ files = [ tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] -[[package]] -name = "blessed" -version = "1.20.0" -description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." -optional = false -python-versions = ">=2.7" -files = [ - {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, - {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, -] - -[package.dependencies] -jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} -six = ">=1.9.0" -wcwidth = ">=0.1.4" - [[package]] name = "cachetools" version = "5.3.1" @@ -914,25 +887,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] -[[package]] -name = "gpustat" -version = "1.1.1" -description = "An utility to monitor NVIDIA GPU status and usage" -optional = false -python-versions = ">=3.6" -files = [ - {file = "gpustat-1.1.1.tar.gz", hash = "sha256:c18d3ed5518fc16300c42d694debc70aebb3be55cae91f1db64d63b5fa8af9d8"}, -] - -[package.dependencies] -blessed = ">=1.17.1" -nvidia-ml-py = ">=11.450.129" -psutil = ">=5.6.0" - -[package.extras] -completion = ["shtab"] -test = ["mockito (>=1.2.1)", "pytest (>=5.4.1)", "pytest-runner"] - [[package]] name = "grpcio" version = "1.60.0" @@ -1029,24 +983,6 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker perf = ["ipython"] testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] -[[package]] -name = "importlib-resources" -version = "6.1.0" -description = "Read resources from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.1.0-py3-none-any.whl", hash = "sha256:aa50258bbfa56d4e33fbd8aa3ef48ded10d1735f11532b8df95388cc6bdb7e83"}, - {file = "importlib_resources-6.1.0.tar.gz", hash = "sha256:9d48dcccc213325e810fd723e7fbb45ccb39f6cf5c31f00cf2b965f5f10f3cb9"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-ruff", "zipp (>=3.17)"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -1138,18 
+1074,21 @@ qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] -name = "jinxed" -version = "1.2.1" -description = "Jinxed Terminal Library" +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "jinxed-1.2.1-py2.py3-none-any.whl", hash = "sha256:37422659c4925969c66148c5e64979f553386a4226b9484d910d3094ced37d30"}, - {file = "jinxed-1.2.1.tar.gz", hash = "sha256:30c3f861b73279fea1ed928cfd4dfb1f273e16cd62c8a32acfac362da0f78f3f"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] -ansicon = {version = "*", markers = "platform_system == \"Windows\""} +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] [[package]] name = "jsonschema" @@ -1164,9 +1103,7 @@ files = [ [package.dependencies] attrs = ">=22.2.0" -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} jsonschema-specifications = ">=2023.03.6" -pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} referencing = ">=0.28.4" rpds-py = ">=0.7.1" @@ -1186,7 +1123,6 @@ files = [ ] [package.dependencies] -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} referencing = ">=0.28.0" [[package]] @@ -1334,18 +1270,73 @@ files = [ [[package]] name = "matplotlib-inline" -version = "0.1.6" +version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, - {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, ] [package.dependencies] traitlets = "*" +[[package]] +name = "memray" +version = "1.10.0" +description = "A memory profiler for Python applications" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "memray-1.10.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:843a688877691746f9d1835cfa8a65139948471bdd78720435808d20bc30a1cc"}, + {file = "memray-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6937d7ef67d18ccc01c3250cdf3b4ef1445b859ee8756f09e3d11bd3ff0c7d67"}, + {file = "memray-1.10.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:23e8c402625cfb32d0e9edb5ec0945f3e5e54bc6b0c5699f6284302082b80bd4"}, + {file = "memray-1.10.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f16c5c8730b616613dc8bafe32649ca6bd7252606251eb00148582011758d0b5"}, + {file = "memray-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7aeb47174c42e99740a8e2b3b6fe0932c95d987258d48a746974ead19176c26"}, + {file = "memray-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2ce59ef485db3634de98b3a026d2450fc0a875e3a58a9ea85f7a89098841defe"}, + {file = 
"memray-1.10.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:53a8f66af18b1f3bcf5c9f3c95ae4134dd675903a38f9d0e6341b7bca01b63d0"}, + {file = "memray-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9627184c926252c8f719c301f1fefe970f0d033c643a6448b93fed2889d1ea94"}, + {file = "memray-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3a14960838d89a91747885897d34134afb65883cc3b0ed7ff30fe1af00f9fe6"}, + {file = "memray-1.10.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f2a47871c172a0539bd72737bb6b294fc10c510464066b825d90fcd3bb4916"}, + {file = "memray-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c401c57f49c4c5f1fecaee1e746f537cdc6680da05fb963dc143bd08ee109bf"}, + {file = "memray-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ce22a887a585ef5020896de89ffc793e531b65ccc81fbafcc7886010c2c562b3"}, + {file = "memray-1.10.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:b75040f28e8678d0e9c4907d55c95cf26db8ef5adc9941a228f1b280a9efd9c0"}, + {file = "memray-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:95e563d9c976e429ad597ad2720d95cebbe8bac891a3082465439143e2740772"}, + {file = "memray-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:663d463e89a64bae4a6b2f8c837d11a3d094834442d536a4165e1d31899a3500"}, + {file = "memray-1.10.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a21745fb516b7a6efcd40aa7487c59e9313fcfc782d0193fcfcf00b48426874"}, + {file = "memray-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6d683c4f8d25c6ad06ae18715f218983c5eb86803953615e902d632fdf6ec1"}, + {file = "memray-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b311e91203be71e1a0ce5e4f978137765bcb1045f3bf5646129c83c5b96ab3c"}, + {file = "memray-1.10.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:68bd8df023c8a32f44c11d997e5c536837e27c0955daf557d3a377edd55a1dd3"}, + {file = "memray-1.10.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:322ed0b69014a0969b777768d461a785203f81f9864386b666b5b26645d9c294"}, + {file = "memray-1.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e985fb7646b0475c303919d19211d2aa54e5a9e2cd2a102472299be5dbebd3"}, + {file = "memray-1.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4eba29179772b4a2e440a065b320b03bc2e73fe2648bdf7936aa3b9a086fab4a"}, + {file = "memray-1.10.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:b681519357d94f5f0857fbc6029e7c44d3f41436109e955a14fd312d8317bc35"}, + {file = "memray-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8196c684f1be8fe423e5cdd2356d4255a2cb482a1f3e89612b70d2a2862cf5bb"}, + {file = "memray-1.10.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:898acd60f57a10dc5aaf1fd64aa2f821f0420114f3f60c3058083788603f173a"}, + {file = "memray-1.10.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6fd13ef666c7fced9768d1cfabf71dc6dfa6724935a8dff463495ac2dc5e13a4"}, + {file = "memray-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e356af93e3b031c83957e9ac1a653f5aaba5df1e357dd17142f5ed19bb3dc660"}, + {file = "memray-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92c372cb262eddd23049f945ca9527f0e4cc7c40a070aade1802d066f680885b"}, + {file = "memray-1.10.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = 
"sha256:38393c86ce6d0a08e6ec0eb1401d49803b7c0c950c2565386751cdc81568cba8"}, + {file = "memray-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a8bb7fbd8303c4f0017ba7faef6b88f904cda2931ed667cbf3b98f024b3bc44"}, + {file = "memray-1.10.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d56f37a34125684746c13d24bd7a3fb17549b0bb355eb50969eb11e05e3ba62"}, + {file = "memray-1.10.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:85c32d6613d81b075f740e398c4d653e0803cd48e82c33dcd584c109d6782666"}, + {file = "memray-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:566602b2143e06b3d592901d98c52ce4599e71aa2555146eeb5cec03506f9498"}, + {file = "memray-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:391aac6c9f744528d3186bc82d708a1acc83525778f804045d7c96f860f8ec98"}, + {file = "memray-1.10.0.tar.gz", hash = "sha256:38322e052b882790993412f1840517a51818aa55c47037f69915b2007f2c4cee"}, +] + +[package.dependencies] +jinja2 = ">=2.9" +rich = ">=11.2.0" + +[package.extras] +benchmark = ["asv"] +dev = ["Cython", "IPython", "asv", "black", "bump2version", "check-manifest", "flake8", "furo", "greenlet", "ipython", "isort", "mypy", "pytest", "pytest-cov", "setuptools", "sphinx", "sphinx-argparse", "towncrier"] +docs = ["IPython", "bump2version", "furo", "sphinx", "sphinx-argparse", "towncrier"] +lint = ["black", "check-manifest", "flake8", "isort", "mypy"] +test = ["Cython", "greenlet", "ipython", "pytest", "pytest-cov", "setuptools"] + [[package]] name = "msgpack" version = "1.0.7" @@ -1542,17 +1533,6 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] -[[package]] -name = "nvidia-ml-py" -version = "12.535.133" -description = "Python Bindings for the NVIDIA Management Library" -optional = false -python-versions = "*" -files = [ - {file = "nvidia-ml-py-12.535.133.tar.gz", hash = "sha256:b1559af0d57dd20955bf58d05afff7b166ddd44947eb3051c9905638799eb1dc"}, - {file = "nvidia_ml_py-12.535.133-py3-none-any.whl", hash = "sha256:91d808d3f246d30bead2a0a2540b74b9e9fc584a9c3f1f55abfc2940c4e44fd2"}, -] - [[package]] name = "oauthlib" version = "3.2.2" @@ -1765,17 +1745,6 @@ files = [ {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." -optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - [[package]] name = "platformdirs" version = "3.11.0" @@ -1856,34 +1825,6 @@ files = [ {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"}, ] -[[package]] -name = "psutil" -version = "5.9.6" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:fb8a697f11b0f5994550555fcfe3e69799e5b060c8ecf9e2f75c69302cc35c0d"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:91ecd2d9c00db9817a4b4192107cf6954addb5d9d67a969a4f436dbc9200f88c"}, - {file = "psutil-5.9.6-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:10e8c17b4f898d64b121149afb136c53ea8b68c7531155147867b7b1ac9e7e28"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:18cd22c5db486f33998f37e2bb054cc62fd06646995285e02a51b1e08da97017"}, - {file = "psutil-5.9.6-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ca2780f5e038379e520281e4c032dddd086906ddff9ef0d1b9dcf00710e5071c"}, - {file = "psutil-5.9.6-cp27-none-win32.whl", hash = "sha256:70cb3beb98bc3fd5ac9ac617a327af7e7f826373ee64c80efd4eb2856e5051e9"}, - {file = "psutil-5.9.6-cp27-none-win_amd64.whl", hash = "sha256:51dc3d54607c73148f63732c727856f5febec1c7c336f8f41fcbd6315cce76ac"}, - {file = "psutil-5.9.6-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c69596f9fc2f8acd574a12d5f8b7b1ba3765a641ea5d60fb4736bf3c08a8214a"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92e0cc43c524834af53e9d3369245e6cc3b130e78e26100d1f63cdb0abeb3d3c"}, - {file = "psutil-5.9.6-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:748c9dd2583ed86347ed65d0035f45fa8c851e8d90354c122ab72319b5f366f4"}, - {file = "psutil-5.9.6-cp36-cp36m-win32.whl", hash = "sha256:3ebf2158c16cc69db777e3c7decb3c0f43a7af94a60d72e87b2823aebac3d602"}, - {file = "psutil-5.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:ff18b8d1a784b810df0b0fff3bcb50ab941c3b8e2c8de5726f9c71c601c611aa"}, - {file = "psutil-5.9.6-cp37-abi3-win32.whl", hash = "sha256:a6f01f03bf1843280f4ad16f4bde26b817847b4c1a0db59bf6419807bc5ce05c"}, - {file = "psutil-5.9.6-cp37-abi3-win_amd64.whl", hash = "sha256:6e5fb8dc711a514da83098bc5234264e551ad980cec5f85dabf4d38ed6f15e9a"}, - {file = "psutil-5.9.6-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:daecbcbd29b289aac14ece28eca6a3e60aa361754cf6da3dfb20d4d32b6c7f57"}, - {file = "psutil-5.9.6.tar.gz", hash = "sha256:e4b92ddcd7dd4cdd3f900180ea1e104932c7bce234fb88976e2a3b296441225a"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - [[package]] name = "ptyprocess" version = "0.7.0" @@ -2279,35 +2220,26 @@ files = [ [[package]] name = "ray" -version = "2.7.0" +version = "2.20.0" description = "Ray provides a simple, universal API for building distributed applications." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "ray-2.7.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:bc911655908b61b2e9f59b8df158fcc62cd32080c468b484b539ebf0a4111d04"}, - {file = "ray-2.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0ee8c14e1521559cd5802bfad3f0aba4a77afdfba57dd446162a7449c6e8ff68"}, - {file = "ray-2.7.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ebde44af7d479ede21d1c2e68b5ccd8264e18df6e4f3c216d9e99c31e819bde6"}, - {file = "ray-2.7.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:b83621f5d2d4079e6ae624c3bf30046a4fefa0ea7ea5e4a4dfe4b50c580b3768"}, - {file = "ray-2.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:8e1b06abba6e227b8dde1ad861c587fb2608a6970d270e4755cd24a6f37ed565"}, - {file = "ray-2.7.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:5442d48719f033831a324f05b332d6e7181970d721e9504be2091cc9d9735394"}, - {file = "ray-2.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ca8225878cce7b9e2d0ca9668d9370893a7cee35629d11a3889a1b66a0007218"}, - {file = "ray-2.7.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:a3f59dbb0780f9fa11f5bf96bef853b4cb95245456d4400e1c7bf2e514d12ab2"}, - {file = "ray-2.7.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:8384b3f30bc1446ef810e9e894afa03238c5ac40d3c40c0740d82f347112015d"}, - {file = "ray-2.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d4530e7024375505552dabd3f4441fc9ac7a5562365a81ba9afa14185433879"}, - {file = "ray-2.7.0-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:c491b8051eef82b77d136c48a23d16485c0e54233303ccf68e9fe69a06c517e6"}, - {file = "ray-2.7.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:1684c434886cb7b263cdf98ed39d75dec343e949f7b14f3385d83bfe70ee8c80"}, - {file = "ray-2.7.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:856a9ae164b9b0aeaad54f3e78986eb19900ed3c74e26f51b02a7d8826c97e59"}, - {file = "ray-2.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:34925a90b6239de42592bb4524dcbdc59a9c65f1f74ad4d9f97f636bd59c73d7"}, - {file = "ray-2.7.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:60db240f37d80a80492e09a8d1e29b79d034431c6fcb651401e9e2d24d850793"}, - {file = "ray-2.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:835155fdeb8698eae426f3d9416e6b8165197fe5c1c74e1b02a429fc7f4ddcd2"}, - {file = "ray-2.7.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:442b7568946081d38c8addbc528e7b09fc1ee25453b4800c86b7e5ba4bce9dd3"}, - {file = "ray-2.7.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:3825292b777b423e2cd34bf66e8e1e7701b04c6a5308f9f291ad5929b289dc47"}, - {file = "ray-2.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:ce700322662946ad5c62a39b78e81feebcb855d378c49f5df6477c22f0ac1e5a"}, - {file = "ray-2.7.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:327c23aac5dd26ee4abe6cee70320322d63fdf97c6028fbb9555724b46a8f3e3"}, - {file = "ray-2.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a09021d45312ab7a44109b251984718b65fbff77df0b55e30e651193cdf42bff"}, - {file = "ray-2.7.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f05fcb609962d14f4d23cc88a9d07cafa7077ce3c5d5ee99cd08a19067b7eecf"}, - {file = "ray-2.7.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:0e0f7dbeb4444940c72b64fdecd6f331593466914b2dffeed03ce97225acec14"}, - {file = "ray-2.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:29a0866316756ae18e232dd074adbf408dcdabe95d135a9a96b9a8c24393c983"}, + {file = "ray-2.20.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = 
"sha256:8855a5df8b3e6b8bcb5582a8491c50d0237e70751f941e8978bd6408245b7838"}, + {file = "ray-2.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0566b28c75aad1d47b9403c3901a85db586ce7191fdc6978e07ad56e80bf82b"}, + {file = "ray-2.20.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:738c68f4114754f846b3d03b730b42a6468f8b54665732da9f9108aa1d3ecbe3"}, + {file = "ray-2.20.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:2c7f8cd468cbba009d7ebd8a8da66026aeb520f7f4183dd6f49419d75bc84415"}, + {file = "ray-2.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:611d34d0c659652a38ef482a82dfc362074984617765e1d5a414337e4f914cfd"}, + {file = "ray-2.20.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:f7816767e644014f65afbfceb6adfb08c15784a4227aa331b28ac90d1b757a58"}, + {file = "ray-2.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e98df29fd6dac52c87c1f5be5ad99601a8955eaabe921e5cab29b27775250ce"}, + {file = "ray-2.20.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:e84ddad1521e06c91fc641f2b856d33ca2bfa314784172862c41a5184e0e760b"}, + {file = "ray-2.20.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:d9b13815fae5c9a68c9a02f21e1c49c58a5bb6565cb9ed5d48571cacce7568f2"}, + {file = "ray-2.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:6ac1dcb303ddf53d2d87bc5b719e8c38f0a5efe41e175b6ba563fb65b5f4e9a2"}, + {file = "ray-2.20.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:1de0810f77ae4a0bf055aa2bdcb161be1d6d1b67b4095e85a5b3fbb6e0dadcd2"}, + {file = "ray-2.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f3519dd7794ead4d3e17d4570593b2a10e8db062836907517e85b4e769dec1a"}, + {file = "ray-2.20.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5a2cb9f100bbb6351372519b03ddc21d9fa6c8716621237273a59a6e250a8204"}, + {file = "ray-2.20.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:64b394a6462a2ac2401b1b004f2cc7ac31e429388abf27024072a55702f1159c"}, + {file = "ray-2.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:65938f7bd28a825d90c643465ad6b1334d97d16e381c409b19269e4dcc043341"}, ] [package.dependencies] @@ -2319,18 +2251,14 @@ colorful = {version = "*", optional = true, markers = "extra == \"default\""} filelock = "*" frozenlist = "*" fsspec = {version = "*", optional = true, markers = "extra == \"data\""} -gpustat = {version = ">=1.0.0", optional = true, markers = "extra == \"default\""} grpcio = [ {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\" and extra == \"default\""}, {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""}, ] jsonschema = "*" +memray = {version = "*", optional = true, markers = "sys_platform != \"win32\" and extra == \"default\""} msgpack = ">=1.0.0,<2.0.0" -numpy = [ - {version = ">=1.16", markers = "python_version < \"3.9\""}, - {version = ">=1.20", optional = true, markers = "extra == \"data\""}, - {version = ">=1.19.3", markers = "python_version >= \"3.9\""}, -] +numpy = {version = ">=1.20", optional = true, markers = "extra == \"data\""} opencensus = {version = "*", optional = true, markers = "extra == \"default\""} packaging = "*" pandas = {version = ">=1.3", optional = true, markers = "extra == \"data\""} @@ -2338,23 +2266,23 @@ prometheus-client = {version = ">=0.7.1", optional = true, markers = "extra == \ protobuf = ">=3.15.3,<3.19.5 || >3.19.5" py-spy = {version = ">=0.2.0", optional = true, markers = "extra == \"default\""} pyarrow = {version = ">=6.0.1", optional = true, markers = "extra == \"data\""} -pydantic = {version = "<2", optional = true, 
markers = "extra == \"default\""} +pydantic = {version = "<2.0.dev0 || >=2.5.dev0,<3", optional = true, markers = "extra == \"default\""} pyyaml = "*" requests = "*" smart-open = {version = "*", optional = true, markers = "extra == \"default\""} -virtualenv = {version = ">=20.0.24,<20.21.1", optional = true, markers = "extra == \"default\""} +virtualenv = {version = ">=20.0.24,<20.21.1 || >20.21.1", optional = true, markers = "extra == \"default\""} [package.extras] -air = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "dm-tree", "fastapi", "fsspec", "gpustat (>=1.0.0)", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml", "ray-cpp (==2.7.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] -client = ["grpcio (!=1.56.0)"] -cpp = ["ray-cpp (==2.7.0)"] +air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "memray", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.20.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +client = ["grpcio"] +cpp = ["ray-cpp (==2.20.0)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] -default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "virtualenv (>=20.0.24,<20.21.1)"] +default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] rllib = ["dm-tree", "fsspec", "gymnasium (==0.28.1)", "lz4", "pandas", "pyarrow (>=6.0.1)", "pyyaml", "requests", "rich", 
"scikit-image", "scipy", "tensorboardX (>=1.9)", "typer"] -serve = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "starlette", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] -serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "aiorwlock", "colorful", "fastapi", "gpustat (>=1.0.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2)", "requests", "smart-open", "starlette", "uvicorn", "virtualenv (>=20.0.24,<20.21.1)", "watchfiles"] +serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] train = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] @@ -2426,7 +2354,6 @@ files = [ [package.dependencies] commonmark = ">=0.9.0,<0.10.0" pygments = ">=2.6.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] @@ -2735,13 +2662,13 @@ test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess [[package]] name = "wcwidth" -version = "0.2.12" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, - {file = "wcwidth-0.2.12.tar.gz", hash = "sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] @@ -2891,5 +2818,5 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" -python-versions = "^3.8" -content-hash = "e82f9c28e3f78682ca3e6d5414e1c1fe8f0bd62b5a992f749d8b7bd4bbd0273d" +python-versions = "^3.9" +content-hash = "d834cef86c5776cdd688c9ff9df9e31731bc829e1f319ec4ea86c17d33762af6" diff --git a/pyproject.toml b/pyproject.toml index 99d85a6b..a295e383 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,10 +20,10 @@ homepage = "https://github.com/project-codeflare/codeflare-sdk" keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] [tool.poetry.dependencies] -python = "^3.8" +python = "^3.9" openshift-client = "1.0.18" rich = "^12.5" -ray = {version = "2.7.0", extras = ["data", "default"]} +ray = {version = "2.20.0", extras = ["data", "default"]} kubernetes = ">= 25.3.0, < 27" codeflare-torchx = 
"0.6.0.dev2" cryptography = "40.0.2" diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 0e071961..07ab4104 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -12,7 +12,7 @@ metadata: namespace: default spec: # The version of Ray you are using. Make sure all Ray containers are running this version of Ray. - rayVersion: '2.7.0' + rayVersion: '2.20.0' # If enableInTreeAutoscaling is true, the autoscaler sidecar will be added to the Ray head pod. # Ray autoscaler integration is supported only for Ray versions >= 1.11.0 # Ray autoscaler integration is Beta with KubeRay >= 0.3.0 and Ray >= 2.0.0. diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index 3c5bf076..b90cdeb1 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -68,7 +68,7 @@ spec: cpu: 2 memory: 8G nvidia.com/gpu: 0 - rayVersion: 1.12.0 + rayVersion: 2.20.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster maxReplicas: 2 diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 37d5d4db..b3916c81 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -89,7 +89,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.7.0 + rayVersion: 2.20.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster-ray maxReplicas: 2 diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 98166b37..4fd1edcc 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -94,7 +94,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.7.0 + rayVersion: 2.20.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster maxReplicas: 2 diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index f754c176..f2e8a39e 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -93,7 +93,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.7.0 + rayVersion: 2.20.0 workerGroupSpecs: - groupName: small-group-unit-test-default-cluster maxReplicas: 1 diff --git a/tests/unit_test.py b/tests/unit_test.py index 892d8fe2..a05872c4 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1245,7 +1245,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): }, }, }, - "rayVersion": "2.7.0", + "rayVersion": "2.20.0", "workerGroupSpecs": [ { "groupName": "small-group-quicktest", @@ -1479,7 +1479,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): } }, }, - "rayVersion": "1.12.0", + "rayVersion": "2.20.0", "workerGroupSpecs": [ { "groupName": "small-group-quicktest2", @@ -1852,7 +1852,7 @@ def get_aw_obj(group, version, namespace, plural): } }, }, - "rayVersion": "1.12.0", + "rayVersion": "2.20.0", "workerGroupSpecs": [ { "groupName": "small-group-quicktest", From b3950fc28053af3a67b8491b6647061f541109fc Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 29 May 2024 09:55:29 +0100 Subject: [PATCH 265/496] Removed python3.9 installation step --- .github/workflows/e2e_tests.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 08aeef11..b83afb4b 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -43,7 +43,6 @@ jobs: sudo rm -rf /opt/ghc sudo rm -rf "/usr/local/share/boost" sudo rm -rf "$AGENT_TOOLSDIRECTORY" - sudo apt install python3.9 docker rmi 
$(docker image ls -aq) echo "Final status:" df -h From c51ab98d85deea2893b898cfb9f31fc8790b588e Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 10 Jun 2024 11:39:31 +0100 Subject: [PATCH 266/496] Replaced old ray image with new 2.20.0 image --- src/codeflare_sdk/templates/base-template.yaml | 4 ++-- tests/e2e/support.py | 2 +- tests/test-case-bad.yaml | 4 ++-- tests/test-case-no-mcad.yamls | 4 ++-- tests/test-case.yaml | 4 ++-- tests/test-default-appwrapper.yaml | 4 ++-- tests/unit-test-volume-cluster.yaml | 6 +++--- tests/unit_test.py | 18 +++++++++--------- tests/unit_test_support.py | 2 +- 9 files changed, 24 insertions(+), 24 deletions(-) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 07ab4104..5c0c919d 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -69,7 +69,7 @@ spec: containers: # The Ray head pod - name: ray-head - image: quay.io/project-codeflare/ray:latest-py39-cu118 + image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 imagePullPolicy: Always ports: - containerPort: 6379 @@ -152,7 +152,7 @@ spec: spec: containers: - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - image: quay.io/project-codeflare/ray:latest-py39-cu118 + image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 # environment variables to set in the container.Optional. # Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ lifecycle: diff --git a/tests/e2e/support.py b/tests/e2e/support.py index a3dfd89b..04c9cb42 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -8,7 +8,7 @@ def get_ray_image(): - default_ray_image = "quay.io/project-codeflare/ray:latest-py39-cu118" + default_ray_image = "quay.io/project-codeflare/ray:2.20.0-py39-cu118" return os.getenv("RAY_IMAGE", default_ray_image) diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index b90cdeb1..18dcb7d7 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -42,7 +42,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/project-codeflare/ray:latest-py39-cu118 + image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -90,7 +90,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/project-codeflare/ray:latest-py39-cu118 + image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 lifecycle: preStop: exec: diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index b3916c81..d8d2516c 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -33,7 +33,7 @@ spec: template: spec: containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -106,7 +106,7 @@ spec: key: value spec: containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 lifecycle: preStop: exec: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 4fd1edcc..c5229ce7 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 imagePullPolicy: Always lifecycle: 
preStop: @@ -111,7 +111,7 @@ spec: key: value spec: containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 lifecycle: preStop: exec: diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index f2e8a39e..8fd1873f 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -110,7 +110,7 @@ spec: key: value spec: containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 lifecycle: preStop: exec: diff --git a/tests/unit-test-volume-cluster.yaml b/tests/unit-test-volume-cluster.yaml index 542f1183..67d2c6b2 100644 --- a/tests/unit-test-volume-cluster.yaml +++ b/tests/unit-test-volume-cluster.yaml @@ -29,7 +29,7 @@ spec: template: spec: containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -102,7 +102,7 @@ spec: - name: test3 secret: secretName: test-secret - rayVersion: 2.7.0 + rayVersion: 2.20.0 workerGroupSpecs: - groupName: small-group-unit-test-volume-cluster maxReplicas: 1 @@ -119,7 +119,7 @@ spec: key: value spec: containers: - - image: quay.io/project-codeflare/ray:latest-py39-cu118 + - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 lifecycle: preStop: exec: diff --git a/tests/unit_test.py b/tests/unit_test.py index a05872c4..4b48b173 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -261,7 +261,7 @@ def test_config_creation(): assert config.min_cpus == 3 and config.max_cpus == 4 assert config.min_memory == "5G" and config.max_memory == "6G" assert config.num_gpus == 7 - assert config.image == "quay.io/project-codeflare/ray:latest-py39-cu118" + assert config.image == "quay.io/project-codeflare/ray:2.20.0-py39-cu118" assert config.template == f"{parent}/src/codeflare_sdk/templates/base-template.yaml" assert config.machine_types == ["cpu.small", "gpu.large"] assert config.image_pull_secrets == ["unit-test-pull-secret"] @@ -400,7 +400,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): num_gpus=7, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/project-codeflare/ray:latest-py39-cu118", + image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", write_to_file=True, appwrapper=False, local_queue="local-queue-default", @@ -428,7 +428,7 @@ def test_default_cluster_creation(mocker): ) default_config = ClusterConfiguration( name="unit-test-default-cluster", - image="quay.io/project-codeflare/ray:latest-py39-cu118", + image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", appwrapper=True, ) cluster = Cluster(default_config) @@ -493,7 +493,7 @@ def test_cluster_with_custom_volumes(mocker): test_config = ClusterConfiguration( name="unit-test-volume-cluster", - image="quay.io/project-codeflare/ray:latest-py39-cu118", + image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", volume_mounts=volume_mounts, volumes=volumes, ) @@ -839,7 +839,7 @@ def test_ray_job_wrapping(mocker): return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) cluster = cluster = createClusterWithConfig(mocker) - cluster.config.image = "quay.io/project-codeflare/ray:latest-py39-cu118" 
+ cluster.config.image = "quay.io/project-codeflare/ray:2.20.0-py39-cu118" mocker.patch( "ray.job_submission.JobSubmissionClient._check_connection_and_version_with_url", return_value="None", @@ -959,7 +959,7 @@ def test_ray_details(mocker, capsys): ClusterConfiguration( name="raytest2", namespace="ns", - image="quay.io/project-codeflare/ray:latest-py39-cu118", + image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", write_to_file=True, appwrapper=True, local_queue="local_default_queue", @@ -2352,7 +2352,7 @@ def test_cluster_status(mocker): ClusterConfiguration( name="test", namespace="ns", - image="quay.io/project-codeflare/ray:latest-py39-cu118", + image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", write_to_file=True, appwrapper=True, local_queue="local_default_queue", @@ -2447,7 +2447,7 @@ def test_wait_ready(mocker, capsys): ClusterConfiguration( name="test", namespace="ns", - image="quay.io/project-codeflare/ray:latest-py39-cu118", + image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", write_to_file=True, appwrapper=True, local_queue="local-queue-default", @@ -2674,7 +2674,7 @@ def throw_if_getting_raycluster(group, version, namespace, plural): cluster = Cluster( ClusterConfiguration( "test_cluster", - image="quay.io/project-codeflare/ray:latest-py39-cu118", + image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", write_to_file=False, ) ) diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index baa14aac..6e662db4 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -17,7 +17,7 @@ def createClusterConfig(): appwrapper=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/project-codeflare/ray:latest-py39-cu118", + image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", write_to_file=True, ) return config From 1e80e62bb2f141ddb0e6a04710ce9e8802f11b32 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Thu, 23 May 2024 16:49:29 +0100 Subject: [PATCH 267/496] updating training script in basic_interactive notebook --- .../additional-demos/hf_interactive.ipynb | 125 ++++++++++-------- .../guided-demos/2_basic_interactive.ipynb | 119 +++++++++-------- .../2_basic_interactive.ipynb | 115 +++++++++------- .../preview_nbs/2_basic_interactive.ipynb | 115 +++++++++------- tests/unit_test.py | 4 + 5 files changed, 269 insertions(+), 209 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 9a362321..a7b004cb 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -13,7 +13,7 @@ "id": "d4acfb10-1aa1-445d-947e-396ea5ebed1a", "metadata": {}, "source": [ - "In this notebook you will learn how to leverage the **[huggingface](https://huggingface.co/)** support in ray ecosystem to carry out a text classification task using transfer learning. We will be referencing the example **[here](https://huggingface.co/docs/transformers/tasks/sequence_classification)**" + "In this notebook you will learn how to leverage the **[huggingface](https://huggingface.co/)** support in ray ecosystem to carry out a text classification task using transfer learning. We will be referencing the examples **[here](https://huggingface.co/docs/transformers/tasks/sequence_classification)** and **[here](https://docs.ray.io/en/latest/train/getting-started-transformers.html)**." 
] }, { @@ -21,9 +21,7 @@ "id": "70b77929-e96c-434e-ada3-8b14795bfbb1", "metadata": {}, "source": [ - "The example carries out a text classification task on **[imdb dataset](https://huggingface.co/datasets/imdb)** and tries to classify the movie reviews as positive or negative. Huggingface library provides an easy way to build a model and the dataset to carry out this classification task. In this case we will be using **distilbert-base-uncased** model which is a **BERT** based model.\n", - "\n", - "Huggingface has a **[built in support for ray ecosystem](https://docs.ray.io/en/releases-1.13.0/_modules/ray/ml/train/integrations/huggingface/huggingface_trainer.html)** which allows the huggingface trainer to scale on CodeFlare and can scale the training as we add additional gpus and can run distributed training across multiple GPUs that will help scale out the training.\n" + "The example carries out a text classification task on **[imdb dataset](https://huggingface.co/datasets/imdb)** and tries to classify the movie reviews as positive or negative. Huggingface library provides an easy way to build a model and the dataset to carry out this classification task. In this case we will be using **distilbert-base-uncased** model which is a **BERT** based model." ] }, { @@ -317,14 +315,13 @@ "assert ray_cluster_uri, \"Ray cluster needs to be started and set before proceeding\"\n", "\n", "import ray\n", - "from ray.air.config import ScalingConfig\n", "\n", "# reset the ray context in case there's already one. \n", "ray.shutdown()\n", "# establish connection to ray cluster\n", "\n", "#install additional libraries that will be required for this training\n", - "runtime_env = {\"pip\": [\"transformers\", \"datasets\", \"evaluate\", \"pyarrow<7.0.0\", \"accelerate\"]}\n", + "runtime_env = {\"pip\": [\"transformers==4.41.2\", \"datasets==2.17.0\", \"accelerate==0.31.0\", \"scikit-learn==1.5.0\"]}\n", "\n", "# NOTE: This will work for in-cluster notebook servers (RHODS/ODH), but not for local machines\n", "# To see how to connect from your laptop, go to demo-notebooks/additional-demos/local_interactive.ipynb\n", @@ -354,7 +351,7 @@ "id": "8bdbe888-4f38-4e9a-ae43-67ce89ff9d42", "metadata": {}, "source": [ - "We are using the code based on the example **[here](https://huggingface.co/docs/transformers/tasks/sequence_classification)** . " + "We are using the code based on the examples **[here](https://huggingface.co/docs/transformers/tasks/sequence_classification)** and **[here](https://docs.ray.io/en/latest/train/getting-started-transformers.html)**. 
" ] }, { @@ -366,66 +363,83 @@ "source": [ "@ray.remote\n", "def train_fn():\n", - " from datasets import load_dataset\n", - " import transformers\n", - " from transformers import AutoTokenizer, TrainingArguments\n", - " from transformers import AutoModelForSequenceClassification\n", + " import os\n", " import numpy as np\n", - " from datasets import load_metric\n", - " import ray\n", - " from ray import tune\n", - " from ray.train.huggingface import HuggingFaceTrainer\n", - "\n", - " dataset = load_dataset(\"imdb\")\n", - " tokenizer = AutoTokenizer.from_pretrained(\"distilbert-base-uncased\")\n", - "\n", - " def tokenize_function(examples):\n", - " return tokenizer(examples[\"text\"], padding=\"max_length\", truncation=True)\n", + " from datasets import load_dataset, load_metric\n", + " import transformers\n", + " from transformers import (\n", + " Trainer,\n", + " TrainingArguments,\n", + " AutoTokenizer,\n", + " AutoModelForSequenceClassification,\n", + " )\n", + " import ray.train.huggingface.transformers\n", + " from ray.train import ScalingConfig\n", + " from ray.train.torch import TorchTrainer\n", "\n", - " tokenized_datasets = dataset.map(tokenize_function, batched=True)\n", + " # When running in a multi-node cluster you will need persistent storage that is accessible across all worker nodes. \n", + " # See www.github.com/project-codeflare/codeflare-sdk/tree/main/docs/s3-compatible-storage.md for more information.\n", + " \n", + " def train_func():\n", + " # Datasets\n", + " dataset = load_dataset(\"imdb\")\n", + " tokenizer = AutoTokenizer.from_pretrained(\"distilbert-base-uncased\")\n", "\n", - " #using a fraction of dataset but you can run with the full dataset\n", - " small_train_dataset = tokenized_datasets[\"train\"].shuffle(seed=42).select(range(100))\n", - " small_eval_dataset = tokenized_datasets[\"test\"].shuffle(seed=42).select(range(100))\n", + " def tokenize_function(examples):\n", + " return tokenizer(examples[\"text\"], padding=\"max_length\", truncation=True)\n", "\n", - " print(f\"len of train {small_train_dataset} and test {small_eval_dataset}\")\n", + " small_train_dataset = (\n", + " dataset[\"train\"].select(range(100)).map(tokenize_function, batched=True)\n", + " )\n", + " small_eval_dataset = (\n", + " dataset[\"test\"].select(range(100)).map(tokenize_function, batched=True)\n", + " )\n", "\n", - " ray_train_ds = ray.data.from_huggingface(small_train_dataset)\n", - " ray_evaluation_ds = ray.data.from_huggingface(small_eval_dataset)\n", + " # Model\n", + " model = AutoModelForSequenceClassification.from_pretrained(\n", + " \"distilbert-base-uncased\", num_labels=2\n", + " )\n", "\n", - " def compute_metrics(eval_pred):\n", - " metric = load_metric(\"accuracy\")\n", - " logits, labels = eval_pred\n", - " predictions = np.argmax(logits, axis=-1)\n", - " return metric.compute(predictions=predictions, references=labels)\n", + " def compute_metrics(eval_pred):\n", + " metric = load_metric(\"accuracy\")\n", + " logits, labels = eval_pred\n", + " predictions = np.argmax(logits, axis=-1)\n", + " return metric.compute(predictions=predictions, references=labels)\n", "\n", - " def trainer_init_per_worker(train_dataset, eval_dataset, **config):\n", - " model = AutoModelForSequenceClassification.from_pretrained(\"distilbert-base-uncased\", num_labels=2)\n", + " # Hugging Face Trainer\n", + " training_args = TrainingArguments(\n", + " output_dir=\"test_trainer\",\n", + " evaluation_strategy=\"epoch\",\n", + " save_strategy=\"epoch\",\n", + " 
report_to=\"none\",\n", + " )\n", "\n", - " training_args = TrainingArguments(\"/tmp/hf_imdb/test\", eval_steps=1, disable_tqdm=True, \n", - " num_train_epochs=1, skip_memory_metrics=True,\n", - " learning_rate=2e-5,\n", - " per_device_train_batch_size=16,\n", - " per_device_eval_batch_size=16, \n", - " weight_decay=0.01,)\n", - " return transformers.Trainer(\n", + " trainer = Trainer(\n", " model=model,\n", " args=training_args,\n", - " train_dataset=train_dataset,\n", - " eval_dataset=eval_dataset,\n", - " compute_metrics=compute_metrics\n", + " train_dataset=small_train_dataset,\n", + " eval_dataset=small_eval_dataset,\n", + " compute_metrics=compute_metrics,\n", " )\n", "\n", - " scaling_config = ScalingConfig(num_workers=4, use_gpu=True) #num workers is the number of gpus\n", "\n", - " # we are using the ray native HuggingFaceTrainer, but you can swap out to use non ray Huggingface Trainer. Both have the same method signature. \n", - " # the ray native HFTrainer has built in support for scaling to multiple GPUs\n", - " trainer = HuggingFaceTrainer(\n", - " trainer_init_per_worker=trainer_init_per_worker,\n", - " scaling_config=scaling_config,\n", - " datasets={\"train\": ray_train_ds, \"evaluation\": ray_evaluation_ds},\n", + " callback = ray.train.huggingface.transformers.RayTrainReportCallback()\n", + " trainer.add_callback(callback)\n", + "\n", + " trainer = ray.train.huggingface.transformers.prepare_trainer(trainer)\n", + "\n", + " trainer.train()\n", + "\n", + "\n", + " ray_trainer = TorchTrainer(\n", + " train_func,\n", + " scaling_config=ScalingConfig(num_workers=3, use_gpu=True),\n", + " # Configure persistent storage that is accessible across \n", + " # all worker nodes.\n", + " # Uncomment and update the RunConfig below to include your storage details.\n", + " # run_config=ray.train.RunConfig(storage_path=\"storage path\"),\n", " )\n", - " result = trainer.fit()\n" + " result: ray.train.Result = ray_trainer.fit()" ] }, { @@ -1443,10 +1457,7 @@ "metadata": {}, "source": [ "## Conclusion\n", - "As shown in the above example, you can easily run your Huggingface transfer learning tasks easily and natively on CodeFlare. You can scale them from 1 to n GPUs without requiring you to make any significant code changes and leveraging the native Huggingface trainer. \n", - "\n", - "Also refer to additional notebooks that showcase other use cases\n", - "In our next notebook [./02_codeflare_workflows_encoding.ipynb ] shows an sklearn example and how you can leverage workflows to run experiment pipelines and explore multiple pipelines in parallel on CodeFlare cluster. \n" + "As shown in the above example, you can run your Huggingface transfer learning tasks easily and natively on CodeFlare. You can scale them from 1 to n GPUs without requiring you to make any significant code changes and leveraging the native Huggingface trainer. 
" ] }, { diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 6cee82e1..86142714 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -148,19 +148,17 @@ "metadata": {}, "outputs": [], "source": [ - "#before proceeding make sure the cluster exists and the uri is not empty\n", + "# before proceeding make sure the cluster exists and the uri is not empty\n", "assert ray_cluster_uri, \"Ray cluster needs to be started and set before proceeding\"\n", "\n", "import ray\n", - "from ray.air.config import ScalingConfig\n", "\n", "# reset the ray context in case there's already one. \n", "ray.shutdown()\n", "# establish connection to ray cluster\n", "\n", - "#install additional libraries that will be required for model training\n", - "runtime_env = {\"pip\": [\"transformers\", \"datasets\", \"evaluate\", \"pyarrow<7.0.0\", \"accelerate\"]}\n", - "\n", + "# install additional libraries that will be required for model training\n", + "runtime_env = {\"pip\": [\"transformers==4.41.2\", \"datasets==2.17.0\", \"accelerate==0.31.0\", \"scikit-learn==1.5.0\"]}\n", "# NOTE: This will work for in-cluster notebook servers (RHODS/ODH), but not for local machines\n", "# To see how to connect from your laptop, go to demo-notebooks/additional-demos/local_interactive.ipynb\n", "ray.init(address=ray_cluster_uri, runtime_env=runtime_env)\n", @@ -173,7 +171,7 @@ "id": "9711030b", "metadata": {}, "source": [ - "Now that we are connected (and have passed in some package requirements), let's try writing some training code for a DistilBERT transformer model via HuggingFace (using IMDB dataset):" + "Now that we are connected (and have passed in some package requirements), let's try writing some training code:" ] }, { @@ -185,66 +183,83 @@ "source": [ "@ray.remote\n", "def train_fn():\n", - " from datasets import load_dataset\n", - " import transformers\n", - " from transformers import AutoTokenizer, TrainingArguments\n", - " from transformers import AutoModelForSequenceClassification\n", + " import os\n", " import numpy as np\n", - " from datasets import load_metric\n", - " import ray\n", - " from ray import tune\n", - " from ray.train.huggingface import HuggingFaceTrainer\n", - "\n", - " dataset = load_dataset(\"imdb\")\n", - " tokenizer = AutoTokenizer.from_pretrained(\"distilbert-base-uncased\")\n", - "\n", - " def tokenize_function(examples):\n", - " return tokenizer(examples[\"text\"], padding=\"max_length\", truncation=True)\n", + " from datasets import load_dataset, load_metric\n", + " import transformers\n", + " from transformers import (\n", + " Trainer,\n", + " TrainingArguments,\n", + " AutoTokenizer,\n", + " AutoModelForSequenceClassification,\n", + " )\n", + " import ray.train.huggingface.transformers\n", + " from ray.train import ScalingConfig\n", + " from ray.train.torch import TorchTrainer\n", "\n", - " tokenized_datasets = dataset.map(tokenize_function, batched=True)\n", + " # When running in a multi-node cluster you will need persistent storage that is accessible across all worker nodes. 
\n", + " # See www.github.com/project-codeflare/codeflare-sdk/tree/main/docs/s3-compatible-storage.md for more information.\n", + " \n", + " def train_func():\n", + " # Datasets\n", + " dataset = load_dataset(\"imdb\")\n", + " tokenizer = AutoTokenizer.from_pretrained(\"distilbert-base-uncased\")\n", "\n", - " #using a fraction of dataset but you can run with the full dataset\n", - " small_train_dataset = tokenized_datasets[\"train\"].shuffle(seed=42).select(range(100))\n", - " small_eval_dataset = tokenized_datasets[\"test\"].shuffle(seed=42).select(range(100))\n", + " def tokenize_function(examples):\n", + " return tokenizer(examples[\"text\"], padding=\"max_length\", truncation=True)\n", "\n", - " print(f\"len of train {small_train_dataset} and test {small_eval_dataset}\")\n", + " small_train_dataset = (\n", + " dataset[\"train\"].select(range(100)).map(tokenize_function, batched=True)\n", + " )\n", + " small_eval_dataset = (\n", + " dataset[\"test\"].select(range(100)).map(tokenize_function, batched=True)\n", + " )\n", "\n", - " ray_train_ds = ray.data.from_huggingface(small_train_dataset)\n", - " ray_evaluation_ds = ray.data.from_huggingface(small_eval_dataset)\n", + " # Model\n", + " model = AutoModelForSequenceClassification.from_pretrained(\n", + " \"distilbert-base-uncased\", num_labels=2\n", + " )\n", "\n", - " def compute_metrics(eval_pred):\n", - " metric = load_metric(\"accuracy\")\n", - " logits, labels = eval_pred\n", - " predictions = np.argmax(logits, axis=-1)\n", - " return metric.compute(predictions=predictions, references=labels)\n", + " def compute_metrics(eval_pred):\n", + " metric = load_metric(\"accuracy\")\n", + " logits, labels = eval_pred\n", + " predictions = np.argmax(logits, axis=-1)\n", + " return metric.compute(predictions=predictions, references=labels)\n", "\n", - " def trainer_init_per_worker(train_dataset, eval_dataset, **config):\n", - " model = AutoModelForSequenceClassification.from_pretrained(\"distilbert-base-uncased\", num_labels=2)\n", + " # Hugging Face Trainer\n", + " training_args = TrainingArguments(\n", + " output_dir=\"test_trainer\",\n", + " evaluation_strategy=\"epoch\",\n", + " save_strategy=\"epoch\",\n", + " report_to=\"none\",\n", + " )\n", "\n", - " training_args = TrainingArguments(\"/tmp/hf_imdb/test\", eval_steps=1, disable_tqdm=True, \n", - " num_train_epochs=1, skip_memory_metrics=True,\n", - " learning_rate=2e-5,\n", - " per_device_train_batch_size=16,\n", - " per_device_eval_batch_size=16, \n", - " weight_decay=0.01,)\n", - " return transformers.Trainer(\n", + " trainer = Trainer(\n", " model=model,\n", " args=training_args,\n", - " train_dataset=train_dataset,\n", - " eval_dataset=eval_dataset,\n", - " compute_metrics=compute_metrics\n", + " train_dataset=small_train_dataset,\n", + " eval_dataset=small_eval_dataset,\n", + " compute_metrics=compute_metrics,\n", " )\n", "\n", - " scaling_config = ScalingConfig(num_workers=2, use_gpu=True) #num workers is the number of gpus\n", "\n", - " # we are using the ray native HuggingFaceTrainer, but you can swap out to use non ray Huggingface Trainer. Both have the same method signature. 
\n", - " # the ray native HFTrainer has built in support for scaling to multiple GPUs\n", - " trainer = HuggingFaceTrainer(\n", - " trainer_init_per_worker=trainer_init_per_worker,\n", - " scaling_config=scaling_config,\n", - " datasets={\"train\": ray_train_ds, \"evaluation\": ray_evaluation_ds},\n", + " callback = ray.train.huggingface.transformers.RayTrainReportCallback()\n", + " trainer.add_callback(callback)\n", + "\n", + " trainer = ray.train.huggingface.transformers.prepare_trainer(trainer)\n", + "\n", + " trainer.train()\n", + "\n", + "\n", + " ray_trainer = TorchTrainer(\n", + " train_func,\n", + " scaling_config=ScalingConfig(num_workers=3, use_gpu=True),\n", + " # Configure persistent storage that is accessible across \n", + " # all worker nodes.\n", + " # Uncomment and update the RunConfig below to include your storage details.\n", + " # run_config=ray.train.RunConfig(storage_path=\"storage path\"),\n", " )\n", - " result = trainer.fit()" + " result: ray.train.Result = ray_trainer.fit()" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index ff88d621..5ae66a0c 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -244,15 +244,13 @@ "assert ray_cluster_uri, \"Ray cluster needs to be started and set before proceeding\"\n", "\n", "import ray\n", - "from ray.air.config import ScalingConfig\n", "\n", "# reset the ray context in case there's already one. \n", "ray.shutdown()\n", "# establish connection to ray cluster\n", "\n", "#install additional libraries that will be required for model training\n", - "runtime_env = {\"pip\": [\"transformers\", \"datasets\", \"evaluate\", \"pyarrow<7.0.0\", \"accelerate\"]}\n", - "\n", + "runtime_env = {\"pip\": [\"transformers==4.41.2\", \"datasets==2.17.0\", \"accelerate==0.31.0\", \"scikit-learn==1.5.0\"]}\n", "# NOTE: This will work for in-cluster notebook servers (RHODS/ODH), but not for local machines\n", "# To see how to connect from your laptop, go to demo-notebooks/additional-demos/local_interactive.ipynb\n", "ray.init(address=ray_cluster_uri, runtime_env=runtime_env)\n", @@ -265,7 +263,7 @@ "id": "9711030b", "metadata": {}, "source": [ - "Now that we are connected (and have passed in some package requirements), let's try writing some training code for a DistilBERT transformer model via HuggingFace (using IMDB dataset):" + "Now that we are connected (and have passed in some package requirements), let's try writing some training code:" ] }, { @@ -277,66 +275,83 @@ "source": [ "@ray.remote\n", "def train_fn():\n", - " from datasets import load_dataset\n", - " import transformers\n", - " from transformers import AutoTokenizer, TrainingArguments\n", - " from transformers import AutoModelForSequenceClassification\n", + " import os\n", " import numpy as np\n", - " from datasets import load_metric\n", - " import ray\n", - " from ray import tune\n", - " from ray.train.huggingface import HuggingFaceTrainer\n", - "\n", - " dataset = load_dataset(\"imdb\")\n", - " tokenizer = AutoTokenizer.from_pretrained(\"distilbert-base-uncased\")\n", - "\n", - " def tokenize_function(examples):\n", - " return tokenizer(examples[\"text\"], padding=\"max_length\", truncation=True)\n", + " from datasets import load_dataset, load_metric\n", + " import transformers\n", + " from transformers import (\n", + " Trainer,\n", + " 
TrainingArguments,\n", + " AutoTokenizer,\n", + " AutoModelForSequenceClassification,\n", + " )\n", + " import ray.train.huggingface.transformers\n", + " from ray.train import ScalingConfig\n", + " from ray.train.torch import TorchTrainer\n", "\n", - " tokenized_datasets = dataset.map(tokenize_function, batched=True)\n", + " # When running in a multi-node cluster you will need persistent storage that is accessible across all worker nodes. \n", + " # See www.github.com/project-codeflare/codeflare-sdk/tree/main/docs/s3-compatible-storage.md for more information.\n", + " \n", + " def train_func():\n", + " # Datasets\n", + " dataset = load_dataset(\"imdb\")\n", + " tokenizer = AutoTokenizer.from_pretrained(\"distilbert-base-uncased\")\n", "\n", - " #using a fraction of dataset but you can run with the full dataset\n", - " small_train_dataset = tokenized_datasets[\"train\"].shuffle(seed=42).select(range(100))\n", - " small_eval_dataset = tokenized_datasets[\"test\"].shuffle(seed=42).select(range(100))\n", + " def tokenize_function(examples):\n", + " return tokenizer(examples[\"text\"], padding=\"max_length\", truncation=True)\n", "\n", - " print(f\"len of train {small_train_dataset} and test {small_eval_dataset}\")\n", + " small_train_dataset = (\n", + " dataset[\"train\"].select(range(100)).map(tokenize_function, batched=True)\n", + " )\n", + " small_eval_dataset = (\n", + " dataset[\"test\"].select(range(100)).map(tokenize_function, batched=True)\n", + " )\n", "\n", - " ray_train_ds = ray.data.from_huggingface(small_train_dataset)\n", - " ray_evaluation_ds = ray.data.from_huggingface(small_eval_dataset)\n", + " # Model\n", + " model = AutoModelForSequenceClassification.from_pretrained(\n", + " \"distilbert-base-uncased\", num_labels=2\n", + " )\n", "\n", - " def compute_metrics(eval_pred):\n", - " metric = load_metric(\"accuracy\")\n", - " logits, labels = eval_pred\n", - " predictions = np.argmax(logits, axis=-1)\n", - " return metric.compute(predictions=predictions, references=labels)\n", + " def compute_metrics(eval_pred):\n", + " metric = load_metric(\"accuracy\")\n", + " logits, labels = eval_pred\n", + " predictions = np.argmax(logits, axis=-1)\n", + " return metric.compute(predictions=predictions, references=labels)\n", "\n", - " def trainer_init_per_worker(train_dataset, eval_dataset, **config):\n", - " model = AutoModelForSequenceClassification.from_pretrained(\"distilbert-base-uncased\", num_labels=2)\n", + " # Hugging Face Trainer\n", + " training_args = TrainingArguments(\n", + " output_dir=\"test_trainer\",\n", + " evaluation_strategy=\"epoch\",\n", + " save_strategy=\"epoch\",\n", + " report_to=\"none\",\n", + " )\n", "\n", - " training_args = TrainingArguments(\"/tmp/hf_imdb/test\", eval_steps=1, disable_tqdm=True, \n", - " num_train_epochs=1, skip_memory_metrics=True,\n", - " learning_rate=2e-5,\n", - " per_device_train_batch_size=16,\n", - " per_device_eval_batch_size=16, \n", - " weight_decay=0.01,)\n", - " return transformers.Trainer(\n", + " trainer = Trainer(\n", " model=model,\n", " args=training_args,\n", - " train_dataset=train_dataset,\n", - " eval_dataset=eval_dataset,\n", - " compute_metrics=compute_metrics\n", + " train_dataset=small_train_dataset,\n", + " eval_dataset=small_eval_dataset,\n", + " compute_metrics=compute_metrics,\n", " )\n", "\n", - " scaling_config = ScalingConfig(num_workers=2, use_gpu=True) #num workers is the number of gpus\n", "\n", - " # we are using the ray native HuggingFaceTrainer, but you can swap out to use non ray 
Huggingface Trainer. Both have the same method signature. \n", - " # the ray native HFTrainer has built in support for scaling to multiple GPUs\n", - " trainer = HuggingFaceTrainer(\n", - " trainer_init_per_worker=trainer_init_per_worker,\n", - " scaling_config=scaling_config,\n", - " datasets={\"train\": ray_train_ds, \"evaluation\": ray_evaluation_ds},\n", + " callback = ray.train.huggingface.transformers.RayTrainReportCallback()\n", + " trainer.add_callback(callback)\n", + "\n", + " trainer = ray.train.huggingface.transformers.prepare_trainer(trainer)\n", + "\n", + " trainer.train()\n", + "\n", + "\n", + " ray_trainer = TorchTrainer(\n", + " train_func,\n", + " scaling_config=ScalingConfig(num_workers=3, use_gpu=True),\n", + " # Configure persistent storage that is accessible across \n", + " # all worker nodes.\n", + " # Uncomment and update the RunConfig below to include your storage details.\n", + " # run_config=ray.train.RunConfig(storage_path=\"storage path\"),\n", " )\n", - " result = trainer.fit()" + " result: ray.train.Result = ray_trainer.fit()" ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 44846f61..d7d4d69d 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -152,15 +152,13 @@ "assert ray_cluster_uri, \"Ray cluster needs to be started and set before proceeding\"\n", "\n", "import ray\n", - "from ray.air.config import ScalingConfig\n", "\n", "# reset the ray context in case there's already one. \n", "ray.shutdown()\n", "# establish connection to ray cluster\n", "\n", "#install additional libraries that will be required for model training\n", - "runtime_env = {\"pip\": [\"transformers\", \"datasets\", \"evaluate\", \"pyarrow<7.0.0\", \"accelerate\"]}\n", - "\n", + "runtime_env = {\"pip\": [\"transformers==4.41.2\", \"datasets==2.17.0\", \"accelerate==0.31.0\", \"scikit-learn==1.5.0\"]}\n", "# NOTE: This will work for in-cluster notebook servers (RHODS/ODH), but not for local machines\n", "# To see how to connect from your laptop, go to demo-notebooks/additional-demos/local_interactive.ipynb\n", "ray.init(address=ray_cluster_uri, runtime_env=runtime_env)\n", @@ -173,7 +171,7 @@ "id": "9711030b", "metadata": {}, "source": [ - "Now that we are connected (and have passed in some package requirements), let's try writing some training code for a DistilBERT transformer model via HuggingFace (using IMDB dataset):" + "Now that we are connected (and have passed in some package requirements), let's try writing some training code:" ] }, { @@ -185,66 +183,83 @@ "source": [ "@ray.remote\n", "def train_fn():\n", - " from datasets import load_dataset\n", - " import transformers\n", - " from transformers import AutoTokenizer, TrainingArguments\n", - " from transformers import AutoModelForSequenceClassification\n", + " import os\n", " import numpy as np\n", - " from datasets import load_metric\n", - " import ray\n", - " from ray import tune\n", - " from ray.train.huggingface import HuggingFaceTrainer\n", - "\n", - " dataset = load_dataset(\"imdb\")\n", - " tokenizer = AutoTokenizer.from_pretrained(\"distilbert-base-uncased\")\n", - "\n", - " def tokenize_function(examples):\n", - " return tokenizer(examples[\"text\"], padding=\"max_length\", truncation=True)\n", + " from datasets import load_dataset, load_metric\n", + " import transformers\n", + " from transformers import (\n", + " 
Trainer,\n", + " TrainingArguments,\n", + " AutoTokenizer,\n", + " AutoModelForSequenceClassification,\n", + " )\n", + " import ray.train.huggingface.transformers\n", + " from ray.train import ScalingConfig\n", + " from ray.train.torch import TorchTrainer\n", "\n", - " tokenized_datasets = dataset.map(tokenize_function, batched=True)\n", + " # When running in a multi-node cluster you will need persistent storage that is accessible across all worker nodes. \n", + " # See www.github.com/project-codeflare/codeflare-sdk/tree/main/docs/s3-compatible-storage.md for more information.\n", + " \n", + " def train_func():\n", + " # Datasets\n", + " dataset = load_dataset(\"imdb\")\n", + " tokenizer = AutoTokenizer.from_pretrained(\"distilbert-base-uncased\")\n", "\n", - " #using a fraction of dataset but you can run with the full dataset\n", - " small_train_dataset = tokenized_datasets[\"train\"].shuffle(seed=42).select(range(100))\n", - " small_eval_dataset = tokenized_datasets[\"test\"].shuffle(seed=42).select(range(100))\n", + " def tokenize_function(examples):\n", + " return tokenizer(examples[\"text\"], padding=\"max_length\", truncation=True)\n", "\n", - " print(f\"len of train {small_train_dataset} and test {small_eval_dataset}\")\n", + " small_train_dataset = (\n", + " dataset[\"train\"].select(range(100)).map(tokenize_function, batched=True)\n", + " )\n", + " small_eval_dataset = (\n", + " dataset[\"test\"].select(range(100)).map(tokenize_function, batched=True)\n", + " )\n", "\n", - " ray_train_ds = ray.data.from_huggingface(small_train_dataset)\n", - " ray_evaluation_ds = ray.data.from_huggingface(small_eval_dataset)\n", + " # Model\n", + " model = AutoModelForSequenceClassification.from_pretrained(\n", + " \"distilbert-base-uncased\", num_labels=2\n", + " )\n", "\n", - " def compute_metrics(eval_pred):\n", - " metric = load_metric(\"accuracy\")\n", - " logits, labels = eval_pred\n", - " predictions = np.argmax(logits, axis=-1)\n", - " return metric.compute(predictions=predictions, references=labels)\n", + " def compute_metrics(eval_pred):\n", + " metric = load_metric(\"accuracy\")\n", + " logits, labels = eval_pred\n", + " predictions = np.argmax(logits, axis=-1)\n", + " return metric.compute(predictions=predictions, references=labels)\n", "\n", - " def trainer_init_per_worker(train_dataset, eval_dataset, **config):\n", - " model = AutoModelForSequenceClassification.from_pretrained(\"distilbert-base-uncased\", num_labels=2)\n", + " # Hugging Face Trainer\n", + " training_args = TrainingArguments(\n", + " output_dir=\"test_trainer\",\n", + " evaluation_strategy=\"epoch\",\n", + " save_strategy=\"epoch\",\n", + " report_to=\"none\",\n", + " )\n", "\n", - " training_args = TrainingArguments(\"/tmp/hf_imdb/test\", eval_steps=1, disable_tqdm=True, \n", - " num_train_epochs=1, skip_memory_metrics=True,\n", - " learning_rate=2e-5,\n", - " per_device_train_batch_size=16,\n", - " per_device_eval_batch_size=16, \n", - " weight_decay=0.01,)\n", - " return transformers.Trainer(\n", + " trainer = Trainer(\n", " model=model,\n", " args=training_args,\n", - " train_dataset=train_dataset,\n", - " eval_dataset=eval_dataset,\n", - " compute_metrics=compute_metrics\n", + " train_dataset=small_train_dataset,\n", + " eval_dataset=small_eval_dataset,\n", + " compute_metrics=compute_metrics,\n", " )\n", "\n", - " scaling_config = ScalingConfig(num_workers=2, use_gpu=True) #num workers is the number of gpus\n", "\n", - " # we are using the ray native HuggingFaceTrainer, but you can swap out to use 
non ray Huggingface Trainer. Both have the same method signature. \n", - " # the ray native HFTrainer has built in support for scaling to multiple GPUs\n", - " trainer = HuggingFaceTrainer(\n", - " trainer_init_per_worker=trainer_init_per_worker,\n", - " scaling_config=scaling_config,\n", - " datasets={\"train\": ray_train_ds, \"evaluation\": ray_evaluation_ds},\n", + " callback = ray.train.huggingface.transformers.RayTrainReportCallback()\n", + " trainer.add_callback(callback)\n", + "\n", + " trainer = ray.train.huggingface.transformers.prepare_trainer(trainer)\n", + "\n", + " trainer.train()\n", + "\n", + "\n", + " ray_trainer = TorchTrainer(\n", + " train_func,\n", + " scaling_config=ScalingConfig(num_workers=3, use_gpu=True),\n", + " # Configure persistent storage that is accessible across \n", + " # all worker nodes.\n", + " # Uncomment and update the RunConfig below to include your storage details.\n", + " # run_config=ray.train.RunConfig(storage_path=\"storage path\"),\n", " )\n", - " result = trainer.fit()" + " result: ray.train.Result = ray_trainer.fit()" ] }, { diff --git a/tests/unit_test.py b/tests/unit_test.py index 4b48b173..db908df6 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -2035,6 +2035,10 @@ def custom_side_effect(group, version, namespace, plural, **kwargs): "items" ], ) + mocker.patch( + "codeflare_sdk.utils.generate_yaml.local_queue_exists", + return_value="true", + ) cluster = get_cluster("quicktest") cluster_config = cluster.config From e863e29bbf3766781ea6cc6332c0628d49201430 Mon Sep 17 00:00:00 2001 From: Antonin Stefanutti Date: Thu, 20 Jun 2024 19:56:28 +0200 Subject: [PATCH 268/496] Remove codeflare-torchx dependency --- poetry.lock | 2274 ++++++++++++++++++++++++------------------------ pyproject.toml | 1 - 2 files changed, 1125 insertions(+), 1150 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2bcedf96..1868163e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,88 +1,88 @@ -# This file is automatically @generated by Poetry 1.8.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "aiohttp" -version = "3.9.1" +version = "3.9.5" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, - {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, - {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, - {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, - {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, - {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, - {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, - {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, - {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, - {file = 
"aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, - {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, - {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, - {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, + {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, + {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = 
"sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, + {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, + {file = 
"aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, + {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, + {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, + {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, + {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, + {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, + {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, ] [package.dependencies] @@ -124,17 +124,6 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" -[[package]] -name = "appnope" -version = "0.1.4" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = ">=3.6" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - [[package]] name = "asttokens" version = "2.4.1" @@ -166,61 +155,57 @@ files = [ [[package]] name = "attrs" -version = "23.1.0" +version = "23.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, - {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = 
"sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, ] [package.extras] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[docs,tests]", "pre-commit"] +dev = ["attrs[tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] - -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -optional = false -python-versions = "*" -files = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] [[package]] name = "bcrypt" -version = "4.0.1" +version = "4.1.3" description = "Modern password hashing for your software and your servers" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = 
"bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, + {file = "bcrypt-4.1.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:48429c83292b57bf4af6ab75809f8f4daf52aa5d480632e53707805cc1ce9b74"}, + {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8bea4c152b91fd8319fef4c6a790da5c07840421c2b785084989bf8bbb7455"}, + {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d3b317050a9a711a5c7214bf04e28333cf528e0ed0ec9a4e55ba628d0f07c1a"}, + {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:094fd31e08c2b102a14880ee5b3d09913ecf334cd604af27e1013c76831f7b05"}, + {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4fb253d65da30d9269e0a6f4b0de32bd657a0208a6f4e43d3e645774fb5457f3"}, + {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:193bb49eeeb9c1e2db9ba65d09dc6384edd5608d9d672b4125e9320af9153a15"}, + {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8cbb119267068c2581ae38790e0d1fbae65d0725247a930fc9900c285d95725d"}, + {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6cac78a8d42f9d120b3987f82252bdbeb7e6e900a5e1ba37f6be6fe4e3848286"}, + {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01746eb2c4299dd0ae1670234bf77704f581dd72cc180f444bfe74eb80495b64"}, + {file = "bcrypt-4.1.3-cp37-abi3-win32.whl", hash = "sha256:037c5bf7c196a63dcce75545c8874610c600809d5d82c305dd327cd4969995bf"}, + {file = "bcrypt-4.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:8a893d192dfb7c8e883c4576813bf18bb9d59e2cfd88b68b725990f033f1b978"}, + {file = "bcrypt-4.1.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d4cf6ef1525f79255ef048b3489602868c47aea61f375377f0d00514fe4a78c"}, + {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5698ce5292a4e4b9e5861f7e53b1d89242ad39d54c3da451a93cac17b61921a"}, + {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec3c2e1ca3e5c4b9edb94290b356d082b721f3f50758bce7cce11d8a7c89ce84"}, + {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3a5be252fef513363fe281bafc596c31b552cf81d04c5085bc5dac29670faa08"}, + {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5f7cd3399fbc4ec290378b541b0cf3d4398e4737a65d0f938c7c0f9d5e686611"}, + {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = 
"sha256:c4c8d9b3e97209dd7111bf726e79f638ad9224b4691d1c7cfefa571a09b1b2d6"}, + {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:31adb9cbb8737a581a843e13df22ffb7c84638342de3708a98d5c986770f2834"}, + {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:551b320396e1d05e49cc18dd77d970accd52b322441628aca04801bbd1d52a73"}, + {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6717543d2c110a155e6821ce5670c1f512f602eabb77dba95717ca76af79867d"}, + {file = "bcrypt-4.1.3-cp39-abi3-win32.whl", hash = "sha256:6004f5229b50f8493c49232b8e75726b568535fd300e5039e255d919fc3a07f2"}, + {file = "bcrypt-4.1.3-cp39-abi3-win_amd64.whl", hash = "sha256:2505b54afb074627111b5a8dc9b6ae69d0f01fea65c2fcaea403448c503d3991"}, + {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:cb9c707c10bddaf9e5ba7cdb769f3e889e60b7d4fea22834b261f51ca2b89fed"}, + {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9f8ea645eb94fb6e7bea0cf4ba121c07a3a182ac52876493870033141aa687bc"}, + {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f44a97780677e7ac0ca393bd7982b19dbbd8d7228c1afe10b128fd9550eef5f1"}, + {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d84702adb8f2798d813b17d8187d27076cca3cd52fe3686bb07a9083930ce650"}, + {file = "bcrypt-4.1.3.tar.gz", hash = "sha256:2ee15dd749f5952fe3f0430d0ff6b74082e159c50332a1413d51b5689cf06623"}, ] [package.extras] @@ -229,24 +214,24 @@ typecheck = ["mypy"] [[package]] name = "cachetools" -version = "5.3.1" +version = "5.3.3" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, - {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, ] [[package]] name = "certifi" -version = "2023.7.22" +version = "2024.6.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, ] [[package]] @@ -315,101 +300,101 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, - {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, - {file = 
"charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, - {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -426,34 +411,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "codeflare-torchx" -version = "0.6.0.dev2" -description = "TorchX SDK and Components" -optional = false -python-versions = ">=3.7" -files = [ - {file = "codeflare_torchx-0.6.0.dev2-py3-none-any.whl", hash = "sha256:d303efffb9b1e105390ed672a3358de40174146530929df83c7d7af27372fbcc"}, -] - -[package.dependencies] -docker = "*" -docstring-parser = "0.8.1" -filelock = "*" -fsspec = "*" -importlib-metadata = "*" -pyre-extensions = "*" -pyyaml = "*" -tabulate = "*" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -dev = ["aiobotocore (==2.4.2)", "ax-platform[mysql] (==0.2.3)", "black (==23.3.0)", "boto3 (==1.24.59)", "captum (>=0.4.0)", "flake8 (==3.9.0)", "fsspec[s3] (==2023.1.0)", "google-api-core", "google-cloud-batch (>=0.5.0)", "google-cloud-logging (>=3.0.0)", "google-cloud-runtimeconfig (>=0.33.2)", "hydra-core", "ipython", "kfp (==1.8.22)", "mlflow-skinny", "moto (==4.1.6)", "pyre-check", "pyre-extensions", "pytest", "pytorch-lightning (==1.5.10)", "ray[default]", "torch (>=1.10.0)", "torch-model-archiver (>=0.4.2)", "torchmetrics (<0.11.0)", "torchserve (>=0.4.2)", "torchtext (>=0.11.0)", "torchvision (>=0.11.1)", "ts (==0.5.1)", "usort (==1.0.2)"] -gcp-batch = ["google-cloud-batch (>=0.5.0)", "google-cloud-logging (>=3.0.0)", "google-cloud-runtimeconfig (>=0.33.2)"] -kfp = ["kfp (==1.6.2)"] -kubernetes = ["kubernetes (>=11)"] -ray = ["ray (>=1.12.1)"] - [[package]] name = "colorama" version = "0.4.6" @@ -467,13 +424,13 @@ files = [ [[package]] name = "colorful" -version = "0.5.5" +version = "0.5.6" description = "Terminal string styling done right, in Python." 
optional = false python-versions = "*" files = [ - {file = "colorful-0.5.5-py2.py3-none-any.whl", hash = "sha256:62c187e27c1433db9463ff93b1451898d1e7e23a7e553583fd9daeb6325182e4"}, - {file = "colorful-0.5.5.tar.gz", hash = "sha256:66f8c1264b2a26f7293b96a03bb7a76c4bc8b9634369a0bffdcd12d618056a1d"}, + {file = "colorful-0.5.6-py2.py3-none-any.whl", hash = "sha256:eab8c1c809f5025ad2b5238a50bd691e26850da8cac8f90d660ede6ea1af9f1e"}, + {file = "colorful-0.5.6.tar.gz", hash = "sha256:b56d5c01db1dac4898308ea889edcb113fbee3e6ec5df4bacffd61d5241b5b8d"}, ] [package.dependencies] @@ -636,55 +593,24 @@ files = [ [[package]] name = "distlib" -version = "0.3.7" +version = "0.3.8" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, - {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, -] - -[[package]] -name = "docker" -version = "6.1.3" -description = "A Python library for the Docker Engine API." -optional = false -python-versions = ">=3.7" -files = [ - {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, - {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, -] - -[package.dependencies] -packaging = ">=14.0" -pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} -requests = ">=2.26.0" -urllib3 = ">=1.26.0" -websocket-client = ">=0.32.0" - -[package.extras] -ssh = ["paramiko (>=2.4.3)"] - -[[package]] -name = "docstring-parser" -version = "0.8.1" -description = "\"Parse Python docstrings in reST, Google and Numpydoc format\"" -optional = false -python-versions = ">=3.5" -files = [ - {file = "docstring_parser-0.8.1.tar.gz", hash = "sha256:dd68bbe33446e9dbdec087fd18bad8f6dc8eedda4c5c2321208951f3acfc5e04"}, + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] [[package]] name = "exceptiongroup" -version = "1.1.3" +version = "1.2.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [package.extras] @@ -706,99 +632,115 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] [[package]] name = "filelock" -version = "3.12.4" +version = "3.15.3" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.12.4-py3-none-any.whl", hash = "sha256:08c21d87ded6e2b9da6728c3dff51baf1dcecf973b768ef35bcbc3447edb9ad4"}, - {file = "filelock-3.12.4.tar.gz", hash = "sha256:2e6f249f1f3654291606e046b09f1fd5eac39b360664c27f5aad072012f8bcbd"}, + {file = "filelock-3.15.3-py3-none-any.whl", hash = "sha256:0151273e5b5d6cf753a61ec83b3a9b7d8821c39ae9af9d7ecf2f9e2f17404103"}, + {file = "filelock-3.15.3.tar.gz", hash = "sha256:e1199bf5194a2277273dacd50269f0d87d0682088a3c561c15674ea9005d8635"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] -typing = ["typing-extensions (>=4.7.1)"] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] +typing = ["typing-extensions (>=4.8)"] [[package]] name = "frozenlist" -version = "1.4.0" +version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" files = [ - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, - {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, - {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, - {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, - {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, - {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, - {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, - {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, - {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, - {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, - {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, - {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, - {file = 
"frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, - {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, - {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, - {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, - {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, - {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, - {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, - {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, - {file = 
"frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, - {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, - {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, - {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + 
{file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, ] [[package]] name = "fsspec" -version = "2023.9.2" +version = "2024.6.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2023.9.2-py3-none-any.whl", hash = "sha256:603dbc52c75b84da501b9b2ec8c11e1f61c25984c4a0dda1f129ef391fbfc9b4"}, - {file = "fsspec-2023.9.2.tar.gz", hash = "sha256:80bfb8c70cc27b2178cc62a935ecf242fc6e8c3fb801f9c571fc01b1e715ba7d"}, + {file = "fsspec-2024.6.0-py3-none-any.whl", hash = "sha256:58d7122eb8a1a46f7f13453187bfea4972d66bf01618d37366521b1998034cee"}, + {file = "fsspec-2024.6.0.tar.gz", hash = "sha256:f579960a56e6d8038a9efc8f9c77279ec12e6299aa86b0769a7e9c46b94527c2"}, ] [package.extras] @@ -806,7 +748,8 @@ abfs = ["adlfs"] adl = ["adlfs"] arrow = ["pyarrow (>=1)"] dask = ["dask", "distributed"] -devel = ["pytest", "pytest-cov"] +dev = ["pre-commit", "ruff"] +doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"] dropbox = ["dropbox", "dropboxdrivefs", "requests"] full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] fuse = ["fusepy"] @@ -816,46 +759,49 @@ github = ["requests"] gs = ["gcsfs"] gui = ["panel"] hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] libarchive = 
["libarchive-c"] oci = ["ocifs"] s3 = ["s3fs"] sftp = ["paramiko"] smb = ["smbprotocol"] ssh = ["paramiko"] +test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] +test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] +test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] tqdm = ["tqdm"] [[package]] name = "google-api-core" -version = "2.15.0" +version = "2.8.0" description = "Google API client core library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.6" files = [ - {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, - {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, + {file = "google-api-core-2.8.0.tar.gz", hash = "sha256:065bb8e11c605fd232707ae50963dc1c8af5b3c95b4568887515985e6c1156b3"}, + {file = "google_api_core-2.8.0-py3-none-any.whl", hash = "sha256:1b9f59236ce1bae9a687c1d4f22957e79a2669e53d032893f6bf0fca54f6931d"}, ] [package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" +google-auth = ">=1.25.0,<3.0dev" +googleapis-common-protos = ">=1.52.0,<2.0dev" +protobuf = ">=3.12.0" +requests = ">=2.18.0,<3.0.0dev" [package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] +grpcgcp = ["grpcio-gcp (>=0.2.2)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2)"] [[package]] name = "google-auth" -version = "2.23.3" +version = "2.30.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.23.3.tar.gz", hash = "sha256:6864247895eea5d13b9c57c9e03abb49cb94ce2dc7c58e91cba3248c7477c9e3"}, - {file = "google_auth-2.23.3-py2.py3-none-any.whl", hash = "sha256:a8f4608e65c244ead9e0538f181a96c6e11199ec114d41f1d7b1bffa96937bda"}, + {file = "google-auth-2.30.0.tar.gz", hash = "sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688"}, + {file = "google_auth-2.30.0-py2.py3-none-any.whl", hash = "sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5"}, ] [package.dependencies] @@ -872,116 +818,108 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "googleapis-common-protos" -version = "1.62.0" +version = "1.63.1" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" 
files = [ - {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, - {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, + {file = "googleapis-common-protos-1.63.1.tar.gz", hash = "sha256:c6442f7a0a6b2a80369457d79e6672bb7dcbaab88e0848302497e3ec80780a6a"}, + {file = "googleapis_common_protos-1.63.1-py2.py3-none-any.whl", hash = "sha256:0e1c2cdfcbc354b76e4a211a35ea35d6926a835cba1377073c4861db904a1877"}, ] [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpcio" -version = "1.60.0" +version = "1.64.1" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"}, - {file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae"}, - {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508"}, - {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b"}, - {file = "grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d"}, - {file = "grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df"}, - {file = "grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd"}, - {file = "grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253"}, - {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444"}, - {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d"}, - {file = "grpcio-1.60.0-cp311-cp311-win32.whl", hash = "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320"}, - {file = "grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b"}, - {file = "grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18"}, - {file = "grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55"}, - {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca"}, - {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5"}, - {file = "grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951"}, - {file = "grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a"}, - {file = "grpcio-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415"}, - {file = "grpcio-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6"}, - {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619"}, - {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179"}, - {file = "grpcio-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b"}, - {file = "grpcio-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e"}, - {file = "grpcio-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0"}, - {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390"}, - {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629"}, - {file = "grpcio-1.60.0-cp38-cp38-win32.whl", hash = "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860"}, - {file = "grpcio-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08"}, - {file = "grpcio-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968"}, - {file = "grpcio-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328"}, - {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf"}, - {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6"}, - {file = "grpcio-1.60.0-cp39-cp39-win32.whl", hash = "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03"}, - {file = "grpcio-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353"}, - {file = "grpcio-1.60.0.tar.gz", hash = "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96"}, + {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"}, + {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"}, + {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"}, + {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"}, + {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"}, + {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"}, + {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"}, + {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"}, + {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"}, + {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"}, + {file = 
"grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"}, + {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"}, + {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"}, + {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"}, + {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"}, + {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"}, + {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"}, + {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"}, + {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"}, + {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"}, + {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"}, + {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"}, + {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"}, + {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"}, + {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"}, + {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"}, + {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"}, + {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"}, + {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"}, + {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"}, + {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"}, + {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"}, + {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"}, + {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"}, + {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"}, + {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"}, + {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"}, + {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"}, + {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"}, + {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"}, + {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"}, + {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"}, + {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"}, + {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"}, + {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"}, + {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.60.0)"] +protobuf = ["grpcio-tools (>=1.64.1)"] [[package]] name = "idna" -version = "3.4" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] name = "importlib-metadata" -version = "6.8.0" +version = "7.2.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, + {file = "importlib_metadata-7.2.0-py3-none-any.whl", hash = "sha256:04e4aad329b8b948a5711d394fa8759cb80f009225441b4f2a02bd4d8e5f426c"}, + {file = "importlib_metadata-7.2.0.tar.gz", hash = "sha256:3ff4519071ed42740522d494d04819b666541b9752c43012f85afb2cc220fcc6"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = 
["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "iniconfig" @@ -996,42 +934,40 @@ files = [ [[package]] name = "ipython" -version = "8.12.3" +version = "8.18.1" description = "IPython: Productive Interactive Computing" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "ipython-8.12.3-py3-none-any.whl", hash = "sha256:b0340d46a933d27c657b211a329d0be23793c36595acf9e6ef4164bc01a1804c"}, - {file = "ipython-8.12.3.tar.gz", hash = "sha256:3910c4b54543c2ad73d06579aa771041b7d5707b033bd488669b4cf544e3b363"}, + {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, + {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, ] [package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -pickleshare = "*" -prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +prompt-toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" typing-extensions = {version = "*", markers = "python_version < \"3.10\""} [package.extras] -all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +test = ["pickleshare", "pytest 
(<7.1)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] [[package]] name = "ipywidgets" @@ -1092,13 +1028,13 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonschema" -version = "4.19.1" +version = "4.22.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, - {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, + {file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, + {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, ] [package.dependencies] @@ -1113,27 +1049,27 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.7.1" +version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, - {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, ] [package.dependencies] -referencing = ">=0.28.0" +referencing = ">=0.31.0" [[package]] name = "jupyterlab-widgets" -version = "3.0.10" +version = "3.0.11" description = "Jupyter interactive widgets for JupyterLab" optional = false python-versions = ">=3.7" files = [ - {file = "jupyterlab_widgets-3.0.10-py3-none-any.whl", hash = "sha256:dd61f3ae7a5a7f80299e14585ce6cf3d6925a96c9103c978eda293197730cb64"}, - {file = "jupyterlab_widgets-3.0.10.tar.gz", hash = "sha256:04f2ac04976727e4f9d0fa91cdc2f1ab860f965e504c29dbd6a65c882c9d04c0"}, + {file = "jupyterlab_widgets-3.0.11-py3-none-any.whl", hash = "sha256:78287fd86d20744ace330a61625024cf5521e1c012a352ddc0a3cdc2348becd0"}, + {file = "jupyterlab_widgets-3.0.11.tar.gz", hash = "sha256:dd5ac679593c969af29c9bed054c24f26842baa51352114736756bc035deee27"}, ] [[package]] @@ -1164,13 +1100,13 @@ adal = ["adal (>=1.0.2)"] [[package]] name = "mako" -version = "1.2.4" +version = "1.3.5" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, - {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, + {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, + {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, ] [package.dependencies] @@ -1183,13 +1119,13 @@ testing = ["pytest"] [[package]] name = "markdown" -version = "3.5" +version = "3.6" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.5-py3-none-any.whl", hash = "sha256:4afb124395ce5fc34e6d9886dab977fd9ae987fc6e85689f08278cf0c69d4bf3"}, - {file = "Markdown-3.5.tar.gz", hash = "sha256:a807eb2e4778d9156c8f07876c6e4d50b5494c5665c4834f67b06459dfd877b3"}, + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, ] [package.dependencies] @@ -1201,71 +1137,71 @@ testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = 
"sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -1339,198 +1275,220 @@ test = ["Cython", "greenlet", "ipython", "pytest", "pytest-cov", "setuptools"] [[package]] name = "msgpack" -version = "1.0.7" +version = "1.0.8" description = "MessagePack serializer" optional = false python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04ad6069c86e531682f9e1e71b71c1c3937d6014a7c3e9edd2aa81ad58842862"}, - {file = "msgpack-1.0.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cca1b62fe70d761a282496b96a5e51c44c213e410a964bdffe0928e611368329"}, - {file = "msgpack-1.0.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e50ebce52f41370707f1e21a59514e3375e3edd6e1832f5e5235237db933c98b"}, - {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7b4f35de6a304b5533c238bee86b670b75b03d31b7797929caa7a624b5dda6"}, - {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:28efb066cde83c479dfe5a48141a53bc7e5f13f785b92ddde336c716663039ee"}, - {file = "msgpack-1.0.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cb14ce54d9b857be9591ac364cb08dc2d6a5c4318c1182cb1d02274029d590d"}, - {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b573a43ef7c368ba4ea06050a957c2a7550f729c31f11dd616d2ac4aba99888d"}, - {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ccf9a39706b604d884d2cb1e27fe973bc55f2890c52f38df742bc1d79ab9f5e1"}, - {file = "msgpack-1.0.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cb70766519500281815dfd7a87d3a178acf7ce95390544b8c90587d76b227681"}, - {file = "msgpack-1.0.7-cp310-cp310-win32.whl", hash = "sha256:b610ff0f24e9f11c9ae653c67ff8cc03c075131401b3e5ef4b82570d1728f8a9"}, - {file = "msgpack-1.0.7-cp310-cp310-win_amd64.whl", hash = "sha256:a40821a89dc373d6427e2b44b572efc36a2778d3f543299e2f24eb1a5de65415"}, - {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:576eb384292b139821c41995523654ad82d1916da6a60cff129c715a6223ea84"}, - {file = "msgpack-1.0.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:730076207cb816138cf1af7f7237b208340a2c5e749707457d70705715c93b93"}, - {file = "msgpack-1.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:85765fdf4b27eb5086f05ac0491090fc76f4f2b28e09d9350c31aac25a5aaff8"}, - {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3476fae43db72bd11f29a5147ae2f3cb22e2f1a91d575ef130d2bf49afd21c46"}, - {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d4c80667de2e36970ebf74f42d1088cc9ee7ef5f4e8c35eee1b40eafd33ca5b"}, - {file = "msgpack-1.0.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b0bf0effb196ed76b7ad883848143427a73c355ae8e569fa538365064188b8e"}, - {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f9a7c509542db4eceed3dcf21ee5267ab565a83555c9b88a8109dcecc4709002"}, - {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:84b0daf226913133f899ea9b30618722d45feffa67e4fe867b0b5ae83a34060c"}, - {file = "msgpack-1.0.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ec79ff6159dffcc30853b2ad612ed572af86c92b5168aa3fc01a67b0fa40665e"}, - {file = "msgpack-1.0.7-cp311-cp311-win32.whl", hash = "sha256:3e7bf4442b310ff154b7bb9d81eb2c016b7d597e364f97d72b1acc3817a0fdc1"}, - {file = "msgpack-1.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3f0c8c6dfa6605ab8ff0611995ee30d4f9fcff89966cf562733b4008a3d60d82"}, - {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f0936e08e0003f66bfd97e74ee530427707297b0d0361247e9b4f59ab78ddc8b"}, - {file = "msgpack-1.0.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98bbd754a422a0b123c66a4c341de0474cad4a5c10c164ceed6ea090f3563db4"}, - {file = "msgpack-1.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b291f0ee7961a597cbbcc77709374087fa2a9afe7bdb6a40dbbd9b127e79afee"}, - {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebbbba226f0a108a7366bf4b59bf0f30a12fd5e75100c630267d94d7f0ad20e5"}, - {file = "msgpack-1.0.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e2d69948e4132813b8d1131f29f9101bc2c915f26089a6d632001a5c1349672"}, - {file = 
"msgpack-1.0.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdf38ba2d393c7911ae989c3bbba510ebbcdf4ecbdbfec36272abe350c454075"}, - {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:993584fc821c58d5993521bfdcd31a4adf025c7d745bbd4d12ccfecf695af5ba"}, - {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:52700dc63a4676669b341ba33520f4d6e43d3ca58d422e22ba66d1736b0a6e4c"}, - {file = "msgpack-1.0.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e45ae4927759289c30ccba8d9fdce62bb414977ba158286b5ddaf8df2cddb5c5"}, - {file = "msgpack-1.0.7-cp312-cp312-win32.whl", hash = "sha256:27dcd6f46a21c18fa5e5deed92a43d4554e3df8d8ca5a47bf0615d6a5f39dbc9"}, - {file = "msgpack-1.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:7687e22a31e976a0e7fc99c2f4d11ca45eff652a81eb8c8085e9609298916dcf"}, - {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b6ccc0c85916998d788b295765ea0e9cb9aac7e4a8ed71d12e7d8ac31c23c95"}, - {file = "msgpack-1.0.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:235a31ec7db685f5c82233bddf9858748b89b8119bf4538d514536c485c15fe0"}, - {file = "msgpack-1.0.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab3db8bab4b7e635c1c97270d7a4b2a90c070b33cbc00c99ef3f9be03d3e1f7"}, - {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bfdd914e55e0d2c9e1526de210f6fe8ffe9705f2b1dfcc4aecc92a4cb4b533d"}, - {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36e17c4592231a7dbd2ed09027823ab295d2791b3b1efb2aee874b10548b7524"}, - {file = "msgpack-1.0.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38949d30b11ae5f95c3c91917ee7a6b239f5ec276f271f28638dec9156f82cfc"}, - {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ff1d0899f104f3921d94579a5638847f783c9b04f2d5f229392ca77fba5b82fc"}, - {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dc43f1ec66eb8440567186ae2f8c447d91e0372d793dfe8c222aec857b81a8cf"}, - {file = "msgpack-1.0.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dd632777ff3beaaf629f1ab4396caf7ba0bdd075d948a69460d13d44357aca4c"}, - {file = "msgpack-1.0.7-cp38-cp38-win32.whl", hash = "sha256:4e71bc4416de195d6e9b4ee93ad3f2f6b2ce11d042b4d7a7ee00bbe0358bd0c2"}, - {file = "msgpack-1.0.7-cp38-cp38-win_amd64.whl", hash = "sha256:8f5b234f567cf76ee489502ceb7165c2a5cecec081db2b37e35332b537f8157c"}, - {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfef2bb6ef068827bbd021017a107194956918ab43ce4d6dc945ffa13efbc25f"}, - {file = "msgpack-1.0.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:484ae3240666ad34cfa31eea7b8c6cd2f1fdaae21d73ce2974211df099a95d81"}, - {file = "msgpack-1.0.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3967e4ad1aa9da62fd53e346ed17d7b2e922cba5ab93bdd46febcac39be636fc"}, - {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dd178c4c80706546702c59529ffc005681bd6dc2ea234c450661b205445a34d"}, - {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ffbc252eb0d229aeb2f9ad051200668fc3a9aaa8994e49f0cb2ffe2b7867e7"}, - {file = "msgpack-1.0.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:822ea70dc4018c7e6223f13affd1c5c30c0f5c12ac1f96cd8e9949acddb48a61"}, - {file = 
"msgpack-1.0.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:384d779f0d6f1b110eae74cb0659d9aa6ff35aaf547b3955abf2ab4c901c4819"}, - {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f64e376cd20d3f030190e8c32e1c64582eba56ac6dc7d5b0b49a9d44021b52fd"}, - {file = "msgpack-1.0.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ed82f5a7af3697b1c4786053736f24a0efd0a1b8a130d4c7bfee4b9ded0f08f"}, - {file = "msgpack-1.0.7-cp39-cp39-win32.whl", hash = "sha256:f26a07a6e877c76a88e3cecac8531908d980d3d5067ff69213653649ec0f60ad"}, - {file = "msgpack-1.0.7-cp39-cp39-win_amd64.whl", hash = "sha256:1dc93e8e4653bdb5910aed79f11e165c85732067614f180f70534f056da97db3"}, - {file = "msgpack-1.0.7.tar.gz", hash = "sha256:572efc93db7a4d27e404501975ca6d2d9775705c2d922390d878fcf768d92c87"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, + {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, + {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, + {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, + {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, + {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, + {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, + {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, + {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, + {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, + {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, + {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, + {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, + {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, + {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, + {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, + {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, + {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, + {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, + {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, + {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, + {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, + {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, + {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, + {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, + {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, + {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, ] [[package]] name = "multidict" -version = "6.0.4" +version = "6.0.5" description = "multidict implementation" optional = false python-versions = ">=3.7" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = 
"multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, -] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = 
"multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = 
"multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] [[package]] name = "numpy" -version = "1.24.4" +version = "2.0.0" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.8" -files = [ - {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, - {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, - {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, - {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, - {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, - {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, - {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, - {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, - {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, - {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, - {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, - {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, - {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, - {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, - {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, - {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, - {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, - {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, - {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +python-versions = ">=3.9" +files = [ + {file = "numpy-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:04494f6ec467ccb5369d1808570ae55f6ed9b5809d7f035059000a37b8d7e86f"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2635dbd200c2d6faf2ef9a0d04f0ecc6b13b3cad54f7c67c61155138835515d2"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0a43f0974d501842866cc83471bdb0116ba0dffdbaac33ec05e6afed5b615238"}, + {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8d83bb187fb647643bd56e1ae43f273c7f4dbcdf94550d7938cfc32566756514"}, + {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e843d186c8fb1b102bef3e2bc35ef81160ffef3194646a7fdd6a73c6b97196"}, + {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7696c615765091cc5093f76fd1fa069870304beaccfd58b5dcc69e55ef49c1"}, + {file = "numpy-2.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b4c76e3d4c56f145d41b7b6751255feefae92edbc9a61e1758a98204200f30fc"}, + {file = "numpy-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd3a644e4807e73b4e1867b769fbf1ce8c5d80e7caaef0d90dcdc640dfc9787"}, + {file = "numpy-2.0.0-cp310-cp310-win32.whl", hash = "sha256:cee6cc0584f71adefe2c908856ccc98702baf95ff80092e4ca46061538a2ba98"}, + {file = "numpy-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:ed08d2703b5972ec736451b818c2eb9da80d66c3e84aed1deeb0c345fefe461b"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad0c86f3455fbd0de6c31a3056eb822fc939f81b1618f10ff3406971893b62a5"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7f387600d424f91576af20518334df3d97bc76a300a755f9a8d6e4f5cadd289"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:34f003cb88b1ba38cb9a9a4a3161c1604973d7f9d5552c38bc2f04f829536609"}, + {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b6f6a8f45d0313db07d6d1d37bd0b112f887e1369758a5419c0370ba915b3871"}, + {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f64641b42b2429f56ee08b4f427a4d2daf916ec59686061de751a55aafa22e4"}, + {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7039a136017eaa92c1848152827e1424701532ca8e8967fe480fe1569dae581"}, + {file = "numpy-2.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46e161722e0f619749d1cd892167039015b2c2817296104487cd03ed4a955995"}, + {file = "numpy-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:0e50842b2295ba8414c8c1d9d957083d5dfe9e16828b37de883f51fc53c4016f"}, + {file = "numpy-2.0.0-cp311-cp311-win32.whl", hash = "sha256:2ce46fd0b8a0c947ae047d222f7136fc4d55538741373107574271bc00e20e8f"}, + {file = "numpy-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd6acc766814ea6443628f4e6751d0da6593dae29c08c0b2606164db026970c"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:354f373279768fa5a584bac997de6a6c9bc535c482592d7a813bb0c09be6c76f"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d2f62e55a4cd9c58c1d9a1c9edaedcd857a73cb6fda875bf79093f9d9086f85"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:1e72728e7501a450288fc8e1f9ebc73d90cfd4671ebbd631f3e7857c39bd16f2"}, + {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:84554fc53daa8f6abf8e8a66e076aff6ece62de68523d9f665f32d2fc50fd66e"}, + {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73aafd1afca80afecb22718f8700b40ac7cab927b8abab3c3e337d70e10e5a2"}, + {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d9f7d256fbc804391a7f72d4a617302b1afac1112fac19b6c6cec63fe7fe8a"}, + {file = "numpy-2.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ec84b9ba0654f3b962802edc91424331f423dcf5d5f926676e0150789cb3d95"}, + {file = "numpy-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:feff59f27338135776f6d4e2ec7aeeac5d5f7a08a83e80869121ef8164b74af9"}, + {file = "numpy-2.0.0-cp312-cp312-win32.whl", hash = "sha256:c5a59996dc61835133b56a32ebe4ef3740ea5bc19b3983ac60cc32be5a665d54"}, + {file = "numpy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e61155fae27570692ad1d327e81c6cf27d535a5d7ef97648a17d922224b216de"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4554eb96f0fd263041baf16cf0881b3f5dafae7a59b1049acb9540c4d57bc8cb"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:903703372d46bce88b6920a0cd86c3ad82dae2dbef157b5fc01b70ea1cfc430f"}, + {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:3e8e01233d57639b2e30966c63d36fcea099d17c53bf424d77f088b0f4babd86"}, + {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cde1753efe513705a0c6d28f5884e22bdc30438bf0085c5c486cdaff40cd67a"}, + {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821eedb7165ead9eebdb569986968b541f9908979c2da8a4967ecac4439bae3d"}, + {file = "numpy-2.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a1712c015831da583b21c5bfe15e8684137097969c6d22e8316ba66b5baabe4"}, + {file = "numpy-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9c27f0946a3536403efb0e1c28def1ae6730a72cd0d5878db38824855e3afc44"}, + {file = "numpy-2.0.0-cp39-cp39-win32.whl", hash = "sha256:63b92c512d9dbcc37f9d81b123dec99fdb318ba38c8059afc78086fe73820275"}, + {file = "numpy-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f6bed7f840d44c08ebdb73b1825282b801799e325bcbdfa6bc5c370e5aecc65"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9416a5c2e92ace094e9f0082c5fd473502c91651fb896bc17690d6fc475128d6"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:17067d097ed036636fa79f6a869ac26df7db1ba22039d962422506640314933a"}, + {file = 
"numpy-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ecb5b0582cd125f67a629072fed6f83562d9dd04d7e03256c9829bdec027ad"}, + {file = "numpy-2.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cef04d068f5fb0518a77857953193b6bb94809a806bd0a14983a8f12ada060c9"}, + {file = "numpy-2.0.0.tar.gz", hash = "sha256:cf5d1c9e6837f8af9f92b6bd3e86d513cdc11f60fd62185cc49ec7d1aba34864"}, ] [[package]] @@ -1551,18 +1509,19 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "opencensus" -version = "0.11.3" +version = "0.11.4" description = "A stats collection and distributed tracing framework" optional = false python-versions = "*" files = [ - {file = "opencensus-0.11.3-py2.py3-none-any.whl", hash = "sha256:9c33d572059f0f0e874fc34c697a39a4193aa9cf3203f7e777df42e9edeea56a"}, - {file = "opencensus-0.11.3.tar.gz", hash = "sha256:af7a98bd51e63968144d772f346d696ed498a32dbdc4be267cd6011c4ce05da8"}, + {file = "opencensus-0.11.4-py2.py3-none-any.whl", hash = "sha256:a18487ce68bc19900336e0ff4655c5a116daf10c1b3685ece8d971bddad6a864"}, + {file = "opencensus-0.11.4.tar.gz", hash = "sha256:cbef87d8b8773064ab60e5c2a1ced58bbaa38a6d052c41aec224958ce544eff2"}, ] [package.dependencies] google-api-core = {version = ">=1.0.0,<3.0.0", markers = "python_version >= \"3.6\""} opencensus-context = ">=0.1.3" +six = ">=1.16,<2.0" [[package]] name = "opencensus-context" @@ -1593,91 +1552,97 @@ six = "*" [[package]] name = "packaging" -version = "23.2" +version = "24.1" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] name = "pandas" -version = "2.0.3" +version = "2.2.2" description = "Powerful data structures for data analysis, time series, and statistics" optional = false -python-versions = ">=3.8" -files = [ - {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, - {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, - {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, - {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, - {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, - {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, - {file = 
"pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, - {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, - {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, - {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, - {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, - {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, - {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, - {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, - {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, - {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, - {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, - {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, - {file = "pandas-2.0.3.tar.gz", hash = "sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, ] [package.dependencies] numpy = [ - {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, - {version = ">=1.21.0", markers = "python_version 
>= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] -aws = ["s3fs (>=2021.08.0)"] -clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] -compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] -computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2021.07.0)"] -gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] -hdf5 = ["tables (>=3.6.1)"] -html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] -mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] -spss = ["pyreadstat (>=1.1.2)"] -sql-other = ["SQLAlchemy (>=1.4.16)"] -test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.6.3)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] 
+fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "paramiko" -version = "3.3.1" +version = "3.4.0" description = "SSH2 protocol library" optional = false python-versions = ">=3.6" files = [ - {file = "paramiko-3.3.1-py3-none-any.whl", hash = "sha256:b7bc5340a43de4287bbe22fe6de728aa2c22468b2a849615498dd944c2f275eb"}, - {file = "paramiko-3.3.1.tar.gz", hash = "sha256:6a3777a961ac86dbef375c5f5b8d50014a1a96d0fd7f054a43bc880134b0ff77"}, + {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, + {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, ] [package.dependencies] @@ -1692,18 +1657,18 @@ invoke = ["invoke (>=2.0)"] [[package]] name = "parso" -version = "0.8.3" +version = "0.8.4" description = "A Python Parser" optional = false python-versions = ">=3.6" files = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, ] [package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] [[package]] name = "pdoc3" @@ -1734,41 +1699,31 @@ files = [ [package.dependencies] ptyprocess = ">=0.5" -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -optional = false -python-versions = "*" -files = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] - [[package]] name = "platformdirs" -version = "3.11.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, - {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1777,13 +1732,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "prometheus-client" -version = "0.19.0" +version = "0.20.0" description = "Python client for the Prometheus monitoring system." 
optional = false python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.19.0-py3-none-any.whl", hash = "sha256:c88b1e6ecf6b41cd8fb5731c7ae919bf66df6ec6fafa555cd6c0e16ca169ae92"}, - {file = "prometheus_client-0.19.0.tar.gz", hash = "sha256:4585b0d1223148c27a225b10dbec5ae9bc4c81a99a3fa80774fa6209935324e1"}, + {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, + {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, ] [package.extras] @@ -1791,13 +1746,13 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.43" +version = "3.0.47" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, - {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, ] [package.dependencies] @@ -1805,24 +1760,22 @@ wcwidth = "*" [[package]] name = "protobuf" -version = "4.24.4" +version = "5.27.1" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "protobuf-4.24.4-cp310-abi3-win32.whl", hash = "sha256:ec9912d5cb6714a5710e28e592ee1093d68c5ebfeda61983b3f40331da0b1ebb"}, - {file = "protobuf-4.24.4-cp310-abi3-win_amd64.whl", hash = "sha256:1badab72aa8a3a2b812eacfede5020472e16c6b2212d737cefd685884c191085"}, - {file = "protobuf-4.24.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e61a27f362369c2f33248a0ff6896c20dcd47b5d48239cb9720134bef6082e4"}, - {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:bffa46ad9612e6779d0e51ae586fde768339b791a50610d85eb162daeb23661e"}, - {file = "protobuf-4.24.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:b493cb590960ff863743b9ff1452c413c2ee12b782f48beca77c8da3e2ffe9d9"}, - {file = "protobuf-4.24.4-cp37-cp37m-win32.whl", hash = "sha256:dbbed8a56e56cee8d9d522ce844a1379a72a70f453bde6243e3c86c30c2a3d46"}, - {file = "protobuf-4.24.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6b7d2e1c753715dcfe9d284a25a52d67818dd43c4932574307daf836f0071e37"}, - {file = "protobuf-4.24.4-cp38-cp38-win32.whl", hash = "sha256:02212557a76cd99574775a81fefeba8738d0f668d6abd0c6b1d3adcc75503dbe"}, - {file = "protobuf-4.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:2fa3886dfaae6b4c5ed2730d3bf47c7a38a72b3a1f0acb4d4caf68e6874b947b"}, - {file = "protobuf-4.24.4-cp39-cp39-win32.whl", hash = "sha256:b77272f3e28bb416e2071186cb39efd4abbf696d682cbb5dc731308ad37fa6dd"}, - {file = "protobuf-4.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:9fee5e8aa20ef1b84123bb9232b3f4a5114d9897ed89b4b8142d81924e05d79b"}, - {file = "protobuf-4.24.4-py3-none-any.whl", hash = "sha256:80797ce7424f8c8d2f2547e2d42bfbb6c08230ce5832d6c099a37335c9c90a92"}, - {file = "protobuf-4.24.4.tar.gz", hash = "sha256:5a70731910cd9104762161719c3d883c960151eea077134458503723b60e3667"}, + {file = "protobuf-5.27.1-cp310-abi3-win32.whl", hash = "sha256:3adc15ec0ff35c5b2d0992f9345b04a540c1e73bfee3ff1643db43cc1d734333"}, + {file = 
"protobuf-5.27.1-cp310-abi3-win_amd64.whl", hash = "sha256:25236b69ab4ce1bec413fd4b68a15ef8141794427e0b4dc173e9d5d9dffc3bcd"}, + {file = "protobuf-5.27.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4e38fc29d7df32e01a41cf118b5a968b1efd46b9c41ff515234e794011c78b17"}, + {file = "protobuf-5.27.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:917ed03c3eb8a2d51c3496359f5b53b4e4b7e40edfbdd3d3f34336e0eef6825a"}, + {file = "protobuf-5.27.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:ee52874a9e69a30271649be88ecbe69d374232e8fd0b4e4b0aaaa87f429f1631"}, + {file = "protobuf-5.27.1-cp38-cp38-win32.whl", hash = "sha256:7a97b9c5aed86b9ca289eb5148df6c208ab5bb6906930590961e08f097258107"}, + {file = "protobuf-5.27.1-cp38-cp38-win_amd64.whl", hash = "sha256:f6abd0f69968792da7460d3c2cfa7d94fd74e1c21df321eb6345b963f9ec3d8d"}, + {file = "protobuf-5.27.1-cp39-cp39-win32.whl", hash = "sha256:dfddb7537f789002cc4eb00752c92e67885badcc7005566f2c5de9d969d3282d"}, + {file = "protobuf-5.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:39309898b912ca6febb0084ea912e976482834f401be35840a008da12d189340"}, + {file = "protobuf-5.27.1-py3-none-any.whl", hash = "sha256:4ac7249a1530a2ed50e24201d6630125ced04b30619262f06224616e0030b6cf"}, + {file = "protobuf-5.27.1.tar.gz", hash = "sha256:df5e5b8e39b7d1c25b186ffdf9f44f40f810bbcc9d2b71d9d3156fee5a9adf15"}, ] [[package]] @@ -1868,47 +1821,47 @@ files = [ [[package]] name = "pyarrow" -version = "14.0.1" +version = "16.1.0" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-14.0.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:96d64e5ba7dceb519a955e5eeb5c9adcfd63f73a56aea4722e2cc81364fc567a"}, - {file = "pyarrow-14.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a8ae88c0038d1bc362a682320112ee6774f006134cd5afc291591ee4bc06505"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f6f053cb66dc24091f5511e5920e45c83107f954a21032feadc7b9e3a8e7851"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:906b0dc25f2be12e95975722f1e60e162437023f490dbd80d0deb7375baf3171"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:78d4a77a46a7de9388b653af1c4ce539350726cd9af62e0831e4f2bd0c95a2f4"}, - {file = "pyarrow-14.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ca79080ef89d6529bb8e5074d4b4f6086143b2520494fcb7cf8a99079cde93"}, - {file = "pyarrow-14.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:32542164d905002c42dff896efdac79b3bdd7291b1b74aa292fac8450d0e4dcd"}, - {file = "pyarrow-14.0.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:c7331b4ed3401b7ee56f22c980608cf273f0380f77d0f73dd3c185f78f5a6220"}, - {file = "pyarrow-14.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:922e8b49b88da8633d6cac0e1b5a690311b6758d6f5d7c2be71acb0f1e14cd61"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58c889851ca33f992ea916b48b8540735055201b177cb0dcf0596a495a667b00"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30d8494870d9916bb53b2a4384948491444741cb9a38253c590e21f836b01222"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:be28e1a07f20391bb0b15ea03dcac3aade29fc773c5eb4bee2838e9b2cdde0cb"}, - {file = "pyarrow-14.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = 
"sha256:981670b4ce0110d8dcb3246410a4aabf5714db5d8ea63b15686bce1c914b1f83"}, - {file = "pyarrow-14.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:4756a2b373a28f6166c42711240643fb8bd6322467e9aacabd26b488fa41ec23"}, - {file = "pyarrow-14.0.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:cf87e2cec65dd5cf1aa4aba918d523ef56ef95597b545bbaad01e6433851aa10"}, - {file = "pyarrow-14.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:470ae0194fbfdfbf4a6b65b4f9e0f6e1fa0ea5b90c1ee6b65b38aecee53508c8"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6263cffd0c3721c1e348062997babdf0151301f7353010c9c9a8ed47448f82ab"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8089d7e77d1455d529dbd7cff08898bbb2666ee48bc4085203af1d826a33cc"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fada8396bc739d958d0b81d291cfd201126ed5e7913cb73de6bc606befc30226"}, - {file = "pyarrow-14.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:2a145dab9ed7849fc1101bf03bcdc69913547f10513fdf70fc3ab6c0a50c7eee"}, - {file = "pyarrow-14.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:05fe7994745b634c5fb16ce5717e39a1ac1fac3e2b0795232841660aa76647cd"}, - {file = "pyarrow-14.0.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:a8eeef015ae69d104c4c3117a6011e7e3ecd1abec79dc87fd2fac6e442f666ee"}, - {file = "pyarrow-14.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c76807540989fe8fcd02285dd15e4f2a3da0b09d27781abec3adc265ddbeba1"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:450e4605e3c20e558485f9161a79280a61c55efe585d51513c014de9ae8d393f"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:323cbe60210173ffd7db78bfd50b80bdd792c4c9daca8843ef3cd70b186649db"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0140c7e2b740e08c5a459439d87acd26b747fc408bde0a8806096ee0baaa0c15"}, - {file = "pyarrow-14.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:e592e482edd9f1ab32f18cd6a716c45b2c0f2403dc2af782f4e9674952e6dd27"}, - {file = "pyarrow-14.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d264ad13605b61959f2ae7c1d25b1a5b8505b112715c961418c8396433f213ad"}, - {file = "pyarrow-14.0.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:01e44de9749cddc486169cb632f3c99962318e9dacac7778315a110f4bf8a450"}, - {file = "pyarrow-14.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0351fecf0e26e152542bc164c22ea2a8e8c682726fce160ce4d459ea802d69c"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33c1f6110c386464fd2e5e4ea3624466055bbe681ff185fd6c9daa98f30a3f9a"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11e045dfa09855b6d3e7705a37c42e2dc2c71d608fab34d3c23df2e02df9aec3"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:097828b55321897db0e1dbfc606e3ff8101ae5725673498cbfa7754ee0da80e4"}, - {file = "pyarrow-14.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1daab52050a1c48506c029e6fa0944a7b2436334d7e44221c16f6f1b2cc9c510"}, - {file = "pyarrow-14.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:3f6d5faf4f1b0d5a7f97be987cf9e9f8cd39902611e818fe134588ee99bf0283"}, - {file = "pyarrow-14.0.1.tar.gz", hash = "sha256:b8b3f4fe8d4ec15e1ef9b599b94683c5216adaed78d5cb4c606180546d1e2ee1"}, + {file = 
"pyarrow-16.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:17e23b9a65a70cc733d8b738baa6ad3722298fa0c81d88f63ff94bf25eaa77b9"}, + {file = "pyarrow-16.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4740cc41e2ba5d641071d0ab5e9ef9b5e6e8c7611351a5cb7c1d175eaf43674a"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98100e0268d04e0eec47b73f20b39c45b4006f3c4233719c3848aa27a03c1aef"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68f409e7b283c085f2da014f9ef81e885d90dcd733bd648cfba3ef265961848"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:a8914cd176f448e09746037b0c6b3a9d7688cef451ec5735094055116857580c"}, + {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:48be160782c0556156d91adbdd5a4a7e719f8d407cb46ae3bb4eaee09b3111bd"}, + {file = "pyarrow-16.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9cf389d444b0f41d9fe1444b70650fea31e9d52cfcb5f818b7888b91b586efff"}, + {file = "pyarrow-16.1.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:d0ebea336b535b37eee9eee31761813086d33ed06de9ab6fc6aaa0bace7b250c"}, + {file = "pyarrow-16.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e73cfc4a99e796727919c5541c65bb88b973377501e39b9842ea71401ca6c1c"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf9251264247ecfe93e5f5a0cd43b8ae834f1e61d1abca22da55b20c788417f6"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddf5aace92d520d3d2a20031d8b0ec27b4395cab9f74e07cc95edf42a5cc0147"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:25233642583bf658f629eb230b9bb79d9af4d9f9229890b3c878699c82f7d11e"}, + {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a33a64576fddfbec0a44112eaf844c20853647ca833e9a647bfae0582b2ff94b"}, + {file = "pyarrow-16.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:185d121b50836379fe012753cf15c4ba9638bda9645183ab36246923875f8d1b"}, + {file = "pyarrow-16.1.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:2e51ca1d6ed7f2e9d5c3c83decf27b0d17bb207a7dea986e8dc3e24f80ff7d6f"}, + {file = "pyarrow-16.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06ebccb6f8cb7357de85f60d5da50e83507954af617d7b05f48af1621d331c9a"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04707f1979815f5e49824ce52d1dceb46e2f12909a48a6a753fe7cafbc44a0c"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d32000693deff8dc5df444b032b5985a48592c0697cb6e3071a5d59888714e2"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8785bb10d5d6fd5e15d718ee1d1f914fe768bf8b4d1e5e9bf253de8a26cb1628"}, + {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e1369af39587b794873b8a307cc6623a3b1194e69399af0efd05bb202195a5a7"}, + {file = "pyarrow-16.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:febde33305f1498f6df85e8020bca496d0e9ebf2093bab9e0f65e2b4ae2b3444"}, + {file = "pyarrow-16.1.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b5f5705ab977947a43ac83b52ade3b881eb6e95fcc02d76f501d549a210ba77f"}, + {file = "pyarrow-16.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d27bf89dfc2576f6206e9cd6cf7a107c9c06dc13d53bbc25b0bd4556f19cf5f"}, + {file = 
"pyarrow-16.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d07de3ee730647a600037bc1d7b7994067ed64d0eba797ac74b2bc77384f4c2"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbef391b63f708e103df99fbaa3acf9f671d77a183a07546ba2f2c297b361e83"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19741c4dbbbc986d38856ee7ddfdd6a00fc3b0fc2d928795b95410d38bb97d15"}, + {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f2c5fb249caa17b94e2b9278b36a05ce03d3180e6da0c4c3b3ce5b2788f30eed"}, + {file = "pyarrow-16.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:e6b6d3cd35fbb93b70ade1336022cc1147b95ec6af7d36906ca7fe432eb09710"}, + {file = "pyarrow-16.1.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:18da9b76a36a954665ccca8aa6bd9f46c1145f79c0bb8f4f244f5f8e799bca55"}, + {file = "pyarrow-16.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99f7549779b6e434467d2aa43ab2b7224dd9e41bdde486020bae198978c9e05e"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f07fdffe4fd5b15f5ec15c8b64584868d063bc22b86b46c9695624ca3505b7b4"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfe389a08ea374972bd4065d5f25d14e36b43ebc22fc75f7b951f24378bf0b5"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3b20bd67c94b3a2ea0a749d2a5712fc845a69cb5d52e78e6449bbd295611f3aa"}, + {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ba8ac20693c0bb0bf4b238751d4409e62852004a8cf031c73b0e0962b03e45e3"}, + {file = "pyarrow-16.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:31a1851751433d89a986616015841977e0a188662fcffd1a5677453f1df2de0a"}, + {file = "pyarrow-16.1.0.tar.gz", hash = "sha256:15fbb22ea96d11f0b5768504a3f961edab25eaf4197c341720c4a387f6c60315"}, ] [package.dependencies] @@ -1916,83 +1869,90 @@ numpy = ">=1.16.6" [[package]] name = "pyasn1" -version = "0.5.0" +version = "0.6.0" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, - {file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, ] [[package]] name = "pyasn1-modules" -version = "0.3.0" +version = "0.4.0" description = "A collection of ASN.1-based protocols modules" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, - {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = 
"sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.6.0" +pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycparser" -version = "2.21" +version = "2.22" description = "C parser in Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] [[package]] name = "pydantic" -version = "1.10.13" +version = "1.10.17" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, - {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, - {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, - {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, - {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, - {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, - {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, - {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, - {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, - {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, - {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, - {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, - {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, - {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, - {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, - {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, - {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, - {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, - {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, - {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, - {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, - {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, - {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681"}, + {file = "pydantic-1.10.17-cp310-cp310-win_amd64.whl", hash = "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d"}, + {file = "pydantic-1.10.17-cp311-cp311-win_amd64.whl", hash = "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f"}, + {file = "pydantic-1.10.17-cp312-cp312-win_amd64.whl", hash = "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad"}, + {file = "pydantic-1.10.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076"}, + {file = "pydantic-1.10.17-cp37-cp37m-win_amd64.whl", hash = "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75"}, + {file = "pydantic-1.10.17-cp38-cp38-win_amd64.whl", hash = "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd"}, + {file = "pydantic-1.10.17-cp39-cp39-win_amd64.whl", hash = "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227"}, + {file = "pydantic-1.10.17-py3-none-any.whl", hash = "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688"}, + {file = "pydantic-1.10.17.tar.gz", hash = "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991"}, ] [package.dependencies] @@ -2004,17 +1964,17 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pygments" -version = "2.16.1" +version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] -plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pynacl" @@ -2042,21 +2002,6 @@ cffi = ">=1.4.1" docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] -[[package]] -name = "pyre-extensions" -version = "0.0.30" -description = "Type system extensions for use with the pyre type checker" -optional = false -python-versions = "*" -files = [ - {file = "pyre-extensions-0.0.30.tar.gz", hash = "sha256:ba7923c486e089afb37a10623a8f4ae82d73cff42426d711c48af070e5bc31b2"}, - {file = "pyre_extensions-0.0.30-py3-none-any.whl", hash = "sha256:32b37ede4eed0ea879fdd6d84e0c7811e129f19b76614f1be3a6b47f9a4b1fa0"}, -] - -[package.dependencies] -typing-extensions = "*" -typing-inspect = "*" - [[package]] name = "pytest" version = "7.4.0" @@ -2112,13 +2057,13 @@ pytest = ">=5.0.0" [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -2126,36 +2071,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, -] - -[[package]] -name = "pywin32" -version = "306" -description = "Python for Window Extensions" -optional = false -python-versions = "*" -files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - 
{file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -2288,13 +2210,13 @@ tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1. [[package]] name = "referencing" -version = "0.30.2" +version = "0.35.1" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, - {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, + {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, + {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, ] [package.dependencies] @@ -2303,13 +2225,13 @@ rpds-py = ">=0.7.0" [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -2324,13 +2246,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-oauthlib" -version = "1.3.1" +version = "2.0.0" description = "OAuthlib authentication support for Requests." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.4" files = [ - {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, - {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, ] [package.dependencies] @@ -2360,110 +2282,110 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "rpds-py" -version = "0.10.4" +version = "0.18.1" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.10.4-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:e41824343c2c129599645373992b1ce17720bb8a514f04ff9567031e1c26951e"}, - {file = "rpds_py-0.10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b9d8884d58ea8801e5906a491ab34af975091af76d1a389173db491ee7e316bb"}, - {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5db93f9017b384a4f194e1d89e1ce82d0a41b1fafdbbd3e0c8912baf13f2950f"}, - {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c31ecfc53ac03dad4928a1712f3a2893008bfba1b3cde49e1c14ff67faae2290"}, - {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f92d2372ec992c82fd7c74aa21e2a1910b3dcdc6a7e6392919a138f21d528a3"}, - {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7ea49ddf51d5ec0c3cbd95190dd15e077a3153c8d4b22a33da43b5dd2b3c640"}, - {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c27942722cd5039bbf5098c7e21935a96243fed00ea11a9589f3c6c6424bd84"}, - {file = "rpds_py-0.10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:08f07150c8ebbdbce1d2d51b8e9f4d588749a2af6a98035485ebe45c7ad9394e"}, - {file = "rpds_py-0.10.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f3331a3684192659fa1090bf2b448db928152fcba08222e58106f44758ef25f7"}, - {file = "rpds_py-0.10.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:efffa359cc69840c8793f0c05a7b663de6afa7b9078fa6c80309ee38b9db677d"}, - {file = "rpds_py-0.10.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:86e8d6ff15fa7a9590c0addaf3ce52fb58bda4299cab2c2d0afa404db6848dab"}, - {file = "rpds_py-0.10.4-cp310-none-win32.whl", hash = "sha256:8f90fc6dd505867514c8b8ef68a712dc0be90031a773c1ae2ad469f04062daef"}, - {file = "rpds_py-0.10.4-cp310-none-win_amd64.whl", hash = "sha256:9f9184744fb800c9f28e155a5896ecb54816296ee79d5d1978be6a2ae60f53c4"}, - {file = "rpds_py-0.10.4-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:72e9b1e92830c876cd49565d8404e4dcc9928302d348ea2517bc3f9e3a873a2a"}, - {file = "rpds_py-0.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3650eae998dc718960e90120eb45d42bd57b18b21b10cb9ee05f91bff2345d48"}, - {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f40413d2859737ce6d95c29ce2dde0ef7cdc3063b5830ae4342fef5922c3bba7"}, - {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:b953d11b544ca5f2705bb77b177d8e17ab1bfd69e0fd99790a11549d2302258c"}, - {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28b4942ec7d9d6114c1e08cace0157db92ef674636a38093cab779ace5742d3a"}, - {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e0e2e01c5f61ddf47e3ed2d1fe1c9136e780ca6222d57a2517b9b02afd4710c"}, - {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:927e3461dae0c09b1f2e0066e50c1a9204f8a64a3060f596e9a6742d3b307785"}, - {file = "rpds_py-0.10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e69bbe0ede8f7fe2616e779421bbdb37f025c802335a90f6416e4d98b368a37"}, - {file = "rpds_py-0.10.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc688a59c100f038fa9fec9e4ab457c2e2d1fca350fe7ea395016666f0d0a2dc"}, - {file = "rpds_py-0.10.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ec001689402b9104700b50a005c2d3d0218eae90eaa8bdbbd776fe78fe8a74b7"}, - {file = "rpds_py-0.10.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:628fbb8be71a103499d10b189af7764996ab2634ed7b44b423f1e19901606e0e"}, - {file = "rpds_py-0.10.4-cp311-none-win32.whl", hash = "sha256:e3f9c9e5dd8eba4768e15f19044e1b5e216929a43a54b4ab329e103aed9f3eda"}, - {file = "rpds_py-0.10.4-cp311-none-win_amd64.whl", hash = "sha256:3bc561c183684636c0099f9c3fbab8c1671841942edbce784bb01b4707d17924"}, - {file = "rpds_py-0.10.4-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:36ff30385fb9fb3ac23a28bffdd4a230a5229ed5b15704b708b7c84bfb7fce51"}, - {file = "rpds_py-0.10.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db0589e0bf41ff6ce284ab045ca89f27be1adf19e7bce26c2e7de6739a70c18b"}, - {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c330cb125983c5d380fef4a4155248a276297c86d64625fdaf500157e1981c"}, - {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d230fddc60caced271cc038e43e6fb8f4dd6b2dbaa44ac9763f2d76d05b0365a"}, - {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a9e864ec051a58fdb6bb2e6da03942adb20273897bc70067aee283e62bbac4d"}, - {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e41d5b334e8de4bc3f38843f31b2afa9a0c472ebf73119d3fd55cde08974bdf"}, - {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bb3f3cb6072c73e6ec1f865d8b80419b599f1597acf33f63fbf02252aab5a03"}, - {file = "rpds_py-0.10.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:576d48e1e45c211e99fc02655ade65c32a75d3e383ccfd98ce59cece133ed02c"}, - {file = "rpds_py-0.10.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b28b9668a22ca2cfca4433441ba9acb2899624a323787a509a3dc5fbfa79c49d"}, - {file = "rpds_py-0.10.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ddbd113a37307638f94be5ae232a325155fd24dbfae2c56455da8724b471e7be"}, - {file = "rpds_py-0.10.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd0ad98c7d72b0e4cbfe89cdfa12cd07d2fd6ed22864341cdce12b318a383442"}, - {file = "rpds_py-0.10.4-cp312-none-win32.whl", hash = "sha256:2a97406d5e08b7095428f01dac0d3c091dc072351151945a167e7968d2755559"}, - {file = "rpds_py-0.10.4-cp312-none-win_amd64.whl", hash = "sha256:aab24b9bbaa3d49e666e9309556591aa00748bd24ea74257a405f7fed9e8b10d"}, - {file = "rpds_py-0.10.4-cp38-cp38-macosx_10_7_x86_64.whl", hash = 
"sha256:6c5ca3eb817fb54bfd066740b64a2b31536eb8fe0b183dc35b09a7bd628ed680"}, - {file = "rpds_py-0.10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd37ab9a24021821b715478357af1cf369d5a42ac7405e83e5822be00732f463"}, - {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2573ec23ad3a59dd2bc622befac845695972f3f2d08dc1a4405d017d20a6c225"}, - {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:362faeae52dc6ccc50c0b6a01fa2ec0830bb61c292033f3749a46040b876f4ba"}, - {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40f6e53461b19ddbb3354fe5bcf3d50d4333604ae4bf25b478333d83ca68002c"}, - {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6090ba604ea06b525a231450ae5d343917a393cbf50423900dea968daf61d16f"}, - {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e29dac59df890972f73c511948072897f512974714a803fe793635b80ff8c7"}, - {file = "rpds_py-0.10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f82abb5c5b83dc30e96be99ce76239a030b62a73a13c64410e429660a5602bfd"}, - {file = "rpds_py-0.10.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a3628815fd170a64624001bfb4e28946fd515bd672e68a1902d9e0290186eaf3"}, - {file = "rpds_py-0.10.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d37f27ad80f742ef82796af3fe091888864958ad0bc8bab03da1830fa00c6004"}, - {file = "rpds_py-0.10.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:255a23bded80605e9f3997753e3a4b89c9aec9efb07ec036b1ca81440efcc1a9"}, - {file = "rpds_py-0.10.4-cp38-none-win32.whl", hash = "sha256:049098dabfe705e9638c55a3321137a821399c50940041a6fcce267a22c70db2"}, - {file = "rpds_py-0.10.4-cp38-none-win_amd64.whl", hash = "sha256:aa45cc71bf23a3181b8aa62466b5a2b7b7fb90fdc01df67ca433cd4fce7ec94d"}, - {file = "rpds_py-0.10.4-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:3507c459767cf24c11e9520e2a37c89674266abe8e65453e5cb66398aa47ee7b"}, - {file = "rpds_py-0.10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2603e084054351cc65097da326570102c4c5bd07426ba8471ceaefdb0b642cc9"}, - {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0f1d336786cb62613c72c00578c98e5bb8cd57b49c5bae5d4ab906ca7872f98"}, - {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf032367f921201deaecf221d4cc895ea84b3decf50a9c73ee106f961885a0ad"}, - {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f050ceffd8c730c1619a16bbf0b9cd037dcdb94b54710928ba38c7bde67e4a4"}, - {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8709eb4ab477c533b7d0a76cd3065d7d95c9e25e6b9f6e27caeeb8c63e8799c9"}, - {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc20dadb102140dff63529e08ce6f9745dbd36e673ebb2b1c4a63e134bca81c2"}, - {file = "rpds_py-0.10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd7da2adc721ccf19ac7ec86cae3a4fcaba03d9c477d5bd64ded6e9bb817bf3f"}, - {file = "rpds_py-0.10.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e5dba1c11e089b526379e74f6c636202e4c5bad9a48c7416502b8a5b0d026c91"}, - {file = "rpds_py-0.10.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ffd539d213c1ea2989ab92a5b9371ae7159c8c03cf2bcb9f2f594752f755ecd3"}, - {file = 
"rpds_py-0.10.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e791e3d13b14d0a7921804d0efe4d7bd15508bbcf8cb7a0c1ee1a27319a5f033"}, - {file = "rpds_py-0.10.4-cp39-none-win32.whl", hash = "sha256:2f2ac8bb01f705c5caaa7fe77ffd9b03f92f1b5061b94228f6ea5eaa0fca68ad"}, - {file = "rpds_py-0.10.4-cp39-none-win_amd64.whl", hash = "sha256:7c7ca791bedda059e5195cf7c6b77384657a51429357cdd23e64ac1d4973d6dc"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:9c7e7bd1fa1f535af71dfcd3700fc83a6dc261a1204f8f5327d8ffe82e52905d"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7089d8bfa8064b28b2e39f5af7bf12d42f61caed884e35b9b4ea9e6fb1175077"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1f191befea279cb9669b57be97ab1785781c8bab805900e95742ebfaa9cbf1d"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98c0aecf661c175ce9cb17347fc51a5c98c3e9189ca57e8fcd9348dae18541db"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d81359911c3bb31c899c6a5c23b403bdc0279215e5b3bc0d2a692489fed38632"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83da147124499fe41ed86edf34b4e81e951b3fe28edcc46288aac24e8a5c8484"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49db6c0a0e6626c2b97f5e7f8f7074da21cbd8ec73340c25e839a2457c007efa"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:125776d5db15162fdd9135372bef7fe4fb7c5f5810cf25898eb74a06a0816aec"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:32819b662e3b4c26355a4403ea2f60c0a00db45b640fe722dd12db3d2ef807fb"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3bd38b80491ef9686f719c1ad3d24d14fbd0e069988fdd4e7d1a6ffcdd7f4a13"}, - {file = "rpds_py-0.10.4-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:2e79eeeff8394284b09577f36316d410525e0cf0133abb3de10660e704d3d38e"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3e37f1f134037601eb4b1f46854194f0cc082435dac2ee3de11e51529f7831f2"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:ba3246c60303eab3d0e562addf25a983d60bddc36f4d1edc2510f056d19df255"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9123ba0f3f98ff79780eebca9984a2b525f88563844b740f94cffb9099701230"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d98802b78093c7083cc51f83da41a5be5a57d406798c9f69424bd75f8ae0812a"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:58bae860d1d116e6b4e1aad0cdc48a187d5893994f56d26db0c5534df7a47afd"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd7e62e7d5bcfa38a62d8397fba6d0428b970ab7954c2197501cd1624f7f0bbb"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83f5228459b84fa6279e4126a53abfdd73cd9cc183947ee5084153880f65d7"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:4bcb1abecd998a72ad4e36a0fca93577fd0c059a6aacc44f16247031b98f6ff4"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:9e7b3ad9f53ea9e085b3d27286dd13f8290969c0a153f8a52c8b5c46002c374b"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:cbec8e43cace64e63398155dc585dc479a89fef1e57ead06c22d3441e1bd09c3"}, - {file = "rpds_py-0.10.4-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ad21c60fc880204798f320387164dcacc25818a7b4ec2a0bf6b6c1d57b007d23"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:6baea8a4f6f01e69e75cfdef3edd4a4d1c4b56238febbdf123ce96d09fbff010"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:94876c21512535955a960f42a155213315e6ab06a4ce8ce372341a2a1b143eeb"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cb55454a20d1b935f9eaab52e6ceab624a2efd8b52927c7ae7a43e02828dbe0"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13cbd79ccedc6b39c279af31ebfb0aec0467ad5d14641ddb15738bf6e4146157"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00a88003db3cc953f8656b59fc9af9d0637a1fb93c235814007988f8c153b2f2"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0f7f77a77c37159c9f417b8dd847f67a29e98c6acb52ee98fc6b91efbd1b2b6"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70563a1596d2e0660ca2cebb738443437fc0e38597e7cbb276de0a7363924a52"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3ece9aa6d07e18c966f14b4352a4c6f40249f6174d3d2c694c1062e19c6adbb"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d5ad7b1a1f6964d19b1a8acfc14bf7864f39587b3e25c16ca04f6cd1815026b3"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:60018626e637528a1fa64bb3a2b3e46ab7bf672052316d61c3629814d5e65052"}, - {file = "rpds_py-0.10.4-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ae8a32ab77a84cc870bbfb60645851ca0f7d58fd251085ad67464b1445d632ca"}, - {file = "rpds_py-0.10.4.tar.gz", hash = "sha256:18d5ff7fbd305a1d564273e9eb22de83ae3cd9cd6329fddc8f12f6428a711a6a"}, + {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"}, + {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"}, + {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"}, + {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"}, + {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"}, + {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"}, + {file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"}, + {file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"}, + {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"}, + {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"}, + {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"}, + {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"}, + {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"}, + {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"}, + {file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"}, + {file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"}, + {file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"}, + {file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"}, + {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"}, + {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"}, + {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"}, + {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"}, + {file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"}, + {file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"}, + {file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"}, + {file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"}, + {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"}, + {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"}, + {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"}, + {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"}, + {file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"}, + {file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"}, + {file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"}, + {file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"}, + {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"}, + {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"}, + {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"}, + {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"}, + {file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"}, + {file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"}, + {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"}, + {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"}, + {file = 
"rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"}, + {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"}, + {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"}, ] [[package]] @@ -2482,19 +2404,18 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "68.2.2" +version = "70.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, - {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, + {file = "setuptools-70.1.0-py3-none-any.whl", hash = "sha256:d9b8b771455a97c8a9f3ab3448ebe0b29b5e105f1228bba41028be116985a267"}, + {file = "setuptools-70.1.0.tar.gz", hash = "sha256:01a1e793faa5bd89abc851fa15d0a0db26f160890c7102cd8dce643e886b47f5"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2509,24 +2430,28 @@ files = [ [[package]] name = "smart-open" -version = "6.4.0" +version = "7.0.4" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" optional = false -python-versions = ">=3.6,<4.0" +python-versions = "<4.0,>=3.7" files = [ - {file = "smart_open-6.4.0-py3-none-any.whl", hash = "sha256:8d3ef7e6997e8e42dd55c74166ed21e6ac70664caa32dd940b26d54a8f6b4142"}, - {file = 
"smart_open-6.4.0.tar.gz", hash = "sha256:be3c92c246fbe80ebce8fbacb180494a481a77fcdcb7c1aadb2ea5b9c2bee8b9"}, + {file = "smart_open-7.0.4-py3-none-any.whl", hash = "sha256:4e98489932b3372595cddc075e6033194775165702887216b65eba760dfd8d47"}, + {file = "smart_open-7.0.4.tar.gz", hash = "sha256:62b65852bdd1d1d516839fcb1f6bc50cd0f16e05b4ec44b52f43d38bcb838524"}, ] +[package.dependencies] +wrapt = "*" + [package.extras] -all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests"] +all = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "paramiko", "requests", "zstandard"] azure = ["azure-common", "azure-core", "azure-storage-blob"] gcs = ["google-cloud-storage (>=2.6.0)"] http = ["requests"] s3 = ["boto3"] ssh = ["paramiko"] -test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses"] +test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses", "zstandard"] webhdfs = ["requests"] +zst = ["zstandard"] [[package]] name = "stack-data" @@ -2547,20 +2472,6 @@ pure-eval = "*" [package.extras] tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] -[[package]] -name = "tabulate" -version = "0.9.0" -description = "Pretty-print tabular data" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, - {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, -] - -[package.extras] -widechars = ["wcwidth"] - [[package]] name = "tomli" version = "2.0.1" @@ -2574,91 +2485,77 @@ files = [ [[package]] name = "traitlets" -version = "5.14.2" +version = "5.14.3" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" files = [ - {file = "traitlets-5.14.2-py3-none-any.whl", hash = "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80"}, - {file = "traitlets-5.14.2.tar.gz", hash = "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9"}, + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.1)", "pytest-mock", "pytest-mypy-testing"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, -] - -[[package]] -name = "typing-inspect" -version = "0.9.0" -description = "Runtime inspection utilities for typing 
module." -optional = false -python-versions = "*" -files = [ - {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, - {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - [[package]] name = "tzdata" -version = "2023.3" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] name = "urllib3" -version = "1.26.17" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.8" files = [ - {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"}, - {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.21.0" +version = "20.26.2" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, - {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, + {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, + {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, ] [package.dependencies] -distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<4" +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", 
"sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "wcwidth" @@ -2673,29 +2570,108 @@ files = [ [[package]] name = "websocket-client" -version = "1.6.4" +version = "1.8.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" files = [ - {file = "websocket-client-1.6.4.tar.gz", hash = "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df"}, - {file = "websocket_client-1.6.4-py3-none-any.whl", hash = "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24"}, + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, ] [package.extras] -docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] optional = ["python-socks", "wsaccel"] test = ["websockets"] [[package]] name = "widgetsnbextension" -version = "4.0.10" +version = "4.0.11" description = "Jupyter interactive widgets for Jupyter Notebook" optional = false python-versions = ">=3.7" files = [ - {file = "widgetsnbextension-4.0.10-py3-none-any.whl", hash = "sha256:d37c3724ec32d8c48400a435ecfa7d3e259995201fbefa37163124a9fcb393cc"}, - {file = "widgetsnbextension-4.0.10.tar.gz", hash = "sha256:64196c5ff3b9a9183a8e699a4227fb0b7002f252c814098e66c4d1cd0644688f"}, + {file = "widgetsnbextension-4.0.11-py3-none-any.whl", hash = "sha256:55d4d6949d100e0d08b94948a42efc3ed6dfdc0e9468b2c4b128c9a2ce3a7a36"}, + {file = "widgetsnbextension-4.0.11.tar.gz", hash = "sha256:8b22a8f1910bfd188e596fe7fc05dcbd87e810c8a4ba010bdb3da86637398474"}, +] + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = 
"wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = 
"wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] [[package]] @@ -2803,20 +2779,20 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.17.0" +version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "d834cef86c5776cdd688c9ff9df9e31731bc829e1f319ec4ea86c17d33762af6" +content-hash = "d656bab99c2e5a911ee1003db9e0682141328ae3ef1e1620945f8479451425bf" diff --git a/pyproject.toml b/pyproject.toml index a295e383..af7dd1ca 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,6 @@ openshift-client = "1.0.18" rich = "^12.5" ray = {version = "2.20.0", extras = ["data", "default"]} kubernetes = ">= 25.3.0, < 27" -codeflare-torchx = "0.6.0.dev2" cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" From f6399e93968acf19ace688bf58a37f4c4a77c16d Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Fri, 21 Jun 2024 13:43:44 +0100 Subject: [PATCH 269/496] Add new CONTRIBUTING.md file --- CONTRIBUTING.md | 90 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 90 insertions(+) create mode 100644 CONTRIBUTING.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..60f84102 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,90 @@ +# Contributing to the CodeFlare SDK + +Thank you for your interest in contributing to the CodeFlare SDK! + +## Getting Started + +### Prerequisites + +- Python 3.9 +- [Poetry](https://python-poetry.org/) + +### Setting Up Your Development Environment + +1. **Clone the repository:** + + ```sh + git clone https://github.com/project-codeflare/codeflare-sdk.git + cd codeflare-sdk + ``` + +2. Create a Poetry virtual environment: + + ```sh + poetry shell + ``` + +3. 
Install dependencies: + + ```sh + poetry install + ``` + + - To include test dependencies, run: + + ```sh + poetry install --with test + ``` + + - To include docs dependencies, run: + + ```sh + poetry install --with docs + ``` + + - To include both test and docs dependencies, run: + + ```sh + poetry install --with test,docs + ``` + +## Development Workflow + +### Pre-commit + +We use pre-commit to ensure consistent code formatting. To enable pre-commit hooks, run: + +```sh +pre-commit install +``` + +## Testing + +To install CodeFlare SDK in editable mode, run: + +```sh +pip install -e . +``` + +### Unit Testing + +To run the unit tests, execute: + +```sh +pytest -v tests/unit_test.py +``` + +### Local e2e Testing + +- Please follow the [e2e documentation](https://github.com/project-codeflare/codeflare-sdk/blob/main/docs/e2e.md) + +#### Code Coverage + +- Run tests with the following command: `coverage run -m --source=src pytest tests/unit_test.py` +- To then view a code coverage report w/ missing lines, run `coverage report -m` + +### Code Formatting + +- To check file formatting, in top-level dir run `black --check .` +- To auto-reformat all files, remove the `--check` flag +- To reformat an individual file, run `black ` From 8bfcfb14ec8539fce29433b1121022b6c6794393 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Fri, 21 Jun 2024 13:44:15 +0100 Subject: [PATCH 270/496] Update README.md to point to the new CONTRIBUTING.md --- README.md | 58 ++----------------------------------------------------- 1 file changed, 2 insertions(+), 56 deletions(-) diff --git a/README.md b/README.md index 0ea7f694..31177840 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Codeflare-SDK +# CodeFlare SDK [![Python application](https://github.com/project-codeflare/codeflare-sdk/actions/workflows/unit-tests.yml/badge.svg?branch=main)](https://github.com/project-codeflare/codeflare-sdk/actions/workflows/unit-tests.yml) ![coverage badge](./coverage.svg) @@ -19,61 +19,7 @@ Can be installed via `pip`: `pip install codeflare-sdk` ## Development -### Prerequisites - -We recommend using Python 3.9 for development, along with Poetry. -Create a Poetry virtual environment with the required Python version 3.9, and run all commands within this environment. - - - run: `poetry shell` - -#### Install dependencies: - - - run: `poetry install` - -This will install standard requirements as specified in the poetry.lock file. Test and docs dependencies are optional. - -- To include test dependencies run: `poetry install --with test` - -- To include docs dependencies run: `poetry install --with docs` - -- To include test and docs dependencies run: `poetry install --with test,docs` - -If you require a requirements.txt file you can run: - -`poetry export -f requirements.txt --output requirements.txt --without-hashes` - -### Pre-commit - -We use pre-commit to make sure the code is consistently formatted. To make sure that pre-commit is run every time you commit changes, simply run `pre-commit install` - -To build the codeflare-sdk pre-commit image run `podman build -f .github/build/Containerfile .` from the root directory. - -### Testing - -- To install codeflare-sdk in editable mode, run `pip install -e .` from the repo root. 
-- Any new test functions/scripts can be added into the `tests` folder -- NOTE: Functional tests coming soon, will live in `tests/func_test.py` - -#### Unit Testing -- To run the unit tests, run `pytest -v tests/unit_test.py` - -#### Local e2e Testing -- Please follow the [e2e documentation](https://github.com/project-codeflare/codeflare-sdk/blob/main/docs/e2e.md) - -#### Code Coverage - -- Run tests with the following command: `coverage run -m --source=src pytest tests/unit_test.py` -- To then view a code coverage report w/ missing lines, run `coverage report -m` - -### Code Formatting - -- To check file formatting, in top-level dir run `black --check .` -- To auto-reformat all files, remove the `--check` flag -- To reformat an individual file, run `black ` - -### Package Build - -To build the python package: `$ poetry build` +Please see our [CONTRIBUTING.md](./CONTRIBUTING.md) for detailed instructions. ## Release Instructions From 1f825d63cdf72e3ceaa5bbb653da1145501bbf79 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 20 Jun 2024 12:28:35 +0100 Subject: [PATCH 271/496] Revert "Added custom Volumes and Volume Mounts support" This reverts commit c68d425377105a30ec591364f73b993991afaa47. --- src/codeflare_sdk/cluster/cluster.py | 4 ---- src/codeflare_sdk/cluster/config.py | 2 -- src/codeflare_sdk/utils/generate_yaml.py | 22 ---------------------- 3 files changed, 28 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 015f15ed..e5bbcd86 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -152,8 +152,6 @@ def create_app_wrapper(self): write_to_file = self.config.write_to_file local_queue = self.config.local_queue labels = self.config.labels - volumes = self.config.volumes - volume_mounts = self.config.volume_mounts return generate_appwrapper( name=name, namespace=namespace, @@ -174,8 +172,6 @@ def create_app_wrapper(self): write_to_file=write_to_file, local_queue=local_queue, labels=labels, - volumes=volumes, - volume_mounts=volume_mounts, ) # creates a new cluster with the provided or default spec diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 97067365..9e069c37 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -53,8 +53,6 @@ class ClusterConfiguration: write_to_file: bool = False verify_tls: bool = True labels: dict = field(default_factory=dict) - volumes: list = field(default_factory=list) - volume_mounts: list = field(default_factory=list) def __post_init__(self): if not self.verify_tls: diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 3192ae1b..30edcd91 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -96,20 +96,6 @@ def update_image_pull_secrets(spec, image_pull_secrets): ] -def update_volume_mounts(spec, volume_mounts: list): - containers = spec.get("containers") - for volume_mount in volume_mounts: - for container in containers: - volumeMount = client.ApiClient().sanitize_for_serialization(volume_mount) - container["volumeMounts"].append(volumeMount) - - -def update_volumes(spec, volumes: list): - for volume in volumes: - new_volume = client.ApiClient().sanitize_for_serialization(volume) - spec["volumes"].append(new_volume) - - def update_env(spec, env): containers = spec.get("containers") for container in containers: @@ -150,8 +136,6 @@ def update_nodes( head_cpus, 
head_memory, head_gpus, - volumes, - volume_mounts, ): head = cluster_yaml.get("spec").get("headGroupSpec") head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) @@ -166,8 +150,6 @@ def update_nodes( for comp in [head, worker]: spec = comp.get("template").get("spec") - update_volume_mounts(spec, volume_mounts) - update_volumes(spec, volumes) update_image_pull_secrets(spec, image_pull_secrets) update_image(spec, image) update_env(spec, env) @@ -298,8 +280,6 @@ def generate_appwrapper( write_to_file: bool, local_queue: Optional[str], labels, - volumes: list[client.V1Volume], - volume_mounts: list[client.V1VolumeMount], ): cluster_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) @@ -319,8 +299,6 @@ def generate_appwrapper( head_cpus, head_memory, head_gpus, - volumes, - volume_mounts, ) augment_labels(cluster_yaml, labels) notebook_annotations(cluster_yaml) From 7eb00bef8ca8eca6e5431e8787716cee68ef117c Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 20 Jun 2024 12:30:48 +0100 Subject: [PATCH 272/496] Revert "Added Unit tests for custom volumes and volume mounts" This reverts commit 20476aa81f30c2faf6781c581bed3b1907ac590e. --- tests/unit-test-volume-cluster.yaml | 186 ---------------------------- tests/unit_test.py | 62 ---------- 2 files changed, 248 deletions(-) delete mode 100644 tests/unit-test-volume-cluster.yaml diff --git a/tests/unit-test-volume-cluster.yaml b/tests/unit-test-volume-cluster.yaml deleted file mode 100644 index 67d2c6b2..00000000 --- a/tests/unit-test-volume-cluster.yaml +++ /dev/null @@ -1,186 +0,0 @@ -apiVersion: ray.io/v1 -kind: RayCluster -metadata: - labels: - controller-tools.k8s.io: '1.0' - kueue.x-k8s.io/queue-name: local-queue-default - name: unit-test-volume-cluster - namespace: opendatahub -spec: - autoscalerOptions: - idleTimeoutSeconds: 60 - imagePullPolicy: Always - resources: - limits: - cpu: 500m - memory: 512Mi - requests: - cpu: 500m - memory: 512Mi - upscalingMode: Default - enableInTreeAutoscaling: false - headGroupSpec: - enableIngress: false - rayStartParams: - block: 'true' - dashboard-host: 0.0.0.0 - num-gpus: '0' - serviceType: ClusterIP - template: - spec: - containers: - - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 - imagePullPolicy: Always - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: ray-head - ports: - - containerPort: 6379 - name: gcs - - containerPort: 8265 - name: dashboard - - containerPort: 10001 - name: client - resources: - limits: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - requests: - cpu: 2 - memory: 8G - nvidia.com/gpu: 0 - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /home/ray/test1 - name: test - - mountPath: /home/ray/test2 - name: test2 - - mountPath: /home/ray/test2 - name: test3 - imagePullSecrets: [] - volumes: - - configMap: - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true - name: odh-trusted-ca-cert - - configMap: - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true - name: odh-ca-cert - - 
emptyDir: - sizeLimit: 500Gi - name: test - - configMap: - items: - - key: test - path: /home/ray/test2/data.txt - name: config-map-test - name: test2 - - name: test3 - secret: - secretName: test-secret - rayVersion: 2.20.0 - workerGroupSpecs: - - groupName: small-group-unit-test-volume-cluster - maxReplicas: 1 - minReplicas: 1 - rayStartParams: - block: 'true' - num-gpus: '0' - replicas: 1 - template: - metadata: - annotations: - key: value - labels: - key: value - spec: - containers: - - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 - lifecycle: - preStop: - exec: - command: - - /bin/sh - - -c - - ray stop - name: machine-learning - resources: - limits: - cpu: 1 - memory: 2G - nvidia.com/gpu: 0 - requests: - cpu: 1 - memory: 2G - nvidia.com/gpu: 0 - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /home/ray/test1 - name: test - - mountPath: /home/ray/test2 - name: test2 - - mountPath: /home/ray/test2 - name: test3 - imagePullSecrets: [] - volumes: - - configMap: - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true - name: odh-trusted-ca-cert - - configMap: - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - name: odh-trusted-ca-bundle - optional: true - name: odh-ca-cert - - emptyDir: - sizeLimit: 500Gi - name: test - - configMap: - items: - - key: test - path: /home/ray/test2/data.txt - name: config-map-test - name: test2 - - name: test3 - secret: - secretName: test-secret diff --git a/tests/unit_test.py b/tests/unit_test.py index db908df6..61870b2a 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -444,68 +444,6 @@ def test_default_cluster_creation(mocker): assert cluster.config.namespace == "opendatahub" -def test_cluster_with_custom_volumes(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "codeflare_sdk.cluster.cluster.get_current_namespace", - return_value="opendatahub", - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - - from kubernetes.client import ( - V1Volume, - V1VolumeMount, - V1EmptyDirVolumeSource, - V1ConfigMapVolumeSource, - V1KeyToPath, - V1SecretVolumeSource, - ) - - volume_mounts = [ - V1VolumeMount(mount_path="/home/ray/test1", name="test"), - V1VolumeMount( - mount_path="/home/ray/test2", - name="test2", - ), - V1VolumeMount( - mount_path="/home/ray/test2", - name="test3", - ), - ] - - volumes = [ - V1Volume( - name="test", - empty_dir=V1EmptyDirVolumeSource(size_limit="500Gi"), - ), - V1Volume( - name="test2", - config_map=V1ConfigMapVolumeSource( - name="config-map-test", - items=[V1KeyToPath(key="test", path="/home/ray/test2/data.txt")], - ), - ), - V1Volume(name="test3", secret=V1SecretVolumeSource(secret_name="test-secret")), - ] - - test_config = ClusterConfiguration( - name="unit-test-volume-cluster", - image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", - volume_mounts=volume_mounts, - volumes=volumes, - ) - cluster = Cluster(test_config) - test_rc = yaml.load(cluster.app_wrapper_yaml, 
Loader=yaml.FullLoader) - with open( - f"{parent}/tests/unit-test-volume-cluster.yaml", - ) as f: - volume_rc = yaml.load(f, Loader=yaml.FullLoader) - assert test_rc == volume_rc - - def test_gen_names_with_name(mocker): mocker.patch.object( uuid, "uuid4", return_value=uuid.UUID("00000000-0000-0000-0000-000000000001") From 45d0a7436e9b81d81c34d2c0ac313bcf3974e2d3 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 18 Jun 2024 10:41:49 +0100 Subject: [PATCH 273/496] Added documentation for S3 compatible storage --- demo-notebooks/guided-demos/mnist_fashion.py | 1 + docs/s3-compatible-storage.md | 61 ++++++++++++++++++++ 2 files changed, 62 insertions(+) create mode 100644 docs/s3-compatible-storage.md diff --git a/demo-notebooks/guided-demos/mnist_fashion.py b/demo-notebooks/guided-demos/mnist_fashion.py index 2b75a865..85cd6e64 100644 --- a/demo-notebooks/guided-demos/mnist_fashion.py +++ b/demo-notebooks/guided-demos/mnist_fashion.py @@ -74,6 +74,7 @@ def train_func_distributed(): # For GPU Training, set `use_gpu` to True. use_gpu = True +# To learn more about configuring S3 compatible storage check out our docs -> https://github.com/project-codeflare/codeflare-sdk/blob/main/docs/s3-compatible-storage.md trainer = TorchTrainer( train_func_distributed, scaling_config=ScalingConfig( diff --git a/docs/s3-compatible-storage.md b/docs/s3-compatible-storage.md new file mode 100644 index 00000000..919ce815 --- /dev/null +++ b/docs/s3-compatible-storage.md @@ -0,0 +1,61 @@ +# S3 compatible storage with Ray Train examples +Some of our distributed training examples require an external storage solution so that all nodes can access the same data.
      +The following are examples for configuring S3 or Minio storage for your Ray Train script or interactive session. + +## S3 Bucket +In your Python Script add the following environment variables: +``` python +os.environ["AWS_ACCESS_KEY_ID"] = "XXXXXXXX" +os.environ["AWS_SECRET_ACCESS_KEY"] = "XXXXXXXX" +os.environ["AWS_DEFAULT_REGION"] = "XXXXXXXX" +``` +Alternatively you can specify these variables in your runtime environment on Job Submission. +``` python +submission_id = client.submit_job( + entrypoint=..., + runtime_env={ + "env_vars": { + "AWS_ACCESS_KEY_ID": os.environ.get('AWS_ACCESS_KEY_ID'), + "AWS_SECRET_ACCESS_KEY": os.environ.get('AWS_SECRET_ACCESS_KEY'), + "AWS_DEFAULT_REGION": os.environ.get('AWS_DEFAULT_REGION') + }, + } +) +``` +In your Trainer configuration you can specify a `run_config` which will utilise your external storage. +``` python +trainer = TorchTrainer( + train_func_distributed, + scaling_config=scaling_config, + run_config = ray.train.RunConfig(storage_path="s3://BUCKET_NAME/SUB_PATH/", name="unique_run_name") +) +``` +To learn more about Amazon S3 Storage you can find information [here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/creating-bucket.html). + +## Minio Bucket +In your Python Script add the following function for configuring your run_config: +``` python +import s3fs +import pyarrow + +def get_minio_run_config(): + s3_fs = s3fs.S3FileSystem( + key = os.getenv('MINIO_ACCESS_KEY', "XXXXX"), + secret = os.getenv('MINIO_SECRET_ACCESS_KEY', "XXXXX"), + endpoint_url = os.getenv('MINIO_URL', "XXXXX") + ) + custom_fs = pyarrow.fs.PyFileSystem(pyarrow.fs.FSSpecHandler(s3_fs)) + run_config = ray.train.RunConfig(storage_path='training', storage_filesystem=custom_fs) + return run_config +``` +You can update the `run_config` to further suit your needs above. +Lastly the new `run_config` must be added to the Trainer: +``` python +trainer = TorchTrainer( + train_func_distributed, + scaling_config=scaling_config, + run_config = get_minio_run_config() +) +``` +To find more information on creating a Minio Bucket compatible with RHOAI you can refer to this [documentation](https://ai-on-openshift.io/tools-and-applications/minio/minio/).
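As with the S3 example above, the Minio credentials can also be forwarded to a submitted Ray job through its runtime environment so that `get_minio_run_config()` can resolve them inside the cluster. The sketch below is illustrative only: it assumes an existing `RayJobClient` instance named `client`, and the entrypoint script name is just an example.
``` python
import os

# Illustrative sketch: pass the Minio settings through to the job so that
# get_minio_run_config() can read them on the remote workers.
submission_id = client.submit_job(
    entrypoint="python mnist_fashion.py",  # assumed training script
    runtime_env={
        "env_vars": {
            "MINIO_ACCESS_KEY": os.environ.get("MINIO_ACCESS_KEY"),
            "MINIO_SECRET_ACCESS_KEY": os.environ.get("MINIO_SECRET_ACCESS_KEY"),
            "MINIO_URL": os.environ.get("MINIO_URL"),
        },
    },
)
```
The environment variable names match those read by `get_minio_run_config()` above; adjust them if your deployment uses different names.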
+Note: You must have `s3fs` and `pyarrow` installed in your environment for this method. From a03c72f6f5f7de517ec6e8c5164d3d43e474aaa0 Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Fri, 21 Jun 2024 16:55:29 +0000 Subject: [PATCH 274/496] Changes in docs for release: v0.16.4 --- .../cluster/awload.html | 6 +- .../cluster/cluster.html | 30 +- .../cluster/config.html | 3 +- .../detailed-documentation/cluster/model.html | 6 +- docs/detailed-documentation/job/ray_jobs.html | 4 +- .../utils/generate_yaml.html | 402 +++++++----------- 6 files changed, 174 insertions(+), 277 deletions(-) diff --git a/docs/detailed-documentation/cluster/awload.html b/docs/detailed-documentation/cluster/awload.html index 297ebc02..c77ac0e7 100644 --- a/docs/detailed-documentation/cluster/awload.html +++ b/docs/detailed-documentation/cluster/awload.html @@ -61,7 +61,7 @@

      Module codeflare_sdk.cluster.awload

      class AWManager: """ An object for submitting and removing existing AppWrapper yamls - to be added to the MCAD queue. + to be added to the Kueue localqueue. """ def __init__(self, filename: str) -> None: @@ -145,7 +145,7 @@

      Classes

  • An object for submitting and removing existing AppWrapper yamls -to be added to the MCAD queue.

    +to be added to the Kueue localqueue.

    Create the AppWrapper Manager object by passing in an AppWrapper yaml file

    @@ -155,7 +155,7 @@

    Classes

    class AWManager:
         """
         An object for submitting and removing existing AppWrapper yamls
    -    to be added to the MCAD queue.
    +    to be added to the Kueue localqueue.
         """
     
         def __init__(self, filename: str) -> None:
    diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html
    index 7544b4d5..1e49cf6d 100644
    --- a/docs/detailed-documentation/cluster/cluster.html
    +++ b/docs/detailed-documentation/cluster/cluster.html
    @@ -179,11 +179,9 @@ 

    Module codeflare_sdk.cluster.cluster

    template = self.config.template image = self.config.image appwrapper = self.config.appwrapper - instance_types = self.config.machine_types env = self.config.envs image_pull_secrets = self.config.image_pull_secrets write_to_file = self.config.write_to_file - verify_tls = self.config.verify_tls local_queue = self.config.local_queue labels = self.config.labels return generate_appwrapper( @@ -201,11 +199,9 @@

    Module codeflare_sdk.cluster.cluster

    template=template, image=image, appwrapper=appwrapper, - instance_types=instance_types, env=env, image_pull_secrets=image_pull_secrets, write_to_file=write_to_file, - verify_tls=verify_tls, local_queue=local_queue, labels=labels, ) @@ -213,8 +209,8 @@

    Module codeflare_sdk.cluster.cluster

    # creates a new cluster with the provided or default spec def up(self): """ - Applies the AppWrapper yaml, pushing the resource request onto - the MCAD queue. + Applies the Cluster yaml, pushing the resource request onto + the Kueue localqueue. """ # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError @@ -1229,11 +1225,9 @@

    Classes

    template = self.config.template image = self.config.image appwrapper = self.config.appwrapper - instance_types = self.config.machine_types env = self.config.envs image_pull_secrets = self.config.image_pull_secrets write_to_file = self.config.write_to_file - verify_tls = self.config.verify_tls local_queue = self.config.local_queue labels = self.config.labels return generate_appwrapper( @@ -1251,11 +1245,9 @@

    Classes

    template=template, image=image, appwrapper=appwrapper, - instance_types=instance_types, env=env, image_pull_secrets=image_pull_secrets, write_to_file=write_to_file, - verify_tls=verify_tls, local_queue=local_queue, labels=labels, ) @@ -1263,8 +1255,8 @@

    Classes

    # creates a new cluster with the provided or default spec def up(self): """ - Applies the AppWrapper yaml, pushing the resource request onto - the MCAD queue. + Applies the Cluster yaml, pushing the resource request onto + the Kueue localqueue. """ # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError @@ -1776,11 +1768,9 @@

    Methods

    template = self.config.template image = self.config.image appwrapper = self.config.appwrapper - instance_types = self.config.machine_types env = self.config.envs image_pull_secrets = self.config.image_pull_secrets write_to_file = self.config.write_to_file - verify_tls = self.config.verify_tls local_queue = self.config.local_queue labels = self.config.labels return generate_appwrapper( @@ -1798,11 +1788,9 @@

    Methods

    template=template, image=image, appwrapper=appwrapper, - instance_types=instance_types, env=env, image_pull_secrets=image_pull_secrets, write_to_file=write_to_file, - verify_tls=verify_tls, local_queue=local_queue, labels=labels, )
    @@ -1974,7 +1962,7 @@

    Methods

    -def list_jobs(self) ‑> List[~T] +def list_jobs(self) ‑> List

    This method accesses the head ray node in your cluster and lists the running jobs.

    @@ -2090,16 +2078,16 @@

    Methods

    def up(self)
    -

    Applies the AppWrapper yaml, pushing the resource request onto -the MCAD queue.

    +

    Applies the Cluster yaml, pushing the resource request onto +the Kueue localqueue.

    Expand source code
    def up(self):
         """
    -    Applies the AppWrapper yaml, pushing the resource request onto
    -    the MCAD queue.
    +    Applies the Cluster yaml, pushing the resource request onto
    +    the Kueue localqueue.
         """
     
         # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError
    diff --git a/docs/detailed-documentation/cluster/config.html b/docs/detailed-documentation/cluster/config.html
    index 4ce9dcdd..60b6dc95 100644
    --- a/docs/detailed-documentation/cluster/config.html
    +++ b/docs/detailed-documentation/cluster/config.html
    @@ -133,7 +133,8 @@ 

    Classes

    Expand source code -
    class ClusterConfiguration:
    +
    @dataclass
    +class ClusterConfiguration:
         """
         This dataclass is used to specify resource requirements and other details, and
         is passed in as an argument when creating a Cluster object.
    diff --git a/docs/detailed-documentation/cluster/model.html b/docs/detailed-documentation/cluster/model.html
    index d69357e1..e47808e4 100644
    --- a/docs/detailed-documentation/cluster/model.html
    +++ b/docs/detailed-documentation/cluster/model.html
    @@ -146,7 +146,8 @@ 

    Classes

    Expand source code -
    class AppWrapper:
    +
    @dataclass
    +class AppWrapper:
         """
         For storing information about an AppWrapper.
         """
    @@ -299,7 +300,8 @@ 

    Class variables

    Expand source code -
    class RayCluster:
    +
    @dataclass
    +class RayCluster:
         """
         For storing information about a Ray cluster.
         """
    diff --git a/docs/detailed-documentation/job/ray_jobs.html b/docs/detailed-documentation/job/ray_jobs.html
    index f0d10cd8..01fa5fb5 100644
    --- a/docs/detailed-documentation/job/ray_jobs.html
    +++ b/docs/detailed-documentation/job/ray_jobs.html
    @@ -191,7 +191,7 @@ 

    Classes

    class RayJobClient -(address: Optional[str] = None, create_cluster_if_needed: bool = False, cookies: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, Any]] = None, verify: Union[str, bool, None] = True) +(address: Optional[str] = None, create_cluster_if_needed: bool = False, cookies: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, Any]] = None, verify: Union[str, bool, ForwardRef(None)] = True)

    A class that functions as a wrapper for the Ray Job Submission Client.

    @@ -461,7 +461,7 @@

    Methods

    -def submit_job(self, entrypoint: str, job_id: Optional[str] = None, runtime_env: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, str]] = None, submission_id: Optional[str] = None, entrypoint_num_cpus: Union[int, float, None] = None, entrypoint_num_gpus: Union[int, float, None] = None, entrypoint_resources: Optional[Dict[str, float]] = None) ‑> str +def submit_job(self, entrypoint: str, job_id: Optional[str] = None, runtime_env: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, str]] = None, submission_id: Optional[str] = None, entrypoint_num_cpus: Union[int, float, ForwardRef(None)] = None, entrypoint_num_gpus: Union[int, float, ForwardRef(None)] = None, entrypoint_resources: Optional[Dict[str, float]] = None) ‑> str

    Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.

    diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index 5ee410ce..1e421762 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -108,13 +108,10 @@

    Module codeflare_sdk.utils.generate_yaml

    return False -def update_names(yaml, item, appwrapper_name, cluster_name, namespace): - metadata = yaml.get("metadata") - metadata["name"] = appwrapper_name - metadata["namespace"] = namespace - lower_meta = item.get("template", {}).get("metadata") - lower_meta["name"] = cluster_name - lower_meta["namespace"] = namespace +def update_names(cluster_yaml, cluster_name, namespace): + meta = cluster_yaml.get("metadata") + meta["name"] = cluster_name + meta["namespace"] = namespace def update_image(spec, image): @@ -156,7 +153,7 @@

    Module codeflare_sdk.utils.generate_yaml

    def update_nodes( - item, + cluster_yaml, appwrapper_name, min_cpu, max_cpu, @@ -171,48 +168,35 @@

    Module codeflare_sdk.utils.generate_yaml

    head_memory, head_gpus, ): - if "template" in item.keys(): - head = item.get("template").get("spec").get("headGroupSpec") - head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) - - worker = item.get("template").get("spec").get("workerGroupSpecs")[0] - # Head counts as first worker - worker["replicas"] = workers - worker["minReplicas"] = workers - worker["maxReplicas"] = workers - worker["groupName"] = "small-group-" + appwrapper_name - worker["rayStartParams"]["num-gpus"] = str(int(gpu)) - - for comp in [head, worker]: - spec = comp.get("template").get("spec") - update_image_pull_secrets(spec, image_pull_secrets) - update_image(spec, image) - update_env(spec, env) - if comp == head: - # TODO: Eventually add head node configuration outside of template - update_resources( - spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus - ) - else: - update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) + head = cluster_yaml.get("spec").get("headGroupSpec") + head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) + + worker = cluster_yaml.get("spec").get("workerGroupSpecs")[0] + # Head counts as first worker + worker["replicas"] = workers + worker["minReplicas"] = workers + worker["maxReplicas"] = workers + worker["groupName"] = "small-group-" + appwrapper_name + worker["rayStartParams"]["num-gpus"] = str(int(gpu)) + + for comp in [head, worker]: + spec = comp.get("template").get("spec") + update_image_pull_secrets(spec, image_pull_secrets) + update_image(spec, image) + update_env(spec, env) + if comp == head: + # TODO: Eventually add head node configuration outside of template + update_resources( + spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus + ) + else: + update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) def del_from_list_by_name(l: list, target: typing.List[str]) -> list: return [x for x in l if x["name"] not in target] -def write_user_appwrapper(user_yaml, output_file_name): - # Create the directory if it doesn't exist - directory_path = os.path.dirname(output_file_name) - if not os.path.exists(directory_path): - os.makedirs(directory_path) - - with open(output_file_name, "w") as outfile: - yaml.dump(user_yaml, outfile, default_flow_style=False) - - print(f"Written to: {output_file_name}") - - def get_default_kueue_name(namespace: str): # If the local queue is set, use it. Otherwise, try to use the default queue. try: @@ -271,53 +255,40 @@

    Module codeflare_sdk.utils.generate_yaml

    def augment_labels(item: dict, labels: dict): - if "template" in item: - if not "labels" in item["template"]["metadata"]: - item["template"]["metadata"]["labels"] = {} - item["template"]["metadata"]["labels"].update(labels) + if not "labels" in item["metadata"]: + item["metadata"]["labels"] = {} + item["metadata"]["labels"].update(labels) -def write_components( - user_yaml: dict, - output_file_name: str, -): +def notebook_annotations(item: dict): + nb_prefix = os.environ.get("NB_PREFIX") + if nb_prefix: + if not "annotations" in item["metadata"]: + item["metadata"]["annotations"] = {} + item["metadata"]["annotations"].update( + {"app.kubernetes.io/managed-by": nb_prefix} + ) + + +def wrap_cluster(cluster_yaml: dict, appwrapper_name: str, namespace: str): + return { + "apiVersion": "workload.codeflare.dev/v1beta2", + "kind": "AppWrapper", + "metadata": {"name": appwrapper_name, "namespace": namespace}, + "spec": {"components": [{"template": cluster_yaml}]}, + } + + +def write_user_yaml(user_yaml, output_file_name): # Create the directory if it doesn't exist directory_path = os.path.dirname(output_file_name) if not os.path.exists(directory_path): os.makedirs(directory_path) - components = user_yaml.get("spec", "resources").get("components") - open(output_file_name, "w").close() - with open(output_file_name, "a") as outfile: - for component in components: - if "template" in component: - outfile.write("---\n") - yaml.dump(component["template"], outfile, default_flow_style=False) - print(f"Written to: {output_file_name}") - - -def load_components( - user_yaml: dict, - name: str, -): - component_list = [] - components = user_yaml.get("spec", "resources").get("components") - for component in components: - if "template" in component: - component_list.append(component["template"]) - - resources = "---\n" + "---\n".join( - [yaml.dump(component) for component in component_list] - ) - user_yaml = resources - print(f"Yaml resources loaded for {name}") - return user_yaml - + with open(output_file_name, "w") as outfile: + yaml.dump(user_yaml, outfile, default_flow_style=False) -def load_appwrapper(user_yaml: dict, name: str): - user_yaml = yaml.dump(user_yaml) - print(f"Yaml resources loaded for {name}") - return user_yaml + print(f"Written to: {output_file_name}") def generate_appwrapper( @@ -335,27 +306,17 @@

    Module codeflare_sdk.utils.generate_yaml

    template: str, image: str, appwrapper: bool, - instance_types: list, env, image_pull_secrets: list, write_to_file: bool, - verify_tls: bool, local_queue: Optional[str], labels, ): - user_yaml = read_template(template) + cluster_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) - resources = user_yaml.get("spec", "resources") - item = resources.get("components")[0] - update_names( - user_yaml, - item, - appwrapper_name, - cluster_name, - namespace, - ) + update_names(cluster_yaml, cluster_name, namespace) update_nodes( - item, + cluster_yaml, appwrapper_name, min_cpu, max_cpu, @@ -370,28 +331,25 @@

    Module codeflare_sdk.utils.generate_yaml

    head_memory, head_gpus, ) + augment_labels(cluster_yaml, labels) + notebook_annotations(cluster_yaml) - augment_labels(item, labels) - - if appwrapper: - add_queue_label(user_yaml, namespace, local_queue) - else: - add_queue_label(item["template"], namespace, local_queue) + user_yaml = ( + wrap_cluster(cluster_yaml, appwrapper_name, namespace) + if appwrapper + else cluster_yaml + ) - directory_path = os.path.expanduser("~/.codeflare/resources/") - outfile = os.path.join(directory_path, appwrapper_name + ".yaml") + add_queue_label(user_yaml, namespace, local_queue) if write_to_file: - if appwrapper: - write_user_appwrapper(user_yaml, outfile) - else: - write_components(user_yaml, outfile) + directory_path = os.path.expanduser("~/.codeflare/resources/") + outfile = os.path.join(directory_path, appwrapper_name + ".yaml") + write_user_yaml(user_yaml, outfile) return outfile else: - if appwrapper: - user_yaml = load_appwrapper(user_yaml, name) - else: - user_yaml = load_components(user_yaml, name) + user_yaml = yaml.dump(user_yaml) + print(f"Yaml resources loaded for {name}") return user_yaml
    @@ -432,10 +390,9 @@

    Functions

    Expand source code
    def augment_labels(item: dict, labels: dict):
    -    if "template" in item:
    -        if not "labels" in item["template"]["metadata"]:
    -            item["template"]["metadata"]["labels"] = {}
    -    item["template"]["metadata"]["labels"].update(labels)
    + if not "labels" in item["metadata"]: + item["metadata"]["labels"] = {} + item["metadata"]["labels"].update(labels)
    @@ -471,7 +428,7 @@

    Functions

    -def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, appwrapper: bool, instance_types: list, env, image_pull_secrets: list, write_to_file: bool, verify_tls: bool, local_queue: Optional[str], labels) +def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, appwrapper: bool, env, image_pull_secrets: list, write_to_file: bool, local_queue: Optional[str], labels)
    @@ -494,27 +451,17 @@

    Functions

    template: str, image: str, appwrapper: bool, - instance_types: list, env, image_pull_secrets: list, write_to_file: bool, - verify_tls: bool, local_queue: Optional[str], labels, ): - user_yaml = read_template(template) + cluster_yaml = read_template(template) appwrapper_name, cluster_name = gen_names(name) - resources = user_yaml.get("spec", "resources") - item = resources.get("components")[0] - update_names( - user_yaml, - item, - appwrapper_name, - cluster_name, - namespace, - ) + update_names(cluster_yaml, cluster_name, namespace) update_nodes( - item, + cluster_yaml, appwrapper_name, min_cpu, max_cpu, @@ -529,28 +476,25 @@

    Functions

    head_memory, head_gpus, ) + augment_labels(cluster_yaml, labels) + notebook_annotations(cluster_yaml) - augment_labels(item, labels) - - if appwrapper: - add_queue_label(user_yaml, namespace, local_queue) - else: - add_queue_label(item["template"], namespace, local_queue) + user_yaml = ( + wrap_cluster(cluster_yaml, appwrapper_name, namespace) + if appwrapper + else cluster_yaml + ) - directory_path = os.path.expanduser("~/.codeflare/resources/") - outfile = os.path.join(directory_path, appwrapper_name + ".yaml") + add_queue_label(user_yaml, namespace, local_queue) if write_to_file: - if appwrapper: - write_user_appwrapper(user_yaml, outfile) - else: - write_components(user_yaml, outfile) + directory_path = os.path.expanduser("~/.codeflare/resources/") + outfile = os.path.join(directory_path, appwrapper_name + ".yaml") + write_user_yaml(user_yaml, outfile) return outfile else: - if appwrapper: - user_yaml = load_appwrapper(user_yaml, name) - else: - user_yaml = load_components(user_yaml, name) + user_yaml = yaml.dump(user_yaml) + print(f"Yaml resources loaded for {name}") return user_yaml
    @@ -633,48 +577,6 @@

    Functions

    return _kube_api_error_handling(e)
    -
    -def load_appwrapper(user_yaml: dict, name: str) -
    -
    -
    -
    - -Expand source code - -
    def load_appwrapper(user_yaml: dict, name: str):
    -    user_yaml = yaml.dump(user_yaml)
    -    print(f"Yaml resources loaded for {name}")
    -    return user_yaml
    -
    -
    -
    -def load_components(user_yaml: dict, name: str) -
    -
    -
    -
    - -Expand source code - -
    def load_components(
    -    user_yaml: dict,
    -    name: str,
    -):
    -    component_list = []
    -    components = user_yaml.get("spec", "resources").get("components")
    -    for component in components:
    -        if "template" in component:
    -            component_list.append(component["template"])
    -
    -    resources = "---\n" + "---\n".join(
    -        [yaml.dump(component) for component in component_list]
    -    )
    -    user_yaml = resources
    -    print(f"Yaml resources loaded for {name}")
    -    return user_yaml
    -
    -
    def local_queue_exists(namespace: str, local_queue_name: str)
    @@ -704,6 +606,25 @@

    Functions

    return False +
    +def notebook_annotations(item: dict) +
    +
    +
    +
    + +Expand source code + +
    def notebook_annotations(item: dict):
    +    nb_prefix = os.environ.get("NB_PREFIX")
    +    if nb_prefix:
    +        if not "annotations" in item["metadata"]:
    +            item["metadata"]["annotations"] = {}
    +        item["metadata"]["annotations"].update(
    +            {"app.kubernetes.io/managed-by": nb_prefix}
    +        )
    +
    +
    def read_template(template)
    @@ -772,7 +693,7 @@

    Functions

    -def update_names(yaml, item, appwrapper_name, cluster_name, namespace) +def update_names(cluster_yaml, cluster_name, namespace)
    @@ -780,17 +701,14 @@

    Functions

    Expand source code -
    def update_names(yaml, item, appwrapper_name, cluster_name, namespace):
    -    metadata = yaml.get("metadata")
    -    metadata["name"] = appwrapper_name
    -    metadata["namespace"] = namespace
    -    lower_meta = item.get("template", {}).get("metadata")
    -    lower_meta["name"] = cluster_name
    -    lower_meta["namespace"] = namespace
    +
    def update_names(cluster_yaml, cluster_name, namespace):
    +    meta = cluster_yaml.get("metadata")
    +    meta["name"] = cluster_name
    +    meta["namespace"] = namespace
    -def update_nodes(item, appwrapper_name, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, image, env, image_pull_secrets, head_cpus, head_memory, head_gpus) +def update_nodes(cluster_yaml, appwrapper_name, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, image, env, image_pull_secrets, head_cpus, head_memory, head_gpus)
    @@ -799,7 +717,7 @@

    Functions

    Expand source code
    def update_nodes(
    -    item,
    +    cluster_yaml,
         appwrapper_name,
         min_cpu,
         max_cpu,
    @@ -814,30 +732,29 @@ 

    Functions

    head_memory, head_gpus, ): - if "template" in item.keys(): - head = item.get("template").get("spec").get("headGroupSpec") - head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) - - worker = item.get("template").get("spec").get("workerGroupSpecs")[0] - # Head counts as first worker - worker["replicas"] = workers - worker["minReplicas"] = workers - worker["maxReplicas"] = workers - worker["groupName"] = "small-group-" + appwrapper_name - worker["rayStartParams"]["num-gpus"] = str(int(gpu)) - - for comp in [head, worker]: - spec = comp.get("template").get("spec") - update_image_pull_secrets(spec, image_pull_secrets) - update_image(spec, image) - update_env(spec, env) - if comp == head: - # TODO: Eventually add head node configuration outside of template - update_resources( - spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus - ) - else: - update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu)
    + head = cluster_yaml.get("spec").get("headGroupSpec") + head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) + + worker = cluster_yaml.get("spec").get("workerGroupSpecs")[0] + # Head counts as first worker + worker["replicas"] = workers + worker["minReplicas"] = workers + worker["maxReplicas"] = workers + worker["groupName"] = "small-group-" + appwrapper_name + worker["rayStartParams"]["num-gpus"] = str(int(gpu)) + + for comp in [head, worker]: + spec = comp.get("template").get("spec") + update_image_pull_secrets(spec, image_pull_secrets) + update_image(spec, image) + update_env(spec, env) + if comp == head: + # TODO: Eventually add head node configuration outside of template + update_resources( + spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus + ) + else: + update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu)
    @@ -864,8 +781,8 @@

    Functions

    limits["nvidia.com/gpu"] = gpu
    -
    -def write_components(user_yaml: dict, output_file_name: str) +
    +def wrap_cluster(cluster_yaml: dict, appwrapper_name: str, namespace: str)
    @@ -873,27 +790,17 @@

    Functions

    Expand source code -
    def write_components(
    -    user_yaml: dict,
    -    output_file_name: str,
    -):
    -    # Create the directory if it doesn't exist
    -    directory_path = os.path.dirname(output_file_name)
    -    if not os.path.exists(directory_path):
    -        os.makedirs(directory_path)
    -
    -    components = user_yaml.get("spec", "resources").get("components")
    -    open(output_file_name, "w").close()
    -    with open(output_file_name, "a") as outfile:
    -        for component in components:
    -            if "template" in component:
    -                outfile.write("---\n")
    -                yaml.dump(component["template"], outfile, default_flow_style=False)
    -    print(f"Written to: {output_file_name}")
+def wrap_cluster(cluster_yaml: dict, appwrapper_name: str, namespace: str):
    +    return {
    +        "apiVersion": "workload.codeflare.dev/v1beta2",
    +        "kind": "AppWrapper",
    +        "metadata": {"name": appwrapper_name, "namespace": namespace},
    +        "spec": {"components": [{"template": cluster_yaml}]},
    +    }
    -
-def write_user_appwrapper(user_yaml, output_file_name)
+
    +def write_user_yaml(user_yaml, output_file_name)
    @@ -901,7 +808,7 @@

    Functions

Expand source code
-def write_user_appwrapper(user_yaml, output_file_name):
+def write_user_yaml(user_yaml, output_file_name):
         # Create the directory if it doesn't exist
         directory_path = os.path.dirname(output_file_name)
         if not os.path.exists(directory_path):
    @@ -939,9 +846,8 @@ 

    Index

  • get_default_kueue_name
  • is_kind_cluster
  • is_openshift_cluster
  • -load_appwrapper
  • -load_components
  • local_queue_exists
  • +
  • notebook_annotations
  • read_template
  • update_env
  • update_image
  • @@ -949,8 +855,8 @@

    Index

  • update_names
  • update_nodes
  • update_resources
  • -write_components
  • -write_user_appwrapper
  • +wrap_cluster
  • +write_user_yaml
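
To illustrate the renamed helpers end to end, here is a minimal usage sketch. It assumes the signatures shown in the diff above and an import path of codeflare_sdk.utils.generate_yaml; the input file name, cluster name, namespace, and output path are hypothetical.

# Minimal sketch, assuming the signatures shown above; the module path,
# file names, cluster name, and namespace are assumptions for illustration.
import yaml

from codeflare_sdk.utils.generate_yaml import wrap_cluster, write_user_yaml

with open("raycluster.yaml") as f:
    cluster_yaml = yaml.safe_load(f)  # a plain RayCluster resource as a dict

# Wrap the RayCluster in a workload.codeflare.dev/v1beta2 AppWrapper
appwrapper = wrap_cluster(cluster_yaml, "raytest", "default")

# Write the wrapped resource out; the helper creates the target directory if needed
write_user_yaml(appwrapper, "/tmp/raytest-appwrapper.yaml")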
  • From 713d01107332b026db84618c968f35bf9cacf529 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 21 Jun 2024 11:58:26 +0100 Subject: [PATCH 275/496] Updated the Ray image in docs/nbs to 2.20.0 --- demo-notebooks/additional-demos/hf_interactive.ipynb | 2 +- demo-notebooks/additional-demos/local_interactive.ipynb | 2 +- demo-notebooks/additional-demos/ray_job_client.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- demo-notebooks/guided-demos/1_cluster_job_client.ipynb | 2 +- demo-notebooks/guided-demos/2_basic_interactive.ipynb | 2 +- .../guided-demos/notebook-ex-outputs/0_basic_ray.ipynb | 2 +- .../notebook-ex-outputs/1_cluster_job_client.ipynb | 2 +- .../notebook-ex-outputs/2_basic_interactive.ipynb | 2 +- .../guided-demos/notebook-ex-outputs/interactivetest.yaml | 4 ++-- demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml | 4 ++-- demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml | 4 ++-- demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb | 2 +- .../guided-demos/preview_nbs/1_cluster_job_client.ipynb | 2 +- .../guided-demos/preview_nbs/2_basic_interactive.ipynb | 2 +- docs/cluster-configuration.md | 2 +- 16 files changed, 19 insertions(+), 19 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index a7b004cb..3db8c11b 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -98,7 +98,7 @@ " max_cpus=8, \n", " min_memory=16, \n", " max_memory=16, \n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", " ))" diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 94e657a7..74ac4055 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -62,7 +62,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", " ))" diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 0182f1b3..c58a4d73 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -53,7 +53,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", "))" ] diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 7dc9e8cd..fd0eea26 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -70,7 +70,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " 
image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 9e626f55..f32f6046 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -52,7 +52,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 86142714..7b89e8f0 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -69,7 +69,7 @@ " max_cpus=2,\n", " min_memory=8,\n", " max_memory=8,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 569b4d34..ed21f46d 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -78,7 +78,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index 608fbd2e..7deb6633 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -52,7 +52,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 5ae66a0c..5e499b6b 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -77,7 +77,7 @@ " max_cpus=2,\n", " 
min_memory=8,\n", " max_memory=8,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index 786ef039..6426a017 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -81,7 +81,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -147,7 +147,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index 60a4b7b2..6d7aa0d3 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index 53e4be9a..0c31914c 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 imagePullPolicy: Always lifecycle: preStop: @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/project-codeflare/ray:latest-py39-cu118 + image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 77b0e278..5c640424 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -70,7 +70,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to 
/HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index a041ab5b..5ec37998 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -52,7 +52,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources\n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index d7d4d69d..17299c10 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -69,7 +69,7 @@ " max_cpus=2,\n", " min_memory=8,\n", " max_memory=8,\n", - " image=\"quay.io/project-codeflare/ray:latest-py39-cu118\",\n", + " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index ae6cd2ea..a63f5be1 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -19,7 +19,7 @@ cluster = Cluster(ClusterConfiguration( min_memory=2, # Default 2 max_memory=2, # Default 2 num_gpus=0, # Default 0 - image="quay.io/project-codeflare/ray:latest-py39-cu118", # Mandatory Field + image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", # Mandatory Field machine_types=["m5.xlarge", "g4dn.xlarge"], labels={"exampleLabel": "example", "secondLabel": "example"}, )) From 9a1d66d36fce8d144d0089959ab6544e56ea87f5 Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Tue, 25 Jun 2024 19:52:38 +0530 Subject: [PATCH 276/496] Fix ODH notebooks sync workflow --- .github/workflows/odh-notebooks-sync.yml | 29 ++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index 14a743a6..9ce1858f 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -25,6 +25,7 @@ env: REPO_OWNER: ${{ github.event.inputs.codeflare-repository-organization }} REPO_NAME: notebooks GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} + MINIMUM_SUPPORTED_PYTHON_VERSION: 3.9 jobs: build: @@ -44,7 +45,6 @@ jobs: uses: actions/setup-python@v4 with: python-version: | - 3.8 3.9 - name: Install pipenv and pip-versions @@ -92,7 +92,32 @@ jobs: for dir in "${directories[@]}"; do counter=$((counter+1)) echo "--Processing directory $counter '$dir' of total $total" - cd "$dir" && pipenv install ${package_name}~="${CODEFLARE_RELEASE_VERSION}" && pipenv --rm && cd - + cd "$dir" + minimum_supported_python_version_major=$(echo "${MINIMUM_SUPPORTED_PYTHON_VERSION}" | awk -F '.' 
'{print $1}') #integer of MINIMUM_SUPPORTED_PYTHON_VERSION env variable + minimum_supported_python_version_minor=$(echo "${MINIMUM_SUPPORTED_PYTHON_VERSION}" | awk -F '.' '{print $2}') #decimal of MINIMUM_SUPPORTED_PYTHON_VERSION env variable + pipfile_python_version=$(grep -E '^python_version' ./Pipfile | cut -d '"' -f 2) # extracted from pipfile + pipfile_python_version_major=$(echo "$pipfile_python_version" | awk -F '.' '{print $1}') + pipfile_python_version_minor=$(echo "$pipfile_python_version" | awk -F '.' '{print $2}') + if [[ "pipfile_python_version_major" -ge "$minimum_supported_python_version_major" && "pipfile_python_version_minor" -ge "$minimum_supported_python_version_minor" ]]; then + #install specified package + if ! pipenv install ${package_name}~="${CODEFLARE_RELEASE_VERSION}"; then + echo "Failed to install ${package_name} with version ${CODEFLARE_RELEASE_VERSION} in $dir" + exit 1 + fi + # Lock dependencies, ensuring pre-release are included and clear previous state + if ! pipenv lock --pre --clear ; then + echo "Failed to lock dependencies" + exit 1 + fi + # remove virtual env and clear cache + if ! pipenv --rm --clear ; then + echo "Failed to remove virtual environment" + exit 1 + fi + else + echo "Skipped installation of ${package_name} with version ${CODEFLARE_RELEASE_VERSION} in $dir" + fi + cd - echo "$((total-counter)) directories remaining.." done else From 34a9841b850c7281bed23dab08d2124db87ac5b1 Mon Sep 17 00:00:00 2001 From: Antonin Stefanutti Date: Fri, 28 Jun 2024 09:54:05 +0200 Subject: [PATCH 277/496] Default to Ray container image provided by OpenShift AI --- .../additional-demos/hf_interactive.ipynb | 1040 +++++------ .../additional-demos/local_interactive.ipynb | 2 +- .../additional-demos/ray_job_client.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- .../guided-demos/1_cluster_job_client.ipynb | 2 +- .../guided-demos/2_basic_interactive.ipynb | 2 +- .../notebook-ex-outputs/0_basic_ray.ipynb | 46 +- .../1_cluster_job_client.ipynb | 2 +- .../2_basic_interactive.ipynb | 1576 ++++++++--------- .../notebook-ex-outputs/interactivetest.yaml | 4 +- .../notebook-ex-outputs/jobtest.yaml | 4 +- .../notebook-ex-outputs/raytest.yaml | 4 +- .../preview_nbs/0_basic_ray.ipynb | 2 +- .../preview_nbs/1_cluster_job_client.ipynb | 2 +- .../preview_nbs/2_basic_interactive.ipynb | 2 +- docs/cluster-configuration.md | 2 +- poetry.lock | 136 +- pyproject.toml | 2 +- .../templates/base-template.yaml | 6 +- tests/e2e/support.py | 2 +- tests/test-case-bad.yaml | 6 +- tests/test-case-no-mcad.yamls | 6 +- tests/test-case.yaml | 6 +- tests/test-default-appwrapper.yaml | 6 +- tests/unit_test.py | 22 +- tests/unit_test_support.py | 2 +- 26 files changed, 1450 insertions(+), 1438 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 3db8c11b..ad552451 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -98,7 +98,7 @@ " max_cpus=8, \n", " min_memory=16, \n", " max_memory=16, \n", - " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", + " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", " ))" @@ -153,13 +153,13 @@ ], "text/plain": [ "╭─────────────────────────╮\n", - "│ \u001b[3m \u001b[0m\u001b[1;3m 🚀 List of 
CodeFlare\u001b[0m\u001b[3m \u001b[0m │\n", - "│ \u001b[3m \u001b[0m\u001b[1;3mclusters in queue🚀\u001b[0m\u001b[3m \u001b[0m │\n", + "│ \u001B[3m \u001B[0m\u001B[1;3m 🚀 List of CodeFlare\u001B[0m\u001B[3m \u001B[0m │\n", + "│ \u001B[3m \u001B[0m\u001B[1;3mclusters in queue🚀\u001B[0m\u001B[3m \u001B[0m │\n", "│ +-----------+---------+ │\n", - "│ |\u001b[1m \u001b[0m\u001b[1mName \u001b[0m\u001b[1m \u001b[0m|\u001b[1m \u001b[0m\u001b[1mStatus \u001b[0m\u001b[1m \u001b[0m| │\n", + "│ |\u001B[1m \u001B[0m\u001B[1mName \u001B[0m\u001B[1m \u001B[0m|\u001B[1m \u001B[0m\u001B[1mStatus \u001B[0m\u001B[1m \u001B[0m| │\n", "│ +===========+=========+ │\n", - "│ |\u001b[36m \u001b[0m\u001b[36mhfgputest\u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35mpending\u001b[0m\u001b[35m \u001b[0m| │\n", - "│ |\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m| │\n", + "│ |\u001B[36m \u001B[0m\u001B[36mhfgputest\u001B[0m\u001B[36m \u001B[0m|\u001B[35m \u001B[0m\u001B[35mpending\u001B[0m\u001B[35m \u001B[0m| │\n", + "│ |\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m|\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m| │\n", "│ +-----------+---------+ │\n", "╰─────────────────────────╯\n" ] @@ -240,22 +240,22 @@ "
    \n" ], "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 List of CodeFlare clusters 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", + "\u001B[3m \u001B[0m\u001B[1;3m 🚀 List of CodeFlare clusters 🚀\u001B[0m\u001B[3m \u001B[0m\n", + "\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\n", " ╭────────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mOwner\u001b[0m │ \n", - " │ \u001b[1;4mhfgputest\u001b[0m Active ✅ │ \n", + " │ \u001B[1;37;42mOwner\u001B[0m │ \n", + " │ \u001B[1;4mhfgputest\u001B[0m Active ✅ │ \n", " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://hfgputest-head-svc.default.svc:10001 │ \n", + " │ \u001B[1mURI:\u001B[0m ray://hfgputest-head-svc.default.svc:10001 │ \n", " │ │ \n", - " │ \u001b]8;id=552692;ray-dashboard-hfgputest-default.apps.prepfullinstall.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", + " │ \u001B]8;id=552692;ray-dashboard-hfgputest-default.apps.prepfullinstall.psap.aws.rhperfscale.org\u001B\\\u001B[4;34mDashboard🔗\u001B[0m\u001B]8;;\u001B\\ │ \n", " │ │ \n", - " │ \u001b[3m Cluster Resources \u001b[0m │ \n", + " │ \u001B[3m Cluster Resources \u001B[0m │ \n", " │ ╭─ Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001b[1m \u001b[0m\u001b[1mMin\u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mMax\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", - " │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[36m \u001b[0m\u001b[36m1 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m16G~16G \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m8 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m4 \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001B[1m \u001B[0m\u001B[1mMin\u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mMax\u001B[0m\u001B[1m \u001B[0m │ │ \u001B[1m \u001B[0m\u001B[1mMemory \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mCPU \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mGPU \u001B[0m\u001B[1m \u001B[0m │ │ \n", + " │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", + " │ │ \u001B[36m \u001B[0m\u001B[36m1 \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m1 \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m16G~16G \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m8 \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m4 
\u001B[0m\u001B[35m \u001B[0m │ │ \n", + " │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", " │ ╰────────────╯ ╰──────────────────────────────────────╯ │ \n", " ╰────────────────────────────────────────────────────────────────╯ \n" ] @@ -469,7 +469,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Downloading and preparing dataset imdb/plain_text to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1...\n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Downloading and preparing dataset imdb/plain_text to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1...\n" ] }, { @@ -645,7 +645,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Dataset imdb downloaded and prepared to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1. Subsequent calls will reuse this data.\n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Dataset imdb downloaded and prepared to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1. Subsequent calls will reuse this data.\n" ] }, { @@ -767,13 +767,13 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m len of train Dataset({\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m num_rows: 100\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m }) and test Dataset({\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m num_rows: 100\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m })\n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m len of train Dataset({\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m num_rows: 100\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m }) and test Dataset({\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m num_rows: 100\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m })\n" ] }, { @@ -787,54 +787,54 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m To disable this warning, you can either:\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \t- Avoid using `tokenizers` before the fork if possible\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:55:58 (running for 00:00:05.07)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 6.4/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m To disable this warning, you can either:\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \t- Avoid using `tokenizers` before the fork if possible\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:55:58 (running for 00:00:05.07)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 6.4/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m 2022-11-04 07:56:02,047\tINFO torch.py:346 -- Setting up process group for: env:// [rank=0, world_size=4]\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m 2022-11-04 07:56:02,045\tINFO torch.py:346 -- Setting up process group for: env:// [rank=2, world_size=4]\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m 2022-11-04 07:56:02,047\tINFO torch.py:346 -- Setting up process group for: env:// [rank=1, world_size=4]\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m 2022-11-04 07:56:02,048\tINFO torch.py:346 -- Setting up process group for: env:// [rank=3, world_size=4]\n" + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m 2022-11-04 07:56:02,047\tINFO torch.py:346 -- Setting up process group for: env:// [rank=0, world_size=4]\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m 2022-11-04 07:56:02,045\tINFO torch.py:346 -- Setting up process group for: env:// [rank=2, world_size=4]\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m 2022-11-04 07:56:02,047\tINFO torch.py:346 -- Setting up process group for: env:// [rank=1, world_size=4]\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m 2022-11-04 07:56:02,048\tINFO torch.py:346 -- Setting up process group for: env:// [rank=3, world_size=4]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:03 (running for 00:00:10.07)\n", - 
"\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 7.2/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:03 (running for 00:00:10.07)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 7.2/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" ] }, { @@ -881,20 +881,20 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:08 (running for 00:00:15.07)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 7.5/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn 
pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:08 (running for 00:00:15.07)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 7.5/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" ] }, { @@ -911,510 +911,510 @@ "Downloading: 95%|█████████▌| 255M/268M [00:04<00:00, 65.7MB/s]\n", "Downloading: 98%|█████████▊| 262M/268M [00:04<00:00, 65.8MB/s]\n", "Downloading: 100%|██████████| 268M/268M [00:04<00:00, 63.9MB/s]\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_projector.bias', 'vocab_layer_norm.bias', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_transform.weight', 'vocab_projector.weight']\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.bias', 'classifier.bias', 'classifier.weight', 'pre_classifier.weight']\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.bias', 'vocab_projector.bias', 'vocab_projector.weight', 'vocab_layer_norm.weight']\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight']\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_projector.weight', 'vocab_transform.weight', 'vocab_projector.bias', 'vocab_transform.bias', 'vocab_layer_norm.bias', 'vocab_layer_norm.weight']\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'pre_classifier.bias', 'pre_classifier.weight', 'classifier.weight']\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_layer_norm.weight', 'vocab_projector.weight', 'vocab_transform.weight', 'vocab_projector.bias', 'vocab_layer_norm.bias', 'vocab_transform.bias']\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'classifier.weight', 'pre_classifier.bias', 'pre_classifier.weight']\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m warnings.warn(\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. 
Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m warnings.warn(\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m warnings.warn(\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m warnings.warn(\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m ***** Running training *****\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Num examples = 6250\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Num Epochs = 1\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Instantaneous batch size per device = 16\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Total train batch size (w. parallel, distributed & accumulation) = 64\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Gradient Accumulation steps = 1\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Total optimization steps = 391\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Number of trainable parameters = 66955010\n" + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_projector.bias', 'vocab_layer_norm.bias', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_transform.weight', 'vocab_projector.weight']\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.bias', 'classifier.bias', 'classifier.weight', 'pre_classifier.weight']\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.bias', 'vocab_projector.bias', 'vocab_projector.weight', 'vocab_layer_norm.weight']\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight']\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_projector.weight', 'vocab_transform.weight', 'vocab_projector.bias', 'vocab_transform.bias', 'vocab_layer_norm.bias', 'vocab_layer_norm.weight']\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'pre_classifier.bias', 'pre_classifier.weight', 'classifier.weight']\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_layer_norm.weight', 'vocab_projector.weight', 'vocab_transform.weight', 'vocab_projector.bias', 'vocab_layer_norm.bias', 'vocab_transform.bias']\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'classifier.weight', 'pre_classifier.bias', 'pre_classifier.weight']\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m warnings.warn(\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. 
Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m warnings.warn(\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m warnings.warn(\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m warnings.warn(\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m ***** Running training *****\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Num examples = 6250\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Num Epochs = 1\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Instantaneous batch size per device = 16\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Total train batch size (w. 
parallel, distributed & accumulation) = 64\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Gradient Accumulation steps = 1\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Total optimization steps = 391\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Number of trainable parameters = 66955010\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:13 (running for 00:00:20.08)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 12.3/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:13 (running for 00:00:20.08)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 12.3/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. 
This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n" + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. 
(function operator())\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:18 (running for 00:00:25.08)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:23 (running for 00:00:30.08)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - 
"\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:28 (running for 00:00:35.09)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:33 (running for 00:00:40.09)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:38 (running for 00:00:45.10)\n", - "\u001b[2m\u001b[36m(train_fn 
pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:43 (running for 00:00:50.10)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:48 (running for 00:00:55.10)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 
|--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:53 (running for 00:01:00.10)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:59 (running for 00:01:05.11)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:04 (running for 00:01:10.11)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling 
algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:09 (running for 00:01:15.11)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:14 (running for 00:01:20.12)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | 
HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:19 (running for 00:01:25.12)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:24 (running for 00:01:30.12)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:29 (running for 00:01:35.13)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB 
heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:34 (running for 00:01:40.13)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:39 (running for 00:01:45.13)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 
+--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:44 (running for 00:01:50.13)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:49 (running for 00:01:55.14)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:54 (running for 00:02:00.14)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: 
/home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:59 (running for 00:02:05.15)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:18 (running for 00:00:25.08)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + 
"\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:23 (running for 00:00:30.08)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:28 (running for 00:00:35.09)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:33 (running for 00:00:40.09)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 
RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:38 (running for 00:00:45.10)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:43 (running for 00:00:50.10)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + 
"\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:48 (running for 00:00:55.10)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:53 (running for 00:01:00.10)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:59 (running for 00:01:05.11)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 
+--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:04 (running for 00:01:10.11)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:09 (running for 00:01:15.11)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:14 
(running for 00:01:20.12)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:19 (running for 00:01:25.12)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:24 (running for 00:01:30.12)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc 
|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:29 (running for 00:01:35.13)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:34 (running for 00:01:40.13)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:39 (running for 00:01:45.13)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + 
"\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:44 (running for 00:01:50.13)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:49 (running for 00:01:55.14)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + 
"\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:54 (running for 00:02:00.14)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:59 (running for 00:02:05.15)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Saving model checkpoint to /tmp/hf_imdb/test/checkpoint-391\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Configuration saved in /tmp/hf_imdb/test/checkpoint-391/config.json\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Model 
weights saved in /tmp/hf_imdb/test/checkpoint-391/pytorch_model.bin\n" + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Saving model checkpoint to /tmp/hf_imdb/test/checkpoint-391\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Configuration saved in /tmp/hf_imdb/test/checkpoint-391/config.json\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Model weights saved in /tmp/hf_imdb/test/checkpoint-391/pytorch_model.bin\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result for HuggingFaceTrainer_c7d60_00000:\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _time_this_iter_s: 118.07144260406494\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _timestamp: 1667573883\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _training_iteration: 1\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m date: 2022-11-04_07-58-03\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m done: false\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m epoch: 1.0\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m experiment_id: 7bc6ab25d0414fcbb589bcb5d0f29b99\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m hostname: hfgputest-worker-small-group-hfgputest-q4758\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m iterations_since_restore: 1\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m node_ip: 10.129.66.16\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m pid: 146\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m should_checkpoint: true\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m step: 391\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_since_restore: 124.55581378936768\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_this_iter_s: 124.55581378936768\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_total_s: 124.55581378936768\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m timestamp: 1667573883\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m timesteps_since_restore: 0\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_loss: 0.2760564701636429\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_runtime: 109.7668\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_samples_per_second: 56.939\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_steps_per_second: 3.562\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m training_iteration: 1\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m trial_id: c7d60_00000\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m warmup_time: 0.003995656967163086\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m {'train_runtime': 109.7668, 'train_samples_per_second': 56.939, 'train_steps_per_second': 3.562, 'train_loss': 0.2760564701636429, 'epoch': 1.0}\n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result for HuggingFaceTrainer_c7d60_00000:\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _time_this_iter_s: 118.07144260406494\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _timestamp: 1667573883\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _training_iteration: 1\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m date: 2022-11-04_07-58-03\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m done: false\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m epoch: 1.0\n", + 
"\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m experiment_id: 7bc6ab25d0414fcbb589bcb5d0f29b99\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m hostname: hfgputest-worker-small-group-hfgputest-q4758\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m iterations_since_restore: 1\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m node_ip: 10.129.66.16\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m pid: 146\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m should_checkpoint: true\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m step: 391\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_since_restore: 124.55581378936768\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_this_iter_s: 124.55581378936768\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_total_s: 124.55581378936768\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m timestamp: 1667573883\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m timesteps_since_restore: 0\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_loss: 0.2760564701636429\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_runtime: 109.7668\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_samples_per_second: 56.939\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_steps_per_second: 3.562\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m training_iteration: 1\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m trial_id: c7d60_00000\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m warmup_time: 0.003995656967163086\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m {'train_runtime': 109.7668, 'train_samples_per_second': 56.939, 'train_steps_per_second': 3.562, 'train_loss': 0.2760564701636429, 'epoch': 1.0}\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m \n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m \n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Training completed. Do not forget to share your model on huggingface.co/models =)\n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m \n", - "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m \n" + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m \n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m \n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Training completed. 
Do not forget to share your model on huggingface.co/models =)\n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m \n", + "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m \n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:58:13 (running for 00:02:19.36)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 16.0/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc | iter | total time (s) | train_runtime | train_samples_per_second | train_steps_per_second |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 | 1 | 124.556 | 109.767 | 56.939 | 3.562 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:58:13 (running for 00:02:19.36)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 16.0/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc | iter | total time (s) | train_runtime | train_samples_per_second | train_steps_per_second |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 
10.129.66.16:146 | 1 | 124.556 | 109.767 | 56.939 | 3.562 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 2022-11-04 07:58:13,248\tWARNING util.py:214 -- The `process_trial_save` operation took 9.709 s, which may be a performance bottleneck.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 2022-11-04 07:58:13,248\tWARNING trial_runner.py:856 -- Consider turning off forced head-worker trial checkpoint syncs by setting sync_on_checkpoint=False. Note that this may result in faulty trial restoration if a failure occurs while the checkpoint is being synced from the worker to the head node.\n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 2022-11-04 07:58:13,248\tWARNING util.py:214 -- The `process_trial_save` operation took 9.709 s, which may be a performance bottleneck.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 2022-11-04 07:58:13,248\tWARNING trial_runner.py:856 -- Consider turning off forced head-worker trial checkpoint syncs by setting sync_on_checkpoint=False. Note that this may result in faulty trial restoration if a failure occurs while the checkpoint is being synced from the worker to the head node.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result for HuggingFaceTrainer_c7d60_00000:\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _time_this_iter_s: 118.07144260406494\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _timestamp: 1667573883\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _training_iteration: 1\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m date: 2022-11-04_07-58-03\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m done: true\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m epoch: 1.0\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m experiment_id: 7bc6ab25d0414fcbb589bcb5d0f29b99\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m experiment_tag: '0'\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m hostname: hfgputest-worker-small-group-hfgputest-q4758\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m iterations_since_restore: 1\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m node_ip: 10.129.66.16\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m pid: 146\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m should_checkpoint: true\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m step: 391\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_since_restore: 124.55581378936768\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_this_iter_s: 124.55581378936768\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_total_s: 124.55581378936768\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m timestamp: 1667573883\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m timesteps_since_restore: 0\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_loss: 0.2760564701636429\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_runtime: 109.7668\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_samples_per_second: 56.939\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_steps_per_second: 
3.562\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m training_iteration: 1\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m trial_id: c7d60_00000\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m warmup_time: 0.003995656967163086\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:58:16 (running for 00:02:22.40)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 9.1/240.1 GiB\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 0/10 CPUs, 0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 TERMINATED)\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc | iter | total time (s) | train_runtime | train_samples_per_second | train_steps_per_second |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | TERMINATED | 10.129.66.16:146 | 1 | 124.556 | 109.767 | 56.939 | 3.562 |\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result for HuggingFaceTrainer_c7d60_00000:\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _time_this_iter_s: 118.07144260406494\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _timestamp: 1667573883\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _training_iteration: 1\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m date: 2022-11-04_07-58-03\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m done: true\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m epoch: 1.0\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m experiment_id: 7bc6ab25d0414fcbb589bcb5d0f29b99\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m experiment_tag: '0'\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m hostname: hfgputest-worker-small-group-hfgputest-q4758\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m iterations_since_restore: 1\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m node_ip: 10.129.66.16\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m pid: 146\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m should_checkpoint: true\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m step: 391\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_since_restore: 124.55581378936768\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_this_iter_s: 124.55581378936768\n", + 
"\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_total_s: 124.55581378936768\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m timestamp: 1667573883\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m timesteps_since_restore: 0\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_loss: 0.2760564701636429\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_runtime: 109.7668\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_samples_per_second: 56.939\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_steps_per_second: 3.562\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m training_iteration: 1\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m trial_id: c7d60_00000\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m warmup_time: 0.003995656967163086\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:58:16 (running for 00:02:22.40)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 9.1/240.1 GiB\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 0/10 CPUs, 0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 TERMINATED)\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc | iter | total time (s) | train_runtime | train_samples_per_second | train_steps_per_second |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | TERMINATED | 10.129.66.16:146 | 1 | 124.556 | 109.767 | 56.939 | 3.562 |\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 2022-11-04 07:58:16,286\tWARNING util.py:214 -- The `process_trial_save` operation took 2.161 s, which may be a performance bottleneck.\n", - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 2022-11-04 07:58:16,398\tINFO tune.py:747 -- Total run time: 142.70 seconds (142.40 seconds for the tuning loop).\n" + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 2022-11-04 07:58:16,286\tWARNING util.py:214 -- The `process_trial_save` operation took 2.161 s, which may be a performance bottleneck.\n", + "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 2022-11-04 07:58:16,398\tINFO tune.py:747 -- Total run time: 142.70 seconds (142.40 seconds for the tuning loop).\n" ] } ], diff --git 
a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 74ac4055..1d3c83ad 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -62,7 +62,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", + " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", " ))" diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index c58a4d73..c452fb31 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -53,7 +53,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", + " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", "))" ] diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index fd0eea26..0cd3419f 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -70,7 +70,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", + " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index f32f6046..de8fafdd 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -52,7 +52,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", + " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 7b89e8f0..b6a13b8c 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -69,7 +69,7 @@ " max_cpus=2,\n", " min_memory=8,\n", " max_memory=8,\n", - " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", + " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index ed21f46d..646e2424 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -78,7 +78,7 @@ " 
max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", + " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" @@ -134,13 +134,13 @@ ], "text/plain": [ "╭───────────────────────╮\n", - "│ \u001b[3m \u001b[0m\u001b[1;3m 🚀 Cluster Queue\u001b[0m\u001b[3m \u001b[0m │\n", - "│ \u001b[3m \u001b[0m\u001b[1;3mStatus 🚀\u001b[0m\u001b[3m \u001b[0m │\n", + "│ \u001B[3m \u001B[0m\u001B[1;3m 🚀 Cluster Queue\u001B[0m\u001B[3m \u001B[0m │\n", + "│ \u001B[3m \u001B[0m\u001B[1;3mStatus 🚀\u001B[0m\u001B[3m \u001B[0m │\n", "│ +---------+---------+ │\n", - "│ |\u001b[1m \u001b[0m\u001b[1mName \u001b[0m\u001b[1m \u001b[0m|\u001b[1m \u001b[0m\u001b[1mStatus \u001b[0m\u001b[1m \u001b[0m| │\n", + "│ |\u001B[1m \u001B[0m\u001B[1mName \u001B[0m\u001B[1m \u001B[0m|\u001B[1m \u001B[0m\u001B[1mStatus \u001B[0m\u001B[1m \u001B[0m| │\n", "│ +=========+=========+ │\n", - "│ |\u001b[36m \u001b[0m\u001b[36mraytest\u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35mpending\u001b[0m\u001b[35m \u001b[0m| │\n", - "│ |\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m| │\n", + "│ |\u001B[36m \u001B[0m\u001B[36mraytest\u001B[0m\u001B[36m \u001B[0m|\u001B[35m \u001B[0m\u001B[35mpending\u001B[0m\u001B[35m \u001B[0m| │\n", + "│ |\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m|\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m| │\n", "│ +---------+---------+ │\n", "╰───────────────────────╯\n" ] @@ -205,15 +205,15 @@ "
    \n" ], "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Status 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", + "\u001B[3m \u001B[0m\u001B[1;3m 🚀 CodeFlare Cluster Status 🚀\u001B[0m\u001B[3m \u001B[0m\n", + "\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\n", " ╭──────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mName\u001b[0m │ \n", - " │ \u001b[1;4mraytest\u001b[0m Active ✅ │ \n", + " │ \u001B[1;37;42mName\u001B[0m │ \n", + " │ \u001B[1;4mraytest\u001B[0m Active ✅ │ \n", " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://raytest-head-svc.default.svc:10001 │ \n", + " │ \u001B[1mURI:\u001B[0m ray://raytest-head-svc.default.svc:10001 │ \n", " │ │ \n", - " │ \u001b]8;id=630217;ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", + " │ \u001B]8;id=630217;ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001B\\\u001B[4;34mDashboard🔗\u001B[0m\u001B]8;;\u001B\\ │ \n", " │ │ \n", " ╰──────────────────────────────────────────────────────────────╯ \n" ] @@ -274,22 +274,22 @@ "\n" ], "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", + "\u001B[3m \u001B[0m\u001B[1;3m 🚀 CodeFlare Cluster Details 🚀\u001B[0m\u001B[3m \u001B[0m\n", + "\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\n", " ╭───────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mName\u001b[0m │ \n", - " │ \u001b[1;4mraytest\u001b[0m Active ✅ │ \n", + " │ \u001B[1;37;42mName\u001B[0m │ \n", + " │ \u001B[1;4mraytest\u001B[0m Active ✅ │ \n", " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://raytest-head-svc.default.svc:10001 │ \n", + " │ \u001B[1mURI:\u001B[0m ray://raytest-head-svc.default.svc:10001 │ \n", " │ │ \n", - " │ \u001b]8;id=623965;http://ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", + " │ \u001B]8;id=623965;http://ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001B\\\u001B[4;34mDashboard🔗\u001B[0m\u001B]8;;\u001B\\ │ \n", " │ │ \n", - " │ \u001b[3m Cluster Resources \u001b[0m │ \n", + " │ \u001B[3m Cluster Resources \u001B[0m │ \n", " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m4~4 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m0 \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m 
\u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001B[1m \u001B[0m\u001B[1m# Workers\u001B[0m\u001B[1m \u001B[0m │ │ \u001B[1m \u001B[0m\u001B[1mMemory \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mCPU \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mGPU \u001B[0m\u001B[1m \u001B[0m │ │ \n", + " │ │ \u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", + " │ │ \u001B[35m \u001B[0m\u001B[35m2 \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m4~4 \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m1 \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m0 \u001B[0m\u001B[35m \u001B[0m │ │ \n", + " │ │ \u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", " ╰───────────────────────────────────────────────────────────────╯ \n" ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index 7deb6633..b2e954e2 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -52,7 +52,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", + " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 5e499b6b..443ea063 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -77,7 +77,7 @@ " max_cpus=2,\n", " min_memory=8,\n", " max_memory=8,\n", - " image=\"quay.io/project-codeflare/ray:2.20.0-py39-cu118\",\n", + " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" @@ -134,22 +134,22 @@ "\n" ], "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", + "\u001B[3m \u001B[0m\u001B[1;3m 🚀 CodeFlare Cluster Details 🚀\u001B[0m\u001B[3m \u001B[0m\n", + "\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\n", " ╭──────────────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mName\u001b[0m │ \n", - " │ \u001b[1;4minteractivetest\u001b[0m Active ✅ │ \n", + " │ \u001B[1;37;42mName\u001B[0m │ \n", + " │ \u001B[1;4minteractivetest\u001B[0m Active ✅ │ \n", " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://interactivetest-head-svc.default.svc:10001 │ \n", + " │ \u001B[1mURI:\u001B[0m 
ray://interactivetest-head-svc.default.svc:10001 │ \n", " │ │ \n", - " │ \u001b]8;id=970589;http://ray-dashboard-interactivetest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", + " │ \u001B]8;id=970589;http://ray-dashboard-interactivetest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001B\\\u001B[4;34mDashboard🔗\u001B[0m\u001B]8;;\u001B\\ │ \n", " │ │ \n", - " │ \u001b[3m Cluster Resources \u001b[0m │ \n", + " │ \u001B[3m Cluster Resources \u001B[0m │ \n", " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m8~8 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001B[1m \u001B[0m\u001B[1m# Workers\u001B[0m\u001B[1m \u001B[0m │ │ \u001B[1m \u001B[0m\u001B[1mMemory \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mCPU \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mGPU \u001B[0m\u001B[1m \u001B[0m │ │ \n", + " │ │ \u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", + " │ │ \u001B[35m \u001B[0m\u001B[35m2 \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m8~8 \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m2 \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m1 \u001B[0m\u001B[35m \u001B[0m │ │ \n", + " │ │ \u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", " ╰──────────────────────────────────────────────────────────────────────╯ \n" ] @@ -381,7 +381,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Downloading and preparing dataset imdb/plain_text to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/d613c88cf8fa3bab83b4ded3713f1f74830d1100e171db75bbddb80b3345c9c0...\n" + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Downloading and preparing dataset imdb/plain_text to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/d613c88cf8fa3bab83b4ded3713f1f74830d1100e171db75bbddb80b3345c9c0...\n" ] }, { @@ -494,7 +494,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn 
pid=425)\u001b[0m Dataset imdb downloaded and prepared to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/d613c88cf8fa3bab83b4ded3713f1f74830d1100e171db75bbddb80b3345c9c0. Subsequent calls will reuse this data.\n" + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Dataset imdb downloaded and prepared to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/d613c88cf8fa3bab83b4ded3713f1f74830d1100e171db75bbddb80b3345c9c0. Subsequent calls will reuse this data.\n" ] }, { @@ -613,13 +613,13 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m len of train Dataset({\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m num_rows: 100\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m }) and test Dataset({\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m num_rows: 100\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m })\n" + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m len of train Dataset({\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m num_rows: 100\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m }) and test Dataset({\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m num_rows: 100\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m })\n" ] }, { @@ -627,106 +627,106 @@ "output_type": "stream", "text": [ " \n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m 2023-08-09 14:51:50,865\tWARNING dataset.py:253 -- \u001b[33mImportant: Ray Data requires schemas for all datasets in Ray 2.5. This means that standalone Python objects are no longer supported. In addition, the default batch format is fixed to NumPy. To revert to legacy behavior temporarily, set the environment variable RAY_DATA_STRICT_MODE=0 on all cluster processes.\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Learn more here: https://docs.ray.io/en/master/data/faq.html#migrating-to-strict-mode\u001b[0m\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m /tmp/ipykernel_265/307576807.py:57: DeprecationWarning: `HuggingFaceTrainer`, `HuggingFacePredictor` and `HuggingFaceCheckpoint` have been renamed to `TransformersTrainer`, `TransformersPredictor` and `TransformersCheckpoint` respectively. Update your code to use the new import paths. This will raise an exception in the future.\n" + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m 2023-08-09 14:51:50,865\tWARNING dataset.py:253 -- \u001B[33mImportant: Ray Data requires schemas for all datasets in Ray 2.5. This means that standalone Python objects are no longer supported. In addition, the default batch format is fixed to NumPy. 
To revert to legacy behavior temporarily, set the environment variable RAY_DATA_STRICT_MODE=0 on all cluster processes.\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Learn more here: https://docs.ray.io/en/master/data/faq.html#migrating-to-strict-mode\u001B[0m\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m /tmp/ipykernel_265/307576807.py:57: DeprecationWarning: `HuggingFaceTrainer`, `HuggingFacePredictor` and `HuggingFaceCheckpoint` have been renamed to `TransformersTrainer`, `TransformersPredictor` and `TransformersCheckpoint` respectively. Update your code to use the new import paths. This will raise an exception in the future.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m To disable this warning, you can either:\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \t- Avoid using `tokenizers` before the fork if possible\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Current time: 2023-08-09 14:51:51 (running for 00:00:00.12)\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Logical resource usage: 0/6 CPUs, 0/2 GPUs\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Number of trials: 1/1 (1 PENDING)\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-------+\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m |--------------------------------+----------+-------|\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | HuggingFaceTrainer_f2621_00000 | PENDING | |\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-------+\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n" + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m To disable this warning, you can either:\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \t- Avoid using `tokenizers` before the fork if possible\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Current time: 2023-08-09 14:51:51 (running for 00:00:00.12)\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Logical resource usage: 0/6 CPUs, 0/2 GPUs\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Number of trials: 1/1 (1 PENDING)\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-------+\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m |--------------------------------+----------+-------|\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | HuggingFaceTrainer_f2621_00000 | PENDING | |\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-------+\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:55,978\tWARNING dataset.py:253 -- \u001b[33mImportant: Ray Data requires schemas for all datasets in Ray 2.5. This means that standalone Python objects are no longer supported. In addition, the default batch format is fixed to NumPy. To revert to legacy behavior temporarily, set the environment variable RAY_DATA_STRICT_MODE=0 on all cluster processes.\n", - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m \n", - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m Learn more here: https://docs.ray.io/en/master/data/faq.html#migrating-to-strict-mode\u001b[0m\n" + "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:55,978\tWARNING dataset.py:253 -- \u001B[33mImportant: Ray Data requires schemas for all datasets in Ray 2.5. This means that standalone Python objects are no longer supported. In addition, the default batch format is fixed to NumPy. 
To revert to legacy behavior temporarily, set the environment variable RAY_DATA_STRICT_MODE=0 on all cluster processes.\n", + "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m \n", + "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m Learn more here: https://docs.ray.io/en/master/data/faq.html#migrating-to-strict-mode\u001B[0m\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Current time: 2023-08-09 14:51:56 (running for 00:00:05.16)\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Logical resource usage: 1.0/6 CPUs, 2.0/2 GPUs\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-----------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m |--------------------------------+----------+-----------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | HuggingFaceTrainer_f2621_00000 | RUNNING | 10.130.4.19:196 |\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-----------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n" + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Current time: 2023-08-09 14:51:56 (running for 00:00:05.16)\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Logical resource usage: 1.0/6 CPUs, 2.0/2 GPUs\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-----------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m |--------------------------------+----------+-----------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | HuggingFaceTrainer_f2621_00000 | RUNNING | 10.130.4.19:196 |\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-----------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:57,260\tINFO backend_executor.py:137 -- Starting distributed worker processes: ['235 (10.130.4.19)', '232 (10.129.4.19)']\n", - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,957\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,957\tINFO 
streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,958\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,969\tINFO streaming_executor.py:149 -- Shutting down .\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,912\tINFO config.py:86 -- Setting up process group for: env:// [rank=0, world_size=2]\n" + "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:57,260\tINFO backend_executor.py:137 -- Starting distributed worker processes: ['235 (10.130.4.19)', '232 (10.129.4.19)']\n", + "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:58,957\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", + "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:58,957\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", + "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:58,958\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", + "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:58,969\tINFO streaming_executor.py:149 -- Shutting down .\n", + "\u001B[2m\u001B[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:58,912\tINFO config.py:86 -- Setting up process group for: env:// [rank=0, world_size=2]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Current time: 2023-08-09 14:52:01 (running for 00:00:10.18)\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Logical resource usage: 1.0/6 CPUs, 2.0/2 GPUs\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-----------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m |--------------------------------+----------+-----------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | HuggingFaceTrainer_f2621_00000 | RUNNING | 10.130.4.19:196 |\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-----------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n" + 
"\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m == Status ==\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Current time: 2023-08-09 14:52:01 (running for 00:00:10.18)\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Using FIFO scheduling algorithm.\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Logical resource usage: 1.0/6 CPUs, 2.0/2 GPUs\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-----------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | Trial name | status | loc |\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m |--------------------------------+----------+-----------------|\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | HuggingFaceTrainer_f2621_00000 | RUNNING | 10.130.4.19:196 |\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-----------------+\n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n", + "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,274\tINFO streaming_executor.py:149 -- Shutting down .\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,263\tINFO streaming_executor.py:149 -- Shutting down .\n", + "\u001B[2m\u001B[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001B[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", + "\u001B[2m\u001B[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001B[0m 2023-08-09 
14:52:01,262\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", + "\u001B[2m\u001B[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001B[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", + "\u001B[2m\u001B[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001B[0m 2023-08-09 14:52:01,274\tINFO streaming_executor.py:149 -- Shutting down .\n", + "\u001B[2m\u001B[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001B[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", + "\u001B[2m\u001B[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001B[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", + "\u001B[2m\u001B[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001B[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", + "\u001B[2m\u001B[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001B[0m 2023-08-09 14:52:01,263\tINFO streaming_executor.py:149 -- Shutting down .\n", "Downloading (…)lve/main/config.json: 100%|██████████| 483/483 [00:00<00:00, 151kB/s]\n", "Downloading (…)lve/main/config.json: 100%|██████████| 483/483 [00:00<00:00, 146kB/s]\n", "Downloading model.safetensors: 0%| | 0.00/268M [00:00=3.8" files = [ - {file = "filelock-3.15.3-py3-none-any.whl", hash = "sha256:0151273e5b5d6cf753a61ec83b3a9b7d8821c39ae9af9d7ecf2f9e2f17404103"}, - {file = "filelock-3.15.3.tar.gz", hash = "sha256:e1199bf5194a2277273dacd50269f0d87d0682088a3c561c15674ea9005d8635"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] @@ -734,13 +734,13 @@ files = [ [[package]] name = "fsspec" -version = "2024.6.0" +version = "2024.6.1" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.6.0-py3-none-any.whl", hash = "sha256:58d7122eb8a1a46f7f13453187bfea4972d66bf01618d37366521b1998034cee"}, - {file = "fsspec-2024.6.0.tar.gz", hash = "sha256:f579960a56e6d8038a9efc8f9c77279ec12e6299aa86b0769a7e9c46b94527c2"}, + {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, + {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, ] [package.extras] @@ -773,25 +773,26 @@ tqdm = ["tqdm"] [[package]] name = "google-api-core" -version = "2.8.0" +version = "2.19.1" description = "Google API client core library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "google-api-core-2.8.0.tar.gz", hash = "sha256:065bb8e11c605fd232707ae50963dc1c8af5b3c95b4568887515985e6c1156b3"}, - {file = "google_api_core-2.8.0-py3-none-any.whl", hash = 
"sha256:1b9f59236ce1bae9a687c1d4f22957e79a2669e53d032893f6bf0fca54f6931d"}, + {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, + {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, ] [package.dependencies] -google-auth = ">=1.25.0,<3.0dev" -googleapis-common-protos = ">=1.52.0,<2.0dev" -protobuf = ">=3.12.0" -requests = ">=2.18.0,<3.0.0dev" +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" [package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio-status (>=1.33.2,<2.0dev)"] -grpcgcp = ["grpcio-gcp (>=0.2.2)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" @@ -818,17 +819,17 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "googleapis-common-protos" -version = "1.63.1" +version = "1.63.2" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.63.1.tar.gz", hash = "sha256:c6442f7a0a6b2a80369457d79e6672bb7dcbaab88e0848302497e3ec80780a6a"}, - {file = "googleapis_common_protos-1.63.1-py2.py3-none-any.whl", hash = "sha256:0e1c2cdfcbc354b76e4a211a35ea35d6926a835cba1377073c4861db904a1877"}, + {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, + {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, ] [package.dependencies] -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] @@ -904,13 +905,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.2.0" +version = "8.0.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.2.0-py3-none-any.whl", hash = "sha256:04e4aad329b8b948a5711d394fa8759cb80f009225441b4f2a02bd4d8e5f426c"}, - {file = "importlib_metadata-7.2.0.tar.gz", hash = "sha256:3ff4519071ed42740522d494d04819b666541b9752c43012f85afb2cc220fcc6"}, + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, ] [package.dependencies] @@ -1758,24 +1759,41 @@ files = [ [package.dependencies] wcwidth = "*" +[[package]] +name = "proto-plus" +version = "1.24.0" +description = "Beautiful, Pythonic protocol buffers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, + {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<6.0.0dev" + +[package.extras] +testing = ["google-api-core (>=1.31.5)"] + [[package]] name = "protobuf" -version = "5.27.1" +version = "5.27.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.27.1-cp310-abi3-win32.whl", hash = "sha256:3adc15ec0ff35c5b2d0992f9345b04a540c1e73bfee3ff1643db43cc1d734333"}, - {file = "protobuf-5.27.1-cp310-abi3-win_amd64.whl", hash = "sha256:25236b69ab4ce1bec413fd4b68a15ef8141794427e0b4dc173e9d5d9dffc3bcd"}, - {file = "protobuf-5.27.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4e38fc29d7df32e01a41cf118b5a968b1efd46b9c41ff515234e794011c78b17"}, - {file = "protobuf-5.27.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:917ed03c3eb8a2d51c3496359f5b53b4e4b7e40edfbdd3d3f34336e0eef6825a"}, - {file = "protobuf-5.27.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:ee52874a9e69a30271649be88ecbe69d374232e8fd0b4e4b0aaaa87f429f1631"}, - {file = "protobuf-5.27.1-cp38-cp38-win32.whl", hash = "sha256:7a97b9c5aed86b9ca289eb5148df6c208ab5bb6906930590961e08f097258107"}, - {file = "protobuf-5.27.1-cp38-cp38-win_amd64.whl", hash = "sha256:f6abd0f69968792da7460d3c2cfa7d94fd74e1c21df321eb6345b963f9ec3d8d"}, - {file = "protobuf-5.27.1-cp39-cp39-win32.whl", hash = "sha256:dfddb7537f789002cc4eb00752c92e67885badcc7005566f2c5de9d969d3282d"}, - {file = "protobuf-5.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:39309898b912ca6febb0084ea912e976482834f401be35840a008da12d189340"}, - {file = "protobuf-5.27.1-py3-none-any.whl", hash = "sha256:4ac7249a1530a2ed50e24201d6630125ced04b30619262f06224616e0030b6cf"}, - {file = "protobuf-5.27.1.tar.gz", hash = "sha256:df5e5b8e39b7d1c25b186ffdf9f44f40f810bbcc9d2b71d9d3156fee5a9adf15"}, + {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"}, + {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"}, + {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"}, + {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"}, + {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"}, + {file = "protobuf-5.27.2-cp38-cp38-win32.whl", hash = "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863"}, + {file = "protobuf-5.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6"}, + {file = "protobuf-5.27.2-cp39-cp39-win32.whl", hash = "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca"}, + {file = "protobuf-5.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce"}, + {file = "protobuf-5.27.2-py3-none-any.whl", hash = "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"}, + {file = "protobuf-5.27.2.tar.gz", hash = 
"sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, ] [[package]] @@ -2142,26 +2160,20 @@ files = [ [[package]] name = "ray" -version = "2.20.0" +version = "2.23.0" description = "Ray provides a simple, universal API for building distributed applications." optional = false python-versions = ">=3.8" files = [ - {file = "ray-2.20.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:8855a5df8b3e6b8bcb5582a8491c50d0237e70751f941e8978bd6408245b7838"}, - {file = "ray-2.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0566b28c75aad1d47b9403c3901a85db586ce7191fdc6978e07ad56e80bf82b"}, - {file = "ray-2.20.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:738c68f4114754f846b3d03b730b42a6468f8b54665732da9f9108aa1d3ecbe3"}, - {file = "ray-2.20.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:2c7f8cd468cbba009d7ebd8a8da66026aeb520f7f4183dd6f49419d75bc84415"}, - {file = "ray-2.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:611d34d0c659652a38ef482a82dfc362074984617765e1d5a414337e4f914cfd"}, - {file = "ray-2.20.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:f7816767e644014f65afbfceb6adfb08c15784a4227aa331b28ac90d1b757a58"}, - {file = "ray-2.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e98df29fd6dac52c87c1f5be5ad99601a8955eaabe921e5cab29b27775250ce"}, - {file = "ray-2.20.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:e84ddad1521e06c91fc641f2b856d33ca2bfa314784172862c41a5184e0e760b"}, - {file = "ray-2.20.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:d9b13815fae5c9a68c9a02f21e1c49c58a5bb6565cb9ed5d48571cacce7568f2"}, - {file = "ray-2.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:6ac1dcb303ddf53d2d87bc5b719e8c38f0a5efe41e175b6ba563fb65b5f4e9a2"}, - {file = "ray-2.20.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:1de0810f77ae4a0bf055aa2bdcb161be1d6d1b67b4095e85a5b3fbb6e0dadcd2"}, - {file = "ray-2.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f3519dd7794ead4d3e17d4570593b2a10e8db062836907517e85b4e769dec1a"}, - {file = "ray-2.20.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5a2cb9f100bbb6351372519b03ddc21d9fa6c8716621237273a59a6e250a8204"}, - {file = "ray-2.20.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:64b394a6462a2ac2401b1b004f2cc7ac31e429388abf27024072a55702f1159c"}, - {file = "ray-2.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:65938f7bd28a825d90c643465ad6b1334d97d16e381c409b19269e4dcc043341"}, + {file = "ray-2.23.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:4f5ea8dc8fc014704ea12ef8a569abf0deca2ba2a6f157dc5fdd1789db4e0a65"}, + {file = "ray-2.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc950898871c3ecf3b921295c5fcf47b4a30b57b54be8f369014fb1eb9b4cfa5"}, + {file = "ray-2.23.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:55610f8eae65ce5686bde75a5782ce63e2a0112ccd2262b8acd707264da6dbea"}, + {file = "ray-2.23.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1a43d94ce3f14490e6f1e3e868fd6a5f3be4878cbf83c4bcdc741861d6a4dbf6"}, + {file = "ray-2.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2d2c1d59d7c8bd8b97288f7ae9a6bf762bd4e703b57787282400d3176dd159d"}, + {file = "ray-2.23.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:15c109fd9969326323c8bdb0701cd9af21c85f465002f74950622f9a580ec4e5"}, + {file = "ray-2.23.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:e7d059f094dedae36dddeaf792ebb74d4eed1a8ab1fb540dbffce4ac22694800"}, + {file = "ray-2.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:7c305f31674fb8319c147d66e27dd210c7ad6d375626307ddfc62137a26d4155"}, + {file = "ray-2.23.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:b40f85c67ee3d58732b4021460c4297eb418f466313d70b577e5bf9fbb4c2d16"}, ] [package.dependencies] @@ -2196,9 +2208,9 @@ virtualenv = {version = ">=20.0.24,<20.21.1 || >20.21.1", optional = true, marke [package.extras] air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "memray", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.20.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -client = ["grpcio"] -cpp = ["ray-cpp (==2.20.0)"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "memray", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.23.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +client = ["grpcio (!=1.56.0)"] +cpp = ["ray-cpp (==2.23.0)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] @@ -2404,13 +2416,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "70.1.0" +version = "70.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.1.0-py3-none-any.whl", hash = "sha256:d9b8b771455a97c8a9f3ab3448ebe0b29b5e105f1228bba41028be116985a267"}, - {file = "setuptools-70.1.0.tar.gz", hash = "sha256:01a1e793faa5bd89abc851fa15d0a0db26f160890c7102cd8dce643e886b47f5"}, + {file = "setuptools-70.1.1-py3-none-any.whl", hash = "sha256:a58a8fde0541dab0419750bcc521fbdf8585f6e5cb41909df3a472ef7b81ca95"}, + {file = "setuptools-70.1.1.tar.gz", hash = "sha256:937a48c7cdb7a21eb53cd7f9b59e525503aa8abaf3584c730dc5f7a5bec3a650"}, ] [package.extras] @@ -2539,13 +2551,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.2" +version = "20.26.3" description = 
"Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, - {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] @@ -2795,4 +2807,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "d656bab99c2e5a911ee1003db9e0682141328ae3ef1e1620945f8479451425bf" +content-hash = "70c25813b589204a08c4143380cb8fe06de901dd6a83162b8f93a51945072629" diff --git a/pyproject.toml b/pyproject.toml index af7dd1ca..457e6de9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] python = "^3.9" openshift-client = "1.0.18" rich = "^12.5" -ray = {version = "2.20.0", extras = ["data", "default"]} +ray = {version = "2.23.0", extras = ["data", "default"]} kubernetes = ">= 25.3.0, < 27" cryptography = "40.0.2" executing = "1.2.0" diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 5c0c919d..7b36146a 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -12,7 +12,7 @@ metadata: namespace: default spec: # The version of Ray you are using. Make sure all Ray containers are running this version of Ray. - rayVersion: '2.20.0' + rayVersion: '2.23.0' # If enableInTreeAutoscaling is true, the autoscaler sidecar will be added to the Ray head pod. # Ray autoscaler integration is supported only for Ray versions >= 1.11.0 # Ray autoscaler integration is Beta with KubeRay >= 0.3.0 and Ray >= 2.0.0. @@ -69,7 +69,7 @@ spec: containers: # The Ray head pod - name: ray-head - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + image: quay.io/rhoai/ray:2.23.0-py39-cu121 imagePullPolicy: Always ports: - containerPort: 6379 @@ -152,7 +152,7 @@ spec: spec: containers: - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + image: quay.io/rhoai/ray:2.23.0-py39-cu121 # environment variables to set in the container.Optional. 
# Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ lifecycle: diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 04c9cb42..d8a06bb7 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -8,7 +8,7 @@ def get_ray_image(): - default_ray_image = "quay.io/project-codeflare/ray:2.20.0-py39-cu118" + default_ray_image = "quay.io/rhoai/ray:2.23.0-py39-cu121" return os.getenv("RAY_IMAGE", default_ray_image) diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index 18dcb7d7..d4d230d4 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -42,7 +42,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + image: quay.io/rhoai/ray:2.23.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -68,7 +68,7 @@ spec: cpu: 2 memory: 8G nvidia.com/gpu: 0 - rayVersion: 2.20.0 + rayVersion: 2.23.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster maxReplicas: 2 @@ -90,7 +90,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + image: quay.io/rhoai/ray:2.23.0-py39-cu121 lifecycle: preStop: exec: diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index d8d2516c..2d0e7e9b 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -33,7 +33,7 @@ spec: template: spec: containers: - - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + - image: quay.io/rhoai/ray:2.23.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -89,7 +89,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.20.0 + rayVersion: 2.23.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster-ray maxReplicas: 2 @@ -106,7 +106,7 @@ spec: key: value spec: containers: - - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + - image: quay.io/rhoai/ray:2.23.0-py39-cu121 lifecycle: preStop: exec: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index c5229ce7..00b241af 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + - image: quay.io/rhoai/ray:2.23.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -94,7 +94,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.20.0 + rayVersion: 2.23.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster maxReplicas: 2 @@ -111,7 +111,7 @@ spec: key: value spec: containers: - - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + - image: quay.io/rhoai/ray:2.23.0-py39-cu121 lifecycle: preStop: exec: diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index 8fd1873f..cc44e234 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + - image: quay.io/rhoai/ray:2.23.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -93,7 +93,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.20.0 + rayVersion: 2.23.0 workerGroupSpecs: - groupName: small-group-unit-test-default-cluster maxReplicas: 1 @@ -110,7 +110,7 @@ spec: key: value spec: containers: - - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + - image: quay.io/rhoai/ray:2.23.0-py39-cu121 lifecycle: preStop: exec: diff --git 
a/tests/unit_test.py b/tests/unit_test.py index 61870b2a..6346d462 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -261,7 +261,7 @@ def test_config_creation(): assert config.min_cpus == 3 and config.max_cpus == 4 assert config.min_memory == "5G" and config.max_memory == "6G" assert config.num_gpus == 7 - assert config.image == "quay.io/project-codeflare/ray:2.20.0-py39-cu118" + assert config.image == "quay.io/rhoai/ray:2.23.0-py39-cu121" assert config.template == f"{parent}/src/codeflare_sdk/templates/base-template.yaml" assert config.machine_types == ["cpu.small", "gpu.large"] assert config.image_pull_secrets == ["unit-test-pull-secret"] @@ -400,7 +400,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): num_gpus=7, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", + image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=True, appwrapper=False, local_queue="local-queue-default", @@ -428,7 +428,7 @@ def test_default_cluster_creation(mocker): ) default_config = ClusterConfiguration( name="unit-test-default-cluster", - image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", + image="quay.io/rhoai/ray:2.23.0-py39-cu121", appwrapper=True, ) cluster = Cluster(default_config) @@ -777,7 +777,7 @@ def test_ray_job_wrapping(mocker): return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) cluster = cluster = createClusterWithConfig(mocker) - cluster.config.image = "quay.io/project-codeflare/ray:2.20.0-py39-cu118" + cluster.config.image = "quay.io/rhoai/ray:2.23.0-py39-cu121" mocker.patch( "ray.job_submission.JobSubmissionClient._check_connection_and_version_with_url", return_value="None", @@ -897,7 +897,7 @@ def test_ray_details(mocker, capsys): ClusterConfiguration( name="raytest2", namespace="ns", - image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", + image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=True, appwrapper=True, local_queue="local_default_queue", @@ -1183,7 +1183,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): }, }, }, - "rayVersion": "2.20.0", + "rayVersion": "2.23.0", "workerGroupSpecs": [ { "groupName": "small-group-quicktest", @@ -1417,7 +1417,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): } }, }, - "rayVersion": "2.20.0", + "rayVersion": "2.23.0", "workerGroupSpecs": [ { "groupName": "small-group-quicktest2", @@ -1790,7 +1790,7 @@ def get_aw_obj(group, version, namespace, plural): } }, }, - "rayVersion": "2.20.0", + "rayVersion": "2.23.0", "workerGroupSpecs": [ { "groupName": "small-group-quicktest", @@ -2294,7 +2294,7 @@ def test_cluster_status(mocker): ClusterConfiguration( name="test", namespace="ns", - image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", + image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=True, appwrapper=True, local_queue="local_default_queue", @@ -2389,7 +2389,7 @@ def test_wait_ready(mocker, capsys): ClusterConfiguration( name="test", namespace="ns", - image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", + image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=True, appwrapper=True, local_queue="local-queue-default", @@ -2616,7 +2616,7 @@ def throw_if_getting_raycluster(group, version, namespace, plural): cluster = Cluster( ClusterConfiguration( "test_cluster", - image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", + image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=False, ) ) diff --git a/tests/unit_test_support.py 
b/tests/unit_test_support.py index 6e662db4..9e7a60b6 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -17,7 +17,7 @@ def createClusterConfig(): appwrapper=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", + image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=True, ) return config From 47654d1865e8a51a98561349ef75994b90b3bc9e Mon Sep 17 00:00:00 2001 From: Antonin Stefanutti Date: Fri, 28 Jun 2024 17:32:16 +0200 Subject: [PATCH 278/496] test: Upgrade pytorch_lightning to fix invalid metadata --- .../guided-demos/notebook-ex-outputs/requirements.txt | 2 +- demo-notebooks/guided-demos/preview_nbs/requirements.txt | 2 +- tests/e2e/mnist_pip_requirements.txt | 2 +- tests/unit_test.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt index 7266b064..e77d612b 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt @@ -1,4 +1,4 @@ -pytorch_lightning==1.5.10 +pytorch_lightning==1.9.5 ray_lightning torchmetrics==0.9.1 torchvision==0.12.0 diff --git a/demo-notebooks/guided-demos/preview_nbs/requirements.txt b/demo-notebooks/guided-demos/preview_nbs/requirements.txt index 7266b064..e77d612b 100644 --- a/demo-notebooks/guided-demos/preview_nbs/requirements.txt +++ b/demo-notebooks/guided-demos/preview_nbs/requirements.txt @@ -1,4 +1,4 @@ -pytorch_lightning==1.5.10 +pytorch_lightning==1.9.5 ray_lightning torchmetrics==0.9.1 torchvision==0.12.0 diff --git a/tests/e2e/mnist_pip_requirements.txt b/tests/e2e/mnist_pip_requirements.txt index 87edeef2..4c9d5fcb 100644 --- a/tests/e2e/mnist_pip_requirements.txt +++ b/tests/e2e/mnist_pip_requirements.txt @@ -1,3 +1,3 @@ -pytorch_lightning==1.5.10 +pytorch_lightning==1.9.5 torchmetrics==0.9.1 torchvision==0.12.0 diff --git a/tests/unit_test.py b/tests/unit_test.py index 6346d462..e8fa61c3 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -2767,8 +2767,8 @@ def test_rjc_tail_job_logs(ray_job_client, mocker): def test_rjc_list_jobs(ray_job_client, mocker): jobs_list = [ - "JobDetails(type=, job_id=None, submission_id='raysubmit_4k2NYS1YbRXYPZCM', driver_info=None, status=, entrypoint='python mnist.py', message='Job finished successfully.', error_type=None, start_time=1701352132585, end_time=1701352192002, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {'packages': ['pytorch_lightning==1.5.10', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", - "JobDetails(type=, job_id=None, submission_id='raysubmit_iRuwU8vdkbUZZGvT', driver_info=None, status=, entrypoint='python mnist.py', message='Job was intentionally stopped.', error_type=None, start_time=1701353096163, end_time=1701353097733, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {'packages': ['pytorch_lightning==1.5.10', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address='http://10.131.0.18:52365', 
driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", + "JobDetails(type=, job_id=None, submission_id='raysubmit_4k2NYS1YbRXYPZCM', driver_info=None, status=, entrypoint='python mnist.py', message='Job finished successfully.', error_type=None, start_time=1701352132585, end_time=1701352192002, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {'packages': ['pytorch_lightning==1.9.5', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", + "JobDetails(type=, job_id=None, submission_id='raysubmit_iRuwU8vdkbUZZGvT', driver_info=None, status=, entrypoint='python mnist.py', message='Job was intentionally stopped.', error_type=None, start_time=1701353096163, end_time=1701353097733, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {'packages': ['pytorch_lightning==1.9.5', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", ] mocked_rjc_list_jobs = mocker.patch.object( JobSubmissionClient, "list_jobs", return_value=jobs_list From ff3ade70321939e17535b19857ea2b7326bf7a65 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 1 Jul 2024 15:11:18 +0100 Subject: [PATCH 279/496] Added Varsha to OWNERS file --- OWNERS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/OWNERS b/OWNERS index fdb68837..c7dd78c3 100644 --- a/OWNERS +++ b/OWNERS @@ -7,6 +7,7 @@ approvers: - kpostoffice - maxusmusti - MichaelClifford + - varshaprasad96 reviewers: - anishasthana - astefanutti @@ -18,3 +19,4 @@ reviewers: - kpostoffice - maxusmusti - MichaelClifford + - varshaprasad96 From a36ebdb63c7d017c234608eb8ad4866872f7fcf1 Mon Sep 17 00:00:00 2001 From: Alex Fan Date: Mon, 27 May 2024 12:34:32 +0100 Subject: [PATCH 280/496] Update SDK args --- src/codeflare_sdk/cluster/cluster.py | 50 ++++++------- src/codeflare_sdk/cluster/config.py | 69 ++++++++++++++---- src/codeflare_sdk/utils/generate_yaml.py | 72 +++++++++++-------- tests/e2e/local_interactive_sdk_kind_test.py | 10 +-- tests/e2e/local_interactive_sdk_oauth_test.py | 10 +-- tests/e2e/mnist_raycluster_sdk_kind_test.py | 10 +-- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 10 +-- tests/e2e/start_ray_cluster.py | 10 +-- tests/unit_test.py | 52 +++++++++----- tests/unit_test_support.py | 10 +-- tests/upgrade/raycluster_sdk_upgrade_test.py | 10 +-- 11 files changed, 193 insertions(+), 120 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index e5bbcd86..b823cfd5 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -137,12 +137,12 @@ def create_app_wrapper(self): namespace = self.config.namespace head_cpus = self.config.head_cpus head_memory = self.config.head_memory - head_gpus = self.config.head_gpus - min_cpu = self.config.min_cpus - max_cpu = self.config.max_cpus - min_memory = self.config.min_memory - max_memory = self.config.max_memory - gpu = self.config.num_gpus + num_head_gpus = self.config.num_head_gpus + worker_cpu_requests = self.config.worker_cpu_requests + worker_cpu_limits = self.config.worker_cpu_limits + worker_memory_requests = 
self.config.worker_memory_requests + worker_memory_limits = self.config.worker_memory_limits + num_worker_gpus = self.config.num_worker_gpus workers = self.config.num_workers template = self.config.template image = self.config.image @@ -157,12 +157,12 @@ def create_app_wrapper(self): namespace=namespace, head_cpus=head_cpus, head_memory=head_memory, - head_gpus=head_gpus, - min_cpu=min_cpu, - max_cpu=max_cpu, - min_memory=min_memory, - max_memory=max_memory, - gpu=gpu, + num_head_gpus=num_head_gpus, + worker_cpu_requests=worker_cpu_requests, + worker_cpu_limits=worker_cpu_limits, + worker_memory_requests=worker_memory_requests, + worker_memory_limits=worker_memory_limits, + num_worker_gpus=num_worker_gpus, workers=workers, template=template, image=image, @@ -318,7 +318,7 @@ def status( if print_to_console: # overriding the number of gpus with requested - cluster.worker_gpu = self.config.num_gpus + cluster.worker_gpu = self.config.num_worker_gpus pretty_print.print_cluster_status(cluster) elif print_to_console: if status == CodeFlareClusterStatus.UNKNOWN: @@ -474,19 +474,19 @@ def from_k8_cluster_object( namespace=rc["metadata"]["namespace"], machine_types=machine_types, num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["requests"]["cpu"], - max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["cpu"], - min_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["requests"]["memory"], - max_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_memory_requests=rc["spec"]["workerGroupSpecs"][0]["template"][ + "spec" + ]["containers"][0]["resources"]["requests"]["memory"], + worker_memory_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["memory"], - num_gpus=int( + num_worker_gpus=int( rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ "resources" ]["limits"]["nvidia.com/gpu"] @@ -917,15 +917,15 @@ def _copy_to_ray(cluster: Cluster) -> RayCluster: name=cluster.config.name, status=cluster.status(print_to_console=False)[0], workers=cluster.config.num_workers, - worker_mem_min=cluster.config.min_memory, - worker_mem_max=cluster.config.max_memory, - worker_cpu=cluster.config.min_cpus, - worker_gpu=cluster.config.num_gpus, + worker_mem_min=cluster.config.worker_memory_requests, + worker_mem_max=cluster.config.worker_memory_limits, + worker_cpu=cluster.config.worker_cpu_requests, + worker_gpu=cluster.config.num_worker_gpus, namespace=cluster.config.namespace, dashboard=cluster.cluster_dashboard_uri(), head_cpus=cluster.config.head_cpus, head_mem=cluster.config.head_memory, - head_gpu=cluster.config.head_gpus, + head_gpu=cluster.config.num_head_gpus, ) if ray.status == CodeFlareClusterStatus.READY: ray.status = RayClusterStatus.READY diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 9e069c37..cb8e3d3d 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -21,6 +21,7 @@ from dataclasses import dataclass, field import pathlib import typing +import warnings dir = pathlib.Path(__file__).parent.parent.resolve() @@ -37,14 +38,20 @@ class ClusterConfiguration: head_info: list = 
field(default_factory=list) head_cpus: typing.Union[int, str] = 2 head_memory: typing.Union[int, str] = 8 - head_gpus: int = 0 + head_gpus: int = None # Deprecating + num_head_gpus: int = 0 machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] - min_cpus: typing.Union[int, str] = 1 - max_cpus: typing.Union[int, str] = 1 + worker_cpu_requests: typing.Union[int, str] = 1 + worker_cpu_limits: typing.Union[int, str] = 1 + min_cpus: typing.Union[int, str] = None # Deprecating + max_cpus: typing.Union[int, str] = None # Deprecating num_workers: int = 1 - min_memory: typing.Union[int, str] = 2 - max_memory: typing.Union[int, str] = 2 - num_gpus: int = 0 + worker_memory_requests: typing.Union[int, str] = 2 + worker_memory_limits: typing.Union[int, str] = 2 + min_memory: typing.Union[int, str] = None # Deprecating + max_memory: typing.Union[int, str] = None # Deprecating + num_worker_gpus: int = 0 + num_gpus: int = None # Deprecating template: str = f"{dir}/templates/base-template.yaml" appwrapper: bool = False envs: dict = field(default_factory=dict) @@ -59,23 +66,57 @@ def __post_init__(self): print( "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) + self._memory_to_string() self._str_mem_no_unit_add_GB() + self._memory_to_resource() + self._gpu_to_resource() + self._cpu_to_resource() def _str_mem_no_unit_add_GB(self): if isinstance(self.head_memory, str) and self.head_memory.isdecimal(): self.head_memory = f"{self.head_memory}G" - if isinstance(self.min_memory, str) and self.min_memory.isdecimal(): - self.min_memory = f"{self.min_memory}G" - if isinstance(self.max_memory, str) and self.max_memory.isdecimal(): - self.max_memory = f"{self.max_memory}G" + if ( + isinstance(self.worker_memory_requests, str) + and self.worker_memory_requests.isdecimal() + ): + self.worker_memory_requests = f"{self.worker_memory_requests}G" + if ( + isinstance(self.worker_memory_limits, str) + and self.worker_memory_limits.isdecimal() + ): + self.worker_memory_limits = f"{self.worker_memory_limits}G" def _memory_to_string(self): if isinstance(self.head_memory, int): self.head_memory = f"{self.head_memory}G" - if isinstance(self.min_memory, int): - self.min_memory = f"{self.min_memory}G" - if isinstance(self.max_memory, int): - self.max_memory = f"{self.max_memory}G" + if isinstance(self.worker_memory_requests, int): + self.worker_memory_requests = f"{self.worker_memory_requests}G" + if isinstance(self.worker_memory_limits, int): + self.worker_memory_limits = f"{self.worker_memory_limits}G" + + def _gpu_to_resource(self): + if self.head_gpus: + warnings.warn("head_gpus is being deprecated, use num_head_gpus") + self.num_head_gpus = self.head_gpus + if self.num_gpus: + warnings.warn("num_gpus is being deprecated, use num_worker_gpus") + self.num_worker_gpus = self.num_gpus + + def _cpu_to_resource(self): + if self.min_cpus: + warnings.warn("min_cpus is being deprecated, use worker_cpu_requests") + self.worker_cpu_requests = self.min_cpus + if self.max_cpus: + warnings.warn("max_cpus is being deprecated, use worker_cpu_limits") + self.worker_cpu_limits = self.max_cpus + + def _memory_to_resource(self): + if self.min_memory: + warnings.warn("min_memory is being deprecated, use worker_memory_requests") + self.worker_memory_requests = f"{self.min_memory}G" + if self.max_memory: + warnings.warn("max_memory is being deprecated, use worker_memory_limits") + self.worker_memory_limits = f"{self.max_memory}G" local_queue: str = None diff --git 
a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 30edcd91..18370564 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -106,39 +106,46 @@ def update_env(spec, env): container["env"] = env -def update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu): +def update_resources( + spec, + worker_cpu_requests, + worker_cpu_limits, + worker_memory_requests, + worker_memory_limits, + num_worker_gpus, +): container = spec.get("containers") for resource in container: requests = resource.get("resources").get("requests") if requests is not None: - requests["cpu"] = min_cpu - requests["memory"] = min_memory - requests["nvidia.com/gpu"] = gpu + requests["cpu"] = worker_cpu_requests + requests["memory"] = worker_memory_requests + requests["nvidia.com/gpu"] = num_worker_gpus limits = resource.get("resources").get("limits") if limits is not None: - limits["cpu"] = max_cpu - limits["memory"] = max_memory - limits["nvidia.com/gpu"] = gpu + limits["cpu"] = worker_cpu_limits + limits["memory"] = worker_memory_limits + limits["nvidia.com/gpu"] = num_worker_gpus def update_nodes( cluster_yaml, appwrapper_name, - min_cpu, - max_cpu, - min_memory, - max_memory, - gpu, + worker_cpu_requests, + worker_cpu_limits, + worker_memory_requests, + worker_memory_limits, + num_worker_gpus, workers, image, env, image_pull_secrets, head_cpus, head_memory, - head_gpus, + num_head_gpus, ): head = cluster_yaml.get("spec").get("headGroupSpec") - head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) + head["rayStartParams"]["num-gpus"] = str(int(num_head_gpus)) worker = cluster_yaml.get("spec").get("workerGroupSpecs")[0] # Head counts as first worker @@ -146,7 +153,7 @@ def update_nodes( worker["minReplicas"] = workers worker["maxReplicas"] = workers worker["groupName"] = "small-group-" + appwrapper_name - worker["rayStartParams"]["num-gpus"] = str(int(gpu)) + worker["rayStartParams"]["num-gpus"] = str(int(num_worker_gpus)) for comp in [head, worker]: spec = comp.get("template").get("spec") @@ -156,10 +163,17 @@ def update_nodes( if comp == head: # TODO: Eventually add head node configuration outside of template update_resources( - spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus + spec, head_cpus, head_cpus, head_memory, head_memory, num_head_gpus ) else: - update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) + update_resources( + spec, + worker_cpu_requests, + worker_cpu_limits, + worker_memory_requests, + worker_memory_limits, + num_worker_gpus, + ) def del_from_list_by_name(l: list, target: typing.List[str]) -> list: @@ -265,12 +279,12 @@ def generate_appwrapper( namespace: str, head_cpus: int, head_memory: int, - head_gpus: int, - min_cpu: int, - max_cpu: int, - min_memory: int, - max_memory: int, - gpu: int, + num_head_gpus: int, + worker_cpu_requests: int, + worker_cpu_limits: int, + worker_memory_requests: int, + worker_memory_limits: int, + num_worker_gpus: int, workers: int, template: str, image: str, @@ -287,18 +301,18 @@ def generate_appwrapper( update_nodes( cluster_yaml, appwrapper_name, - min_cpu, - max_cpu, - min_memory, - max_memory, - gpu, + worker_cpu_requests, + worker_cpu_limits, + worker_memory_requests, + worker_memory_limits, + num_worker_gpus, workers, image, env, image_pull_secrets, head_cpus, head_memory, - head_gpus, + num_head_gpus, ) augment_labels(cluster_yaml, labels) notebook_annotations(cluster_yaml) diff --git a/tests/e2e/local_interactive_sdk_kind_test.py 
b/tests/e2e/local_interactive_sdk_kind_test.py index 647ac472..999c72e1 100644 --- a/tests/e2e/local_interactive_sdk_kind_test.py +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -39,11 +39,11 @@ def run_local_interactives(self): num_workers=1, head_cpus="500m", head_memory=2, - min_cpus="500m", - max_cpus=1, - min_memory=1, - max_memory=2, - num_gpus=0, + worker_cpu_requests="500m", + worker_cpu_limits=1, + worker_memory_requests=1, + worker_memory_limits=2, + num_worker_gpus=0, image=ray_image, write_to_file=True, verify_tls=False, diff --git a/tests/e2e/local_interactive_sdk_oauth_test.py b/tests/e2e/local_interactive_sdk_oauth_test.py index c3fd1d85..a2d5b612 100644 --- a/tests/e2e/local_interactive_sdk_oauth_test.py +++ b/tests/e2e/local_interactive_sdk_oauth_test.py @@ -44,11 +44,11 @@ def run_local_interactives(self): namespace=self.namespace, name=cluster_name, num_workers=1, - min_cpus=1, - max_cpus=1, - min_memory=4, - max_memory=4, - num_gpus=0, + worker_cpu_requests=1, + worker_cpu_limits=1, + worker_memory_requests=4, + worker_memory_limits=4, + num_worker_gpus=0, image=ray_image, verify_tls=False, ) diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index 630bc5df..d8539722 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -37,11 +37,11 @@ def run_mnist_raycluster_sdk_kind(self): num_workers=1, head_cpus="500m", head_memory=2, - min_cpus="500m", - max_cpus=1, - min_memory=1, - max_memory=2, - num_gpus=0, + worker_cpu_requests="500m", + worker_cpu_limits=1, + worker_memory_requests=1, + worker_memory_limits=2, + num_worker_gpus=0, image=ray_image, write_to_file=True, verify_tls=False, diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index e489c39f..ed2f4fb1 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -44,11 +44,11 @@ def run_mnist_raycluster_sdk_oauth(self): num_workers=1, head_cpus="500m", head_memory=2, - min_cpus="500m", - max_cpus=1, - min_memory=1, - max_memory=2, - num_gpus=0, + worker_cpu_requests="500m", + worker_cpu_limits=1, + worker_memory_requests=1, + worker_memory_limits=2, + num_worker_gpus=0, image=ray_image, write_to_file=True, verify_tls=False, diff --git a/tests/e2e/start_ray_cluster.py b/tests/e2e/start_ray_cluster.py index 957d0c25..4fd7fb3f 100644 --- a/tests/e2e/start_ray_cluster.py +++ b/tests/e2e/start_ray_cluster.py @@ -15,11 +15,11 @@ num_workers=1, head_cpus="500m", head_memory=2, - min_cpus="500m", - max_cpus=1, - min_memory=1, - max_memory=2, - num_gpus=0, + worker_cpu_requests="500m", + worker_cpu_limits=1, + worker_memory_requests=1, + worker_memory_limits=2, + num_worker_gpus=0, image=ray_image, appwrapper=True, ) diff --git a/tests/unit_test.py b/tests/unit_test.py index e8fa61c3..8cc679a5 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -258,9 +258,9 @@ def test_config_creation(): assert config.name == "unit-test-cluster" and config.namespace == "ns" assert config.num_workers == 2 - assert config.min_cpus == 3 and config.max_cpus == 4 - assert config.min_memory == "5G" and config.max_memory == "6G" - assert config.num_gpus == 7 + assert config.worker_cpu_requests == 3 and config.worker_cpu_limits == 4 + assert config.worker_memory_requests == "5G" and config.worker_memory_limits == "6G" + assert config.num_worker_gpus == 7 assert config.image == "quay.io/rhoai/ray:2.23.0-py39-cu121" assert 
config.template == f"{parent}/src/codeflare_sdk/templates/base-template.yaml" assert config.machine_types == ["cpu.small", "gpu.large"] @@ -393,11 +393,11 @@ def test_cluster_creation_no_mcad_local_queue(mocker): name="unit-test-cluster-ray", namespace="ns", num_workers=2, - min_cpus=3, - max_cpus=4, - min_memory=5, - max_memory=6, - num_gpus=7, + worker_cpu_requests=3, + worker_cpu_limits=4, + worker_memory_requests=5, + worker_memory_limits=6, + num_worker_gpus=7, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], image="quay.io/rhoai/ray:2.23.0-py39-cu121", @@ -1985,9 +1985,15 @@ def custom_side_effect(group, version, namespace, plural, **kwargs): "m4.xlarge" in cluster_config.machine_types and "g4dn.xlarge" in cluster_config.machine_types ) - assert cluster_config.min_cpus == 1 and cluster_config.max_cpus == 1 - assert cluster_config.min_memory == "2G" and cluster_config.max_memory == "2G" - assert cluster_config.num_gpus == 0 + assert ( + cluster_config.worker_cpu_requests == 1 + and cluster_config.worker_cpu_limits == 1 + ) + assert ( + cluster_config.worker_memory_requests == "2G" + and cluster_config.worker_memory_limits == "2G" + ) + assert cluster_config.num_worker_gpus == 0 assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -2021,9 +2027,15 @@ def test_get_cluster(mocker): "m4.xlarge" in cluster_config.machine_types and "g4dn.xlarge" in cluster_config.machine_types ) - assert cluster_config.min_cpus == 1 and cluster_config.max_cpus == 1 - assert cluster_config.min_memory == "2G" and cluster_config.max_memory == "2G" - assert cluster_config.num_gpus == 0 + assert ( + cluster_config.worker_cpu_requests == 1 + and cluster_config.worker_cpu_limits == 1 + ) + assert ( + cluster_config.worker_memory_requests == "2G" + and cluster_config.worker_memory_limits == "2G" + ) + assert cluster_config.num_worker_gpus == 0 assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -2053,9 +2065,15 @@ def test_get_cluster_no_mcad(mocker): "m4.xlarge" in cluster_config.machine_types and "g4dn.xlarge" in cluster_config.machine_types ) - assert cluster_config.min_cpus == 1 and cluster_config.max_cpus == 1 - assert cluster_config.min_memory == "2G" and cluster_config.max_memory == "2G" - assert cluster_config.num_gpus == 0 + assert ( + cluster_config.worker_cpu_requests == 1 + and cluster_config.worker_cpu_limits == 1 + ) + assert ( + cluster_config.worker_memory_requests == "2G" + and cluster_config.worker_memory_limits == "2G" + ) + assert cluster_config.num_worker_gpus == 0 assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 9e7a60b6..51c47aa6 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -9,11 +9,11 @@ def createClusterConfig(): name="unit-test-cluster", namespace="ns", num_workers=2, - min_cpus=3, - max_cpus=4, - min_memory=5, - max_memory=6, - num_gpus=7, + worker_cpu_requests=3, + worker_cpu_limits=4, + worker_memory_requests=5, + worker_memory_limits=6, + num_worker_gpus=7, appwrapper=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index 235383f6..98204811 100644 --- 
a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -50,11 +50,11 @@ def run_mnist_raycluster_sdk_oauth(self): num_workers=1, head_cpus=1, head_memory=2, - min_cpus=1, - max_cpus=1, - min_memory=1, - max_memory=2, - num_gpus=0, + worker_cpu_requests=1, + worker_cpu_limits=1, + worker_memory_requests=1, + worker_memory_limits=2, + num_worker_gpus=0, image=ray_image, write_to_file=True, verify_tls=False, From cc9286f6dc50d8b24e511f0c8da50586f2347368 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 2 Jul 2024 09:24:41 +0000 Subject: [PATCH 281/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index a8c7e72a..607d3de4 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 92% - 92% + 91% + 91% From e30d450fca014021abc04c81d0406c72ec146a6b Mon Sep 17 00:00:00 2001 From: Anish Asthana Date: Tue, 2 Jul 2024 10:59:20 -0400 Subject: [PATCH 282/496] Remove Anish from owners Signed-off-by: Anish Asthana --- OWNERS | 1 - 1 file changed, 1 deletion(-) diff --git a/OWNERS b/OWNERS index c7dd78c3..02abe61c 100644 --- a/OWNERS +++ b/OWNERS @@ -9,7 +9,6 @@ approvers: - MichaelClifford - varshaprasad96 reviewers: - - anishasthana - astefanutti - Bobbins228 - ChristianZaccaria From 21eea8093861374ae8f9cd46072f658985d11a84 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 2 Jul 2024 15:44:51 +0100 Subject: [PATCH 283/496] Made local_queue parameter optional --- src/codeflare_sdk/utils/generate_yaml.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 18370564..3e692480 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -30,6 +30,7 @@ from os import urandom from base64 import b64encode from urllib3.util import parse_url +from kubernetes.client.exceptions import ApiException def read_template(template): @@ -191,8 +192,11 @@ def get_default_kueue_name(namespace: str): namespace=namespace, plural="localqueues", ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) + except ApiException as e: # pragma: no cover + if e.status == 404 or e.status == 403: + return + else: + return _kube_api_error_handling(e) for lq in local_queues["items"]: if ( "annotations" in lq["metadata"] @@ -201,9 +205,6 @@ def get_default_kueue_name(namespace: str): == "true" ): return lq["metadata"]["name"] - raise ValueError( - "Default Local Queue with kueue.x-k8s.io/default-queue: true annotation not found please create a default Local Queue or provide the local_queue name in Cluster Configuration" - ) def local_queue_exists(namespace: str, local_queue_name: str): @@ -228,7 +229,9 @@ def local_queue_exists(namespace: str, local_queue_name: str): def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]): lq_name = local_queue or get_default_kueue_name(namespace) - if not local_queue_exists(namespace, lq_name): + if lq_name == None: + return + elif not local_queue_exists(namespace, lq_name): raise ValueError( "local_queue provided does not exist or is not in this namespace. 
Please provide the correct local_queue name in Cluster Configuration" ) From 1ab542101b986ab3d905605dbccb4eaeefc8fcd9 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 2 Jul 2024 15:48:18 +0100 Subject: [PATCH 284/496] Added unit test for unmanaged Ray Cluster --- tests/test-case-no-kueue-no-aw.yaml | 153 ++++++++++++++++++++++++++++ tests/unit_test.py | 23 +++++ 2 files changed, 176 insertions(+) create mode 100644 tests/test-case-no-kueue-no-aw.yaml diff --git a/tests/test-case-no-kueue-no-aw.yaml b/tests/test-case-no-kueue-no-aw.yaml new file mode 100644 index 00000000..dfca7951 --- /dev/null +++ b/tests/test-case-no-kueue-no-aw.yaml @@ -0,0 +1,153 @@ +apiVersion: ray.io/v1 +kind: RayCluster +metadata: + annotations: + app.kubernetes.io/managed-by: test-prefix + labels: + controller-tools.k8s.io: '1.0' + name: unit-test-no-kueue + namespace: ns +spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + enableIngress: false + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + serviceType: ClusterIP + template: + spec: + containers: + - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + requests: + cpu: 2 + memory: 8G + nvidia.com/gpu: 0 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: + - name: unit-test-pull-secret + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + rayVersion: 2.23.0 + workerGroupSpecs: + - groupName: small-group-unit-test-no-kueue + maxReplicas: 2 + minReplicas: 2 + rayStartParams: + block: 'true' + num-gpus: '7' + replicas: 2 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 4 + memory: 6G + nvidia.com/gpu: 7 + requests: + cpu: 3 + memory: 5G + nvidia.com/gpu: 7 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: + - name: 
unit-test-pull-secret + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert diff --git a/tests/unit_test.py b/tests/unit_test.py index 8cc679a5..ca6cb958 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -285,6 +285,28 @@ def test_cluster_creation(mocker): ) +def test_cluster_no_kueue_no_aw(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) + mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") + mocker.patch("os.environ.get", return_value="test-prefix") + config = createClusterConfig() + config.appwrapper = False + config.name = "unit-test-no-kueue" + config.write_to_file = True + cluster = Cluster(config) + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-no-kueue.yaml" + assert cluster.config.local_queue == None + assert filecmp.cmp( + f"{aw_dir}unit-test-no-kueue.yaml", + f"{parent}/tests/test-case-no-kueue-no-aw.yaml", + shallow=True, + ) + + def test_create_app_wrapper_raises_error_with_no_image(): config = createClusterConfig() config.image = "" # Clear the image to test error handling @@ -2799,6 +2821,7 @@ def test_rjc_list_jobs(ray_job_client, mocker): # Make sure to always keep this function last def test_cleanup(): + os.remove(f"{aw_dir}unit-test-no-kueue.yaml") os.remove(f"{aw_dir}unit-test-cluster.yaml") os.remove(f"{aw_dir}test.yaml") os.remove(f"{aw_dir}raytest2.yaml") From 2a85469215be4a62ac6b6546eabfc4d6d72bf03d Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 8 Jul 2024 10:27:54 +0100 Subject: [PATCH 285/496] Updated image parameter to be optional --- src/codeflare_sdk/cluster/cluster.py | 13 ------------- src/codeflare_sdk/utils/generate_yaml.py | 5 +++-- tests/unit_test.py | 13 ------------- 3 files changed, 3 insertions(+), 28 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index b823cfd5..78bc666c 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -103,16 +103,6 @@ def job_client(self): ) return self._job_submission_client - def validate_image_config(self): - """ - Validates that the image configuration is not empty. - - :param image: The image string to validate - :raises ValueError: If the image is not specified - """ - if self.config.image == "" or self.config.image == None: - raise ValueError("Image must be specified in the ClusterConfiguration") - def create_app_wrapper(self): """ Called upon cluster object creation, creates an AppWrapper yaml based on @@ -128,9 +118,6 @@ def create_app_wrapper(self): f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." 
) - # Validate image configuration - self.validate_image_config() - # Before attempting to create the cluster AW, let's evaluate the ClusterConfig name = self.config.name diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 3e692480..cde49ed3 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -86,8 +86,9 @@ def update_names(cluster_yaml, cluster_name, namespace): def update_image(spec, image): containers = spec.get("containers") - for container in containers: - container["image"] = image + if image != "": + for container in containers: + container["image"] = image def update_image_pull_secrets(spec, image_pull_secrets): diff --git a/tests/unit_test.py b/tests/unit_test.py index ca6cb958..3daba3a1 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -307,19 +307,6 @@ def test_cluster_no_kueue_no_aw(mocker): ) -def test_create_app_wrapper_raises_error_with_no_image(): - config = createClusterConfig() - config.image = "" # Clear the image to test error handling - try: - cluster = Cluster(config) - cluster.create_app_wrapper() - assert False, "Expected ValueError when 'image' is not specified." - except ValueError as error: - assert ( - str(error) == "Image must be specified in the ClusterConfiguration" - ), "Error message did not match expected output." - - def get_local_queue(group, version, namespace, plural): assert group == "kueue.x-k8s.io" assert version == "v1beta1" From 5ce0b2c9b738444a76bb10319125f9c0318ab37a Mon Sep 17 00:00:00 2001 From: Kevin Date: Thu, 9 May 2024 16:38:25 -0400 Subject: [PATCH 286/496] simplify function calls and add option for custom resources Signed-off-by: Kevin --- src/codeflare_sdk/cluster/cluster.py | 96 ++++----- src/codeflare_sdk/cluster/config.py | 109 +++++++++-- src/codeflare_sdk/cluster/model.py | 7 +- .../templates/base-template.yaml | 4 - src/codeflare_sdk/utils/generate_yaml.py | 182 ++++++++++-------- src/codeflare_sdk/utils/pretty_print.py | 2 +- tests/e2e/local_interactive_sdk_kind_test.py | 1 - tests/e2e/local_interactive_sdk_oauth_test.py | 1 - tests/e2e/mnist_raycluster_sdk_kind_test.py | 1 - tests/e2e/mnist_raycluster_sdk_oauth_test.py | 1 - tests/e2e/start_ray_cluster.py | 1 - tests/test-case-bad.yaml | 4 +- tests/test-case-no-kueue-no-aw.yaml | 4 +- tests/test-case-no-mcad.yamls | 4 +- tests/test-case.yaml | 4 +- tests/test-default-appwrapper.yaml | 10 +- tests/unit_test.py | 60 +++--- tests/unit_test_support.py | 2 +- tests/upgrade/raycluster_sdk_upgrade_test.py | 1 - 19 files changed, 289 insertions(+), 205 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 78bc666c..f0f50eb3 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -29,6 +29,7 @@ from ..utils import pretty_print from ..utils.generate_yaml import ( generate_appwrapper, + head_worker_gpu_count_from_cluster, ) from ..utils.kube_api_helpers import _kube_api_error_handling from ..utils.generate_yaml import is_openshift_cluster @@ -118,48 +119,7 @@ def create_app_wrapper(self): f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." 
) - # Before attempting to create the cluster AW, let's evaluate the ClusterConfig - - name = self.config.name - namespace = self.config.namespace - head_cpus = self.config.head_cpus - head_memory = self.config.head_memory - num_head_gpus = self.config.num_head_gpus - worker_cpu_requests = self.config.worker_cpu_requests - worker_cpu_limits = self.config.worker_cpu_limits - worker_memory_requests = self.config.worker_memory_requests - worker_memory_limits = self.config.worker_memory_limits - num_worker_gpus = self.config.num_worker_gpus - workers = self.config.num_workers - template = self.config.template - image = self.config.image - appwrapper = self.config.appwrapper - env = self.config.envs - image_pull_secrets = self.config.image_pull_secrets - write_to_file = self.config.write_to_file - local_queue = self.config.local_queue - labels = self.config.labels - return generate_appwrapper( - name=name, - namespace=namespace, - head_cpus=head_cpus, - head_memory=head_memory, - num_head_gpus=num_head_gpus, - worker_cpu_requests=worker_cpu_requests, - worker_cpu_limits=worker_cpu_limits, - worker_memory_requests=worker_memory_requests, - worker_memory_limits=worker_memory_limits, - num_worker_gpus=num_worker_gpus, - workers=workers, - template=template, - image=image, - appwrapper=appwrapper, - env=env, - image_pull_secrets=image_pull_secrets, - write_to_file=write_to_file, - local_queue=local_queue, - labels=labels, - ) + return generate_appwrapper(self) # creates a new cluster with the provided or default spec def up(self): @@ -305,7 +265,7 @@ def status( if print_to_console: # overriding the number of gpus with requested - cluster.worker_gpu = self.config.num_worker_gpus + _, cluster.worker_gpu = head_worker_gpu_count_from_cluster(self) pretty_print.print_cluster_status(cluster) elif print_to_console: if status == CodeFlareClusterStatus.UNKNOWN: @@ -443,6 +403,29 @@ def job_logs(self, job_id: str) -> str: """ return self.job_client.get_job_logs(job_id) + @staticmethod + def _head_worker_extended_resources_from_rc_dict(rc: Dict) -> Tuple[dict, dict]: + head_extended_resources, worker_extended_resources = {}, {} + for resource in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"].keys(): + if resource in ["memory", "cpu"]: + continue + worker_extended_resources[resource] = rc["spec"]["workerGroupSpecs"][0][ + "template" + ]["spec"]["containers"][0]["resources"]["limits"][resource] + + for resource in rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["limits"].keys(): + if resource in ["memory", "cpu"]: + continue + head_extended_resources[resource] = rc["spec"]["headGroupSpec"]["template"][ + "spec" + ]["containers"][0]["resources"]["limits"][resource] + + return head_extended_resources, worker_extended_resources + def from_k8_cluster_object( rc, appwrapper=True, @@ -456,6 +439,11 @@ def from_k8_cluster_object( else [] ) + ( + head_extended_resources, + worker_extended_resources, + ) = Cluster._head_worker_extended_resources_from_rc_dict(rc) + cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], @@ -473,11 +461,8 @@ def from_k8_cluster_object( worker_memory_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["memory"], - num_worker_gpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["nvidia.com/gpu"] - ), + 
worker_extended_resource_requests=worker_extended_resources, + head_extended_resource_requests=head_extended_resources, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], @@ -858,6 +843,11 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: protocol = "https" dashboard_url = f"{protocol}://{ingress.spec.rules[0].host}" + ( + head_extended_resources, + worker_extended_resources, + ) = Cluster._head_worker_extended_resources_from_rc_dict(rc) + return RayCluster( name=rc["metadata"]["name"], status=status, @@ -872,7 +862,7 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: worker_cpu=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["resources"]["limits"]["cpu"], - worker_gpu=0, # hard to detect currently how many gpus, can override it with what the user asked for + worker_extended_resources=worker_extended_resources, namespace=rc["metadata"]["namespace"], head_cpus=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ "resources" @@ -880,9 +870,7 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: head_mem=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ "resources" ]["limits"]["memory"], - head_gpu=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["nvidia.com/gpu"], + head_extended_resources=head_extended_resources, dashboard=dashboard_url, ) @@ -907,12 +895,12 @@ def _copy_to_ray(cluster: Cluster) -> RayCluster: worker_mem_min=cluster.config.worker_memory_requests, worker_mem_max=cluster.config.worker_memory_limits, worker_cpu=cluster.config.worker_cpu_requests, - worker_gpu=cluster.config.num_worker_gpus, + worker_extended_resources=cluster.config.worker_extended_resource_requests, namespace=cluster.config.namespace, dashboard=cluster.cluster_dashboard_uri(), head_cpus=cluster.config.head_cpus, head_mem=cluster.config.head_memory, - head_gpu=cluster.config.num_head_gpus, + head_extended_resources=cluster.config.head_extended_resource_requests, ) if ray.status == CodeFlareClusterStatus.READY: ray.status = RayClusterStatus.READY diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index cb8e3d3d..6a522fbc 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -25,12 +25,51 @@ dir = pathlib.Path(__file__).parent.parent.resolve() +# https://docs.ray.io/en/latest/ray-core/scheduling/accelerators.html +DEFAULT_RESOURCE_MAPPING = { + "nvidia.com/gpu": "GPU", + "intel.com/gpu": "GPU", + "amd.com/gpu": "GPU", + "aws.amazon.com/neuroncore": "neuron_cores", + "google.com/tpu": "TPU", + "habana.ai/gaudi": "HPU", + "huawei.com/Ascend910": "NPU", + "huawei.com/Ascend310": "NPU", +} + @dataclass class ClusterConfiguration: """ This dataclass is used to specify resource requirements and other details, and is passed in as an argument when creating a Cluster object. + + Attributes: + - name: The name of the cluster. + - namespace: The namespace in which the cluster should be created. + - head_info: A list of strings containing information about the head node. + - head_cpus: The number of CPUs to allocate to the head node. + - head_memory: The amount of memory to allocate to the head node. + - head_gpus: The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests) + - head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} + - machine_types: A list of machine types to use for the cluster. 
+ - min_cpus: The minimum number of CPUs to allocate to each worker. + - max_cpus: The maximum number of CPUs to allocate to each worker. + - num_workers: The number of workers to create. + - min_memory: The minimum amount of memory to allocate to each worker. + - max_memory: The maximum amount of memory to allocate to each worker. + - num_gpus: The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests) + - template: The path to the template file to use for the cluster. + - appwrapper: A boolean indicating whether to use an AppWrapper. + - envs: A dictionary of environment variables to set for the cluster. + - image: The image to use for the cluster. + - image_pull_secrets: A list of image pull secrets to use for the cluster. + - write_to_file: A boolean indicating whether to write the cluster configuration to a file. + - verify_tls: A boolean indicating whether to verify TLS when connecting to the cluster. + - labels: A dictionary of labels to apply to the cluster. + - worker_extended_resource_requests: A dictionary of extended resource requests for each worker. ex: {"nvidia.com/gpu": 1} + - extended_resource_mapping: A dictionary of custom resource mappings to map extended resource requests to RayCluster resource names + - overwrite_default_resource_mapping: A boolean indicating whether to overwrite the default resource mapping. """ name: str @@ -39,7 +78,7 @@ class ClusterConfiguration: head_cpus: typing.Union[int, str] = 2 head_memory: typing.Union[int, str] = 8 head_gpus: int = None # Deprecating - num_head_gpus: int = 0 + head_extended_resource_requests: typing.Dict[str, int] = field(default_factory=dict) machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] worker_cpu_requests: typing.Union[int, str] = 1 worker_cpu_limits: typing.Union[int, str] = 1 @@ -50,7 +89,6 @@ class ClusterConfiguration: worker_memory_limits: typing.Union[int, str] = 2 min_memory: typing.Union[int, str] = None # Deprecating max_memory: typing.Union[int, str] = None # Deprecating - num_worker_gpus: int = 0 num_gpus: int = None # Deprecating template: str = f"{dir}/templates/base-template.yaml" appwrapper: bool = False @@ -60,6 +98,11 @@ class ClusterConfiguration: write_to_file: bool = False verify_tls: bool = True labels: dict = field(default_factory=dict) + worker_extended_resource_requests: typing.Dict[str, int] = field( + default_factory=dict + ) + extended_resource_mapping: typing.Dict[str, str] = field(default_factory=dict) + overwrite_default_resource_mapping: bool = False def __post_init__(self): if not self.verify_tls: @@ -70,8 +113,60 @@ def __post_init__(self): self._memory_to_string() self._str_mem_no_unit_add_GB() self._memory_to_resource() - self._gpu_to_resource() self._cpu_to_resource() + self._gpu_to_resource() + self._combine_extended_resource_mapping() + self._validate_extended_resource_requests(self.head_extended_resource_requests) + self._validate_extended_resource_requests( + self.worker_extended_resource_requests + ) + + def _combine_extended_resource_mapping(self): + if overwritten := set(self.extended_resource_mapping.keys()).intersection( + DEFAULT_RESOURCE_MAPPING.keys() + ): + if self.overwrite_default_resource_mapping: + warnings.warn( + f"Overwriting default resource mapping for {overwritten}", + UserWarning, + ) + else: + raise ValueError( + f"Resource mapping already exists for {overwritten}, set overwrite_default_resource_mapping to True to overwrite" + ) + self.extended_resource_mapping = { + 
**DEFAULT_RESOURCE_MAPPING, + **self.extended_resource_mapping, + } + + def _validate_extended_resource_requests( + self, extended_resources: typing.Dict[str, int] + ): + for k in extended_resources.keys(): + if k not in self.extended_resource_mapping.keys(): + raise ValueError( + f"extended resource '{k}' not found in extended_resource_mapping, available resources are {list(self.extended_resource_mapping.keys())}, to add more supported resources use extended_resource_mapping. i.e. extended_resource_mapping = {{'{k}': 'FOO_BAR'}}" + ) + + def _gpu_to_resource(self): + if self.head_gpus: + warnings.warn( + f"head_gpus is being deprecated, replacing with head_extended_resource_requests['nvidia.com/gpu'] = {self.head_gpus}" + ) + if "nvidia.com/gpu" in self.head_extended_resource_requests: + raise ValueError( + "nvidia.com/gpu already exists in head_extended_resource_requests" + ) + self.head_extended_resource_requests["nvidia.com/gpu"] = self.head_gpus + if self.num_gpus: + warnings.warn( + f"num_gpus is being deprecated, replacing with worker_extended_resource_requests['nvidia.com/gpu'] = {self.num_gpus}" + ) + if "nvidia.com/gpu" in self.worker_extended_resource_requests: + raise ValueError( + "nvidia.com/gpu already exists in worker_extended_resource_requests" + ) + self.worker_extended_resource_requests["nvidia.com/gpu"] = self.num_gpus def _str_mem_no_unit_add_GB(self): if isinstance(self.head_memory, str) and self.head_memory.isdecimal(): @@ -95,14 +190,6 @@ def _memory_to_string(self): if isinstance(self.worker_memory_limits, int): self.worker_memory_limits = f"{self.worker_memory_limits}G" - def _gpu_to_resource(self): - if self.head_gpus: - warnings.warn("head_gpus is being deprecated, use num_head_gpus") - self.num_head_gpus = self.head_gpus - if self.num_gpus: - warnings.warn("num_gpus is being deprecated, use num_worker_gpus") - self.num_worker_gpus = self.num_gpus - def _cpu_to_resource(self): if self.min_cpus: warnings.warn("min_cpus is being deprecated, use worker_cpu_requests") diff --git a/src/codeflare_sdk/cluster/model.py b/src/codeflare_sdk/cluster/model.py index 2547de25..5d6e2ed2 100644 --- a/src/codeflare_sdk/cluster/model.py +++ b/src/codeflare_sdk/cluster/model.py @@ -18,8 +18,9 @@ dataclasses to store information for Ray clusters and AppWrappers. 
""" -from dataclasses import dataclass +from dataclasses import dataclass, field from enum import Enum +import typing class RayClusterStatus(Enum): @@ -74,14 +75,14 @@ class RayCluster: status: RayClusterStatus head_cpus: int head_mem: str - head_gpu: int workers: int worker_mem_min: str worker_mem_max: str worker_cpu: int - worker_gpu: int namespace: str dashboard: str + worker_extended_resources: typing.Dict[str, int] = field(default_factory=dict) + head_extended_resources: typing.Dict[str, int] = field(default_factory=dict) @dataclass diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 7b36146a..076bd262 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -86,11 +86,9 @@ spec: limits: cpu: 2 memory: "8G" - nvidia.com/gpu: 0 requests: cpu: 2 memory: "8G" - nvidia.com/gpu: 0 volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -163,11 +161,9 @@ spec: limits: cpu: "2" memory: "12G" - nvidia.com/gpu: "1" requests: cpu: "2" memory: "12G" - nvidia.com/gpu: "1" volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index cde49ed3..1644dc15 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -17,6 +17,7 @@ (in the cluster sub-module) for AppWrapper generation. """ +import json from typing import Optional import typing import yaml @@ -31,6 +32,7 @@ from base64 import b64encode from urllib3.util import parse_url from kubernetes.client.exceptions import ApiException +import codeflare_sdk def read_template(template): @@ -78,10 +80,13 @@ def is_kind_cluster(): return False -def update_names(cluster_yaml, cluster_name, namespace): - meta = cluster_yaml.get("metadata") - meta["name"] = cluster_name - meta["namespace"] = namespace +def update_names( + cluster_yaml: dict, + cluster: "codeflare_sdk.cluster.Cluster", +): + metadata = cluster_yaml.get("metadata") + metadata["name"] = cluster.config.name + metadata["namespace"] = cluster.config.namespace def update_image(spec, image): @@ -114,7 +119,7 @@ def update_resources( worker_cpu_limits, worker_memory_requests, worker_memory_limits, - num_worker_gpus, + custom_resources, ): container = spec.get("containers") for resource in container: @@ -122,59 +127,103 @@ def update_resources( if requests is not None: requests["cpu"] = worker_cpu_requests requests["memory"] = worker_memory_requests - requests["nvidia.com/gpu"] = num_worker_gpus limits = resource.get("resources").get("limits") if limits is not None: limits["cpu"] = worker_cpu_limits limits["memory"] = worker_memory_limits - limits["nvidia.com/gpu"] = num_worker_gpus + for k in custom_resources.keys(): + limits[k] = custom_resources[k] + requests[k] = custom_resources[k] + + +def head_worker_gpu_count_from_cluster( + cluster: "codeflare_sdk.cluster.Cluster", +) -> typing.Tuple[int, int]: + head_gpus = 0 + worker_gpus = 0 + for k in cluster.config.head_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type == "GPU": + head_gpus += int(cluster.config.head_extended_resource_requests[k]) + for k in cluster.config.worker_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type == "GPU": + worker_gpus += 
int(cluster.config.worker_extended_resource_requests[k]) + + return head_gpus, worker_gpus + + +FORBIDDEN_CUSTOM_RESOURCE_TYPES = ["GPU", "CPU", "memory"] + + +def head_worker_resources_from_cluster( + cluster: "codeflare_sdk.cluster.Cluster", +) -> typing.Tuple[dict, dict]: + to_return = {}, {} + for k in cluster.config.head_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES: + continue + to_return[0][resource_type] = cluster.config.head_extended_resource_requests[ + k + ] + to_return[0].get(resource_type, 0) + + for k in cluster.config.worker_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES: + continue + to_return[1][resource_type] = cluster.config.worker_extended_resource_requests[ + k + ] + to_return[1].get(resource_type, 0) + return to_return def update_nodes( - cluster_yaml, - appwrapper_name, - worker_cpu_requests, - worker_cpu_limits, - worker_memory_requests, - worker_memory_limits, - num_worker_gpus, - workers, - image, - env, - image_pull_secrets, - head_cpus, - head_memory, - num_head_gpus, + ray_cluster_dict: dict, + cluster: "codeflare_sdk.cluster.Cluster", ): - head = cluster_yaml.get("spec").get("headGroupSpec") - head["rayStartParams"]["num-gpus"] = str(int(num_head_gpus)) + head = ray_cluster_dict.get("spec").get("headGroupSpec") + worker = ray_cluster_dict.get("spec").get("workerGroupSpecs")[0] + head_gpus, worker_gpus = head_worker_gpu_count_from_cluster(cluster) + head_resources, worker_resources = head_worker_resources_from_cluster(cluster) + head_resources = json.dumps(head_resources).replace('"', '\\"') + head_resources = f'"{head_resources}"' + worker_resources = json.dumps(worker_resources).replace('"', '\\"') + worker_resources = f'"{worker_resources}"' + head["rayStartParams"]["num-gpus"] = str(head_gpus) + head["rayStartParams"]["resources"] = head_resources - worker = cluster_yaml.get("spec").get("workerGroupSpecs")[0] # Head counts as first worker - worker["replicas"] = workers - worker["minReplicas"] = workers - worker["maxReplicas"] = workers - worker["groupName"] = "small-group-" + appwrapper_name - worker["rayStartParams"]["num-gpus"] = str(int(num_worker_gpus)) + worker["replicas"] = cluster.config.num_workers + worker["minReplicas"] = cluster.config.num_workers + worker["maxReplicas"] = cluster.config.num_workers + worker["groupName"] = "small-group-" + cluster.config.name + worker["rayStartParams"]["num-gpus"] = str(worker_gpus) + worker["rayStartParams"]["resources"] = worker_resources for comp in [head, worker]: spec = comp.get("template").get("spec") - update_image_pull_secrets(spec, image_pull_secrets) - update_image(spec, image) - update_env(spec, env) + update_image_pull_secrets(spec, cluster.config.image_pull_secrets) + update_image(spec, cluster.config.image) + update_env(spec, cluster.config.envs) if comp == head: # TODO: Eventually add head node configuration outside of template update_resources( - spec, head_cpus, head_cpus, head_memory, head_memory, num_head_gpus + spec, + cluster.config.head_cpus, + cluster.config.head_cpus, + cluster.config.head_memory, + cluster.config.head_memory, + cluster.config.head_extended_resource_requests, ) else: update_resources( spec, - worker_cpu_requests, - worker_cpu_limits, - worker_memory_requests, - worker_memory_limits, - num_worker_gpus, + cluster.config.worker_cpu_requests, + 
cluster.config.worker_cpu_limits, + cluster.config.worker_memory_requests, + cluster.config.worker_memory_limits, + cluster.config.worker_extended_resource_requests, ) @@ -278,63 +327,30 @@ def write_user_yaml(user_yaml, output_file_name): print(f"Written to: {output_file_name}") -def generate_appwrapper( - name: str, - namespace: str, - head_cpus: int, - head_memory: int, - num_head_gpus: int, - worker_cpu_requests: int, - worker_cpu_limits: int, - worker_memory_requests: int, - worker_memory_limits: int, - num_worker_gpus: int, - workers: int, - template: str, - image: str, - appwrapper: bool, - env, - image_pull_secrets: list, - write_to_file: bool, - local_queue: Optional[str], - labels, -): - cluster_yaml = read_template(template) - appwrapper_name, cluster_name = gen_names(name) - update_names(cluster_yaml, cluster_name, namespace) - update_nodes( +def generate_appwrapper(cluster: "codeflare_sdk.cluster.Cluster"): + cluster_yaml = read_template(cluster.config.template) + appwrapper_name, _ = gen_names(cluster.config.name) + update_names( cluster_yaml, - appwrapper_name, - worker_cpu_requests, - worker_cpu_limits, - worker_memory_requests, - worker_memory_limits, - num_worker_gpus, - workers, - image, - env, - image_pull_secrets, - head_cpus, - head_memory, - num_head_gpus, + cluster, ) - augment_labels(cluster_yaml, labels) + update_nodes(cluster_yaml, cluster) + augment_labels(cluster_yaml, cluster.config.labels) notebook_annotations(cluster_yaml) - user_yaml = ( - wrap_cluster(cluster_yaml, appwrapper_name, namespace) - if appwrapper + wrap_cluster(cluster_yaml, appwrapper_name, cluster.config.namespace) + if cluster.config.appwrapper else cluster_yaml ) - add_queue_label(user_yaml, namespace, local_queue) + add_queue_label(user_yaml, cluster.config.namespace, cluster.config.local_queue) - if write_to_file: + if cluster.config.write_to_file: directory_path = os.path.expanduser("~/.codeflare/resources/") outfile = os.path.join(directory_path, appwrapper_name + ".yaml") write_user_yaml(user_yaml, outfile) return outfile else: user_yaml = yaml.dump(user_yaml) - print(f"Yaml resources loaded for {name}") + print(f"Yaml resources loaded for {cluster.config.name}") return user_yaml diff --git a/src/codeflare_sdk/utils/pretty_print.py b/src/codeflare_sdk/utils/pretty_print.py index 42ef8398..9431ffd7 100644 --- a/src/codeflare_sdk/utils/pretty_print.py +++ b/src/codeflare_sdk/utils/pretty_print.py @@ -138,7 +138,7 @@ def print_clusters(clusters: List[RayCluster]): workers = str(cluster.workers) memory = f"{cluster.worker_mem_min}~{cluster.worker_mem_max}" cpu = str(cluster.worker_cpu) - gpu = str(cluster.worker_gpu) + gpu = str(cluster.worker_extended_resources.get("nvidia.com/gpu", 0)) #'table0' to display the cluster name, status, url, and dashboard link table0 = Table(box=None, show_header=False) diff --git a/tests/e2e/local_interactive_sdk_kind_test.py b/tests/e2e/local_interactive_sdk_kind_test.py index 999c72e1..8ca0bdac 100644 --- a/tests/e2e/local_interactive_sdk_kind_test.py +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -43,7 +43,6 @@ def run_local_interactives(self): worker_cpu_limits=1, worker_memory_requests=1, worker_memory_limits=2, - num_worker_gpus=0, image=ray_image, write_to_file=True, verify_tls=False, diff --git a/tests/e2e/local_interactive_sdk_oauth_test.py b/tests/e2e/local_interactive_sdk_oauth_test.py index a2d5b612..4177413e 100644 --- a/tests/e2e/local_interactive_sdk_oauth_test.py +++ b/tests/e2e/local_interactive_sdk_oauth_test.py @@ -48,7 +48,6 @@ def 
run_local_interactives(self): worker_cpu_limits=1, worker_memory_requests=4, worker_memory_limits=4, - num_worker_gpus=0, image=ray_image, verify_tls=False, ) diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index d8539722..af5fcc1f 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -41,7 +41,6 @@ def run_mnist_raycluster_sdk_kind(self): worker_cpu_limits=1, worker_memory_requests=1, worker_memory_limits=2, - num_worker_gpus=0, image=ray_image, write_to_file=True, verify_tls=False, diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index ed2f4fb1..4e85e9f1 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -48,7 +48,6 @@ def run_mnist_raycluster_sdk_oauth(self): worker_cpu_limits=1, worker_memory_requests=1, worker_memory_limits=2, - num_worker_gpus=0, image=ray_image, write_to_file=True, verify_tls=False, diff --git a/tests/e2e/start_ray_cluster.py b/tests/e2e/start_ray_cluster.py index 4fd7fb3f..b34f0331 100644 --- a/tests/e2e/start_ray_cluster.py +++ b/tests/e2e/start_ray_cluster.py @@ -19,7 +19,6 @@ worker_cpu_limits=1, worker_memory_requests=1, worker_memory_limits=2, - num_worker_gpus=0, image=ray_image, appwrapper=True, ) diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index d4d230d4..cb2f4a0a 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -33,6 +33,7 @@ spec: block: 'true' dashboard-host: 0.0.0.0 num-gpus: '0' + resources: '"{}"' serviceType: ClusterIP template: spec: @@ -63,11 +64,9 @@ spec: limits: cpu: 2 memory: 8G - nvidia.com/gpu: 0 requests: cpu: 2 memory: 8G - nvidia.com/gpu: 0 rayVersion: 2.23.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster @@ -76,6 +75,7 @@ spec: rayStartParams: block: 'true' num-gpus: '7' + resources: '"{}"' replicas: 2 template: metadata: diff --git a/tests/test-case-no-kueue-no-aw.yaml b/tests/test-case-no-kueue-no-aw.yaml index dfca7951..3ea7a22d 100644 --- a/tests/test-case-no-kueue-no-aw.yaml +++ b/tests/test-case-no-kueue-no-aw.yaml @@ -26,6 +26,7 @@ spec: block: 'true' dashboard-host: 0.0.0.0 num-gpus: '0' + resources: '"{}"' serviceType: ClusterIP template: spec: @@ -51,11 +52,9 @@ spec: limits: cpu: 2 memory: 8G - nvidia.com/gpu: 0 requests: cpu: 2 memory: 8G - nvidia.com/gpu: 0 volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -94,6 +93,7 @@ spec: rayStartParams: block: 'true' num-gpus: '7' + resources: '"{}"' replicas: 2 template: metadata: diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 2d0e7e9b..45a3dfb9 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -29,6 +29,7 @@ spec: block: 'true' dashboard-host: 0.0.0.0 num-gpus: '0' + resources: '"{}"' serviceType: ClusterIP template: spec: @@ -54,11 +55,9 @@ spec: limits: cpu: 2 memory: 8G - nvidia.com/gpu: 0 requests: cpu: 2 memory: 8G - nvidia.com/gpu: 0 volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -97,6 +96,7 @@ spec: rayStartParams: block: 'true' num-gpus: '7' + resources: '"{}"' replicas: 2 template: metadata: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 00b241af..461ed7df 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -34,6 +34,7 @@ spec: block: 'true' dashboard-host: 0.0.0.0 num-gpus: '0' + resources: '"{}"' 
serviceType: ClusterIP template: spec: @@ -59,11 +60,9 @@ spec: limits: cpu: 2 memory: 8G - nvidia.com/gpu: 0 requests: cpu: 2 memory: 8G - nvidia.com/gpu: 0 volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -102,6 +101,7 @@ spec: rayStartParams: block: 'true' num-gpus: '7' + resources: '"{}"' replicas: 2 template: metadata: diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index cc44e234..cc27e37a 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -34,9 +34,11 @@ spec: block: 'true' dashboard-host: 0.0.0.0 num-gpus: '0' + resources: '"{}"' serviceType: ClusterIP template: spec: + imagePullSecrets: [] containers: - image: quay.io/rhoai/ray:2.23.0-py39-cu121 imagePullPolicy: Always @@ -59,11 +61,9 @@ spec: limits: cpu: 2 memory: 8G - nvidia.com/gpu: 0 requests: cpu: 2 memory: 8G - nvidia.com/gpu: 0 volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -77,7 +77,6 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: [] volumes: - configMap: items: @@ -101,6 +100,7 @@ spec: rayStartParams: block: 'true' num-gpus: '0' + resources: '"{}"' replicas: 1 template: metadata: @@ -109,6 +109,7 @@ spec: labels: key: value spec: + imagePullSecrets: [] containers: - image: quay.io/rhoai/ray:2.23.0-py39-cu121 lifecycle: @@ -123,11 +124,9 @@ spec: limits: cpu: 1 memory: 2G - nvidia.com/gpu: 0 requests: cpu: 1 memory: 2G - nvidia.com/gpu: 0 volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -141,7 +140,6 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: [] volumes: - configMap: items: diff --git a/tests/unit_test.py b/tests/unit_test.py index 3daba3a1..2decade2 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -260,7 +260,7 @@ def test_config_creation(): assert config.num_workers == 2 assert config.worker_cpu_requests == 3 and config.worker_cpu_limits == 4 assert config.worker_memory_requests == "5G" and config.worker_memory_limits == "6G" - assert config.num_worker_gpus == 7 + assert config.worker_extended_resource_requests == {"nvidia.com/gpu": 7} assert config.image == "quay.io/rhoai/ray:2.23.0-py39-cu121" assert config.template == f"{parent}/src/codeflare_sdk/templates/base-template.yaml" assert config.machine_types == ["cpu.small", "gpu.large"] @@ -406,7 +406,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): worker_cpu_limits=4, worker_memory_requests=5, worker_memory_limits=6, - num_worker_gpus=7, + worker_extended_resource_requests={"nvidia.com/gpu": 7}, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], image="quay.io/rhoai/ray:2.23.0-py39-cu121", @@ -883,12 +883,10 @@ def test_ray_details(mocker, capsys): worker_mem_min="2G", worker_mem_max="2G", worker_cpu=1, - worker_gpu=0, namespace="ns", dashboard="fake-uri", head_cpus=2, head_mem=8, - head_gpu=0, ) mocker.patch( "codeflare_sdk.cluster.cluster.Cluster.status", @@ -922,7 +920,7 @@ def test_ray_details(mocker, capsys): assert ray1.worker_mem_min == ray2.worker_mem_min assert ray1.worker_mem_max == ray2.worker_mem_max assert ray1.worker_cpu == ray2.worker_cpu - assert ray1.worker_gpu == ray2.worker_gpu + assert ray1.worker_extended_resources == ray2.worker_extended_resources try: print_clusters([ray1, ray2]) print_cluster_status(ray1) @@ 
-1129,12 +1127,10 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "limits": { "cpu": 2, "memory": "8G", - "nvidia.com/gpu": 0, }, "requests": { "cpu": 2, "memory": "8G", - "nvidia.com/gpu": 0, }, }, "volumeMounts": [ @@ -1198,7 +1194,10 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "groupName": "small-group-quicktest", "maxReplicas": 1, "minReplicas": 1, - "rayStartParams": {"block": "true", "num-gpus": "0"}, + "rayStartParams": { + "block": "true", + "num-gpus": "0", + }, "replicas": 1, "scaleStrategy": {}, "template": { @@ -1249,12 +1248,10 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "limits": { "cpu": 1, "memory": "2G", - "nvidia.com/gpu": 0, }, "requests": { "cpu": 1, "memory": "2G", - "nvidia.com/gpu": 0, }, }, "volumeMounts": [ @@ -1413,12 +1410,10 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "limits": { "cpu": 2, "memory": "8G", - "nvidia.com/gpu": 0, }, "requests": { "cpu": 2, "memory": "8G", - "nvidia.com/gpu": 0, }, }, } @@ -1432,7 +1427,10 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "groupName": "small-group-quicktest2", "maxReplicas": 1, "minReplicas": 1, - "rayStartParams": {"block": "true", "num-gpus": "0"}, + "rayStartParams": { + "block": "true", + "num-gpus": "0", + }, "replicas": 1, "template": { "metadata": { @@ -1469,12 +1467,10 @@ def get_ray_obj(group, version, namespace, plural, cls=None): "limits": { "cpu": 1, "memory": "2G", - "nvidia.com/gpu": 0, }, "requests": { "cpu": 1, "memory": "2G", - "nvidia.com/gpu": 0, }, }, } @@ -1591,12 +1587,10 @@ def get_aw_obj(group, version, namespace, plural): "limits": { "cpu": 2, "memory": "8G", - "nvidia.com/gpu": 0, }, "requests": { "cpu": 2, "memory": "8G", - "nvidia.com/gpu": 0, }, }, } @@ -1650,12 +1644,10 @@ def get_aw_obj(group, version, namespace, plural): "limits": { "cpu": 1, "memory": "2G", - "nvidia.com/gpu": 0, }, "requests": { "cpu": 1, "memory": "2G", - "nvidia.com/gpu": 0, }, }, } @@ -1786,12 +1778,10 @@ def get_aw_obj(group, version, namespace, plural): "limits": { "cpu": 2, "memory": "8G", - "nvidia.com/gpu": 0, }, "requests": { "cpu": 2, "memory": "8G", - "nvidia.com/gpu": 0, }, }, } @@ -1845,12 +1835,10 @@ def get_aw_obj(group, version, namespace, plural): "limits": { "cpu": 1, "memory": "2G", - "nvidia.com/gpu": 0, }, "requests": { "cpu": 1, "memory": "2G", - "nvidia.com/gpu": 0, }, }, } @@ -2002,7 +1990,7 @@ def custom_side_effect(group, version, namespace, plural, **kwargs): cluster_config.worker_memory_requests == "2G" and cluster_config.worker_memory_limits == "2G" ) - assert cluster_config.num_worker_gpus == 0 + assert cluster_config.worker_extended_resource_requests == {} assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -2044,7 +2032,7 @@ def test_get_cluster(mocker): cluster_config.worker_memory_requests == "2G" and cluster_config.worker_memory_limits == "2G" ) - assert cluster_config.num_worker_gpus == 0 + assert cluster_config.worker_extended_resource_requests == {} assert ( cluster_config.image == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -2082,7 +2070,7 @@ def test_get_cluster_no_mcad(mocker): cluster_config.worker_memory_requests == "2G" and cluster_config.worker_memory_limits == "2G" ) - assert cluster_config.num_worker_gpus == 0 + assert cluster_config.worker_extended_resource_requests == {} assert ( cluster_config.image == 
"ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" @@ -2310,12 +2298,10 @@ def test_cluster_status(mocker): worker_mem_min=2, worker_mem_max=2, worker_cpu=1, - worker_gpu=0, namespace="ns", dashboard="fake-uri", head_cpus=2, head_mem=8, - head_gpu=0, ) cf = Cluster( ClusterConfiguration( @@ -2806,6 +2792,24 @@ def test_rjc_list_jobs(ray_job_client, mocker): assert job_list_jobs == jobs_list +def test_cluster_config_deprecation_conversion(mocker): + config = ClusterConfiguration( + name="test", + num_gpus=2, + head_gpus=1, + min_memory=3, + max_memory=4, + min_cpus=1, + max_cpus=2, + ) + assert config.worker_extended_resource_requests == {"nvidia.com/gpu": 2} + assert config.head_extended_resource_requests == {"nvidia.com/gpu": 1} + assert config.worker_memory_requests == "3G" + assert config.worker_memory_limits == "4G" + assert config.worker_cpu_requests == 1 + assert config.worker_cpu_limits == 2 + + # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}unit-test-no-kueue.yaml") diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 51c47aa6..36c7d871 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -13,7 +13,7 @@ def createClusterConfig(): worker_cpu_limits=4, worker_memory_requests=5, worker_memory_limits=6, - num_worker_gpus=7, + worker_extended_resource_requests={"nvidia.com/gpu": 7}, appwrapper=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index 98204811..ff98e28f 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -54,7 +54,6 @@ def run_mnist_raycluster_sdk_oauth(self): worker_cpu_limits=1, worker_memory_requests=1, worker_memory_limits=2, - num_worker_gpus=0, image=ray_image, write_to_file=True, verify_tls=False, From 58d11b7c172fe46d120a3c3e70efa2278600d57c Mon Sep 17 00:00:00 2001 From: Suyash Nalawade Date: Tue, 25 Jun 2024 22:02:32 +0530 Subject: [PATCH 287/496] Added support for checking version of the codeflare-sdk package - Resolves: Issue #504 [https://github.com/project-codeflare/codeflare-sdk/issues/504] --- src/codeflare_sdk/__init__.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index 28a9c4db..d3f49266 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -19,3 +19,11 @@ from .job import RayJobClient from .utils import generate_cert + +from importlib.metadata import version, PackageNotFoundError + +try: + __version__ = version("codeflare-sdk") #Update with latest version with each release + +except PackageNotFoundError: + __version__ = "unknown" \ No newline at end of file From b375195bc1c976691b9e8f8318e63c7d78a338b6 Mon Sep 17 00:00:00 2001 From: Suyash Nalawade Date: Tue, 25 Jun 2024 22:23:43 +0530 Subject: [PATCH 288/496] updated with pre-commit changes --- src/codeflare_sdk/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index d3f49266..fbfb7bf7 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -23,7 +23,9 @@ from importlib.metadata import version, PackageNotFoundError try: - __version__ = version("codeflare-sdk") #Update with latest version with each release + __version__ = version( + 
"codeflare-sdk" + ) # Update with latest version with each release except PackageNotFoundError: - __version__ = "unknown" \ No newline at end of file + __version__ = "unknown" From 342d54c8cec188349730110a018dff3e4ebbc5f6 Mon Sep 17 00:00:00 2001 From: Suyash Nalawade Date: Mon, 1 Jul 2024 19:01:43 +0530 Subject: [PATCH 289/496] changes as per suggestions - changed the comment - changed default value to v0.0.0 --- src/codeflare_sdk/__init__.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index fbfb7bf7..358ff4f2 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -23,9 +23,7 @@ from importlib.metadata import version, PackageNotFoundError try: - __version__ = version( - "codeflare-sdk" - ) # Update with latest version with each release + __version__ = version("codeflare-sdk") # use metadata associated with built package except PackageNotFoundError: - __version__ = "unknown" + __version__ = "v0.0.0" From 685b0b6929bb6b4e1c6faa662aac7c076134bbe1 Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Wed, 10 Jul 2024 14:57:11 +0200 Subject: [PATCH 290/496] Use existing CFO image with latest changes instead of building it --- .github/workflows/e2e_tests.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index b83afb4b..df618a61 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -24,6 +24,9 @@ concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} cancel-in-progress: true +env: + CODEFLARE_OPERATOR_IMG: "quay.io/project-codeflare/codeflare-operator:dev" + jobs: kubernetes: @@ -92,9 +95,7 @@ jobs: echo Setting up CodeFlare stack make setup-e2e echo Deploying CodeFlare operator - IMG="${REGISTRY_ADDRESS}"/codeflare-operator - make image-push -e IMG="${IMG}" - make deploy -e IMG="${IMG}" -e ENV="e2e" + make deploy -e IMG="${CODEFLARE_OPERATOR_IMG}" -e ENV="e2e" kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators codeflare-operator-manager cd .. 
From c308c0f05cabc2f17977e466223f4cb51f0a08a0 Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Tue, 9 Jul 2024 15:27:43 +0200 Subject: [PATCH 291/496] Add e2e tests using GPU to execute current test scenarios --- .github/workflows/e2e_tests.yaml | 30 +++++-------------- docs/e2e.md | 20 +++++++++++-- pyproject.toml | 3 +- tests/e2e/local_interactive_sdk_kind_test.py | 16 ++++++++-- tests/e2e/mnist.py | 5 +++- .../e2e/mnist_raycluster_sdk_aw_kind_test.py | 29 ++++++++++++------ tests/e2e/mnist_raycluster_sdk_kind_test.py | 22 ++++++++++---- tests/e2e/support.py | 2 +- 8 files changed, 83 insertions(+), 44 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index df618a61..d216df9d 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -30,26 +30,9 @@ env: jobs: kubernetes: - runs-on: ubuntu-20.04 + runs-on: ubuntu-20.04-4core-gpu steps: - - name: Cleanup - run: | - ls -lart - echo "Initial status:" - df -h - echo "Cleaning up resources:" - sudo swapoff -a - sudo rm -f /swapfile - sudo apt clean - sudo rm -rf /usr/share/dotnet - sudo rm -rf /opt/ghc - sudo rm -rf "/usr/local/share/boost" - sudo rm -rf "$AGENT_TOOLSDIRECTORY" - docker rmi $(docker image ls -aq) - echo "Final status:" - df -h - - name: Checkout code uses: actions/checkout@v4 with: @@ -85,9 +68,15 @@ jobs: python-version: '3.9' cache: 'pip' # caching pip dependencies + - name: Setup NVidia GPU environment for KinD + uses: ./common/github-actions/nvidia-gpu-setup + - name: Setup and start KinD cluster uses: ./common/github-actions/kind + - name: Install NVidia GPU operator for KinD + uses: ./common/github-actions/nvidia-gpu-operator + - name: Deploy CodeFlare stack id: deploy run: | @@ -104,9 +93,6 @@ jobs: with: user-name: sdk-user - - name: Add kueue resources - run: kubectl apply --server-side -f "https://github.com/kubernetes-sigs/kueue/releases/download/v0.6.2/manifests.yaml" - - name: Configure RBAC for sdk user with limited permissions run: | kubectl create clusterrole list-ingresses --verb=get,list --resource=ingresses @@ -136,7 +122,7 @@ jobs: pip install poetry poetry install --with test,docs echo "Running e2e tests..." - poetry run pytest -v -s ./tests/e2e -m kind > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 + poetry run pytest -v -s ./tests/e2e -m 'kind and nvidia_gpu' > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 env: GRPC_DNS_RESOLVER: "native" diff --git a/docs/e2e.md b/docs/e2e.md index 27324f70..039749d4 100644 --- a/docs/e2e.md +++ b/docs/e2e.md @@ -5,6 +5,9 @@ ## On KinD clusters Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127.0.0.1 kind`. This will map your localhost IP address to the KinD cluster's hostname. This is already performed on [GitHub Actions](https://github.com/project-codeflare/codeflare-common/blob/1edd775e2d4088a5a0bfddafb06ff3a773231c08/github-actions/kind/action.yml#L70-L72) +If the system you run on contains NVidia GPU then you can enable the GPU support in KinD, this will allow you to run also GPU tests. +To enable GPU on KinD follow [these instructions](https://www.substratus.ai/blog/kind-with-gpus). 
+ - Setup Phase: - Pull the [codeflare-operator repo](https://github.com/project-codeflare/codeflare-operator) and run the following make targets: ``` @@ -64,9 +67,13 @@ Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127 - Once we have the codeflare-operator, kuberay-operator and kueue running and ready, we can run the e2e test on the codeflare-sdk repository: ``` poetry install --with test,docs - poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py + poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_kind_test.py + ``` + - If the cluster doesn't have NVidia GPU support then we need to disable NVidia GPU tests by providing proper marker: + ``` + poetry install --with test,docs + poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_kind_test.py -m 'kind and not nvidia_gpu' ``` - ## On OpenShift clusters @@ -83,6 +90,10 @@ Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127 kubectl apply --server-side -k "github.com/opendatahub-io/kueue/config/rhoai?ref=dev" ``` +If the system you run on contains NVidia GPU then you can enable the GPU support on OpenShift, this will allow you to run also GPU tests. +To enable GPU on OpenShift follow [these instructions](https://docs.nvidia.com/datacenter/cloud-native/openshift/latest/introduction.html). +Currently the SDK doesn't support tolerations, so e2e tests can't be executed on nodes with taint (i.e. GPU taint). + - Test Phase: - Once we have the codeflare-operator, kuberay-operator and kueue running and ready, we can run the e2e test on the codeflare-sdk repository: ``` @@ -97,3 +108,8 @@ Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127 ``` poetry run pytest -v -s ./tests/e2e -m openshift --timeout=1200 ``` + - If the cluster doesn't have NVidia GPU support or GPU nodes have taint then we need to disable NVidia GPU tests by providing proper marker: + ``` + poetry install --with test,docs + poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_kind_test.py -m 'not nvidia_gpu' + ``` diff --git a/pyproject.toml b/pyproject.toml index 457e6de9..be225e90 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,7 @@ filterwarnings = [ ] markers = [ "kind", - "openshift" + "openshift", + "nvidia_gpu" ] addopts = "--timeout=900" diff --git a/tests/e2e/local_interactive_sdk_kind_test.py b/tests/e2e/local_interactive_sdk_kind_test.py index 8ca0bdac..4479b41c 100644 --- a/tests/e2e/local_interactive_sdk_kind_test.py +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -27,7 +27,16 @@ def test_local_interactives(self): create_kueue_resources(self) self.run_local_interactives() - def run_local_interactives(self): + @pytest.mark.nvidia_gpu + def test_local_interactives_nvidia_gpu(self): + self.setup_method() + create_namespace(self) + create_kueue_resources(self) + self.run_local_interactives(number_of_gpus=1) + + def run_local_interactives( + self, gpu_resource_name="nvidia.com/gpu", number_of_gpus=0 + ): ray_image = get_ray_image() cluster_name = "test-ray-cluster-li" @@ -43,6 +52,7 @@ def run_local_interactives(self): worker_cpu_limits=1, worker_memory_requests=1, worker_memory_limits=2, + worker_extended_resource_requests={gpu_resource_name: number_of_gpus}, image=ray_image, write_to_file=True, verify_tls=False, @@ -59,7 +69,7 @@ def run_local_interactives(self): ray.shutdown() ray.init(address=cluster.local_client_url(), logging_level="DEBUG") - @ray.remote + @ray.remote(num_gpus=number_of_gpus / 2) def 
heavy_calculation_part(num_iterations): result = 0.0 for i in range(num_iterations): @@ -68,7 +78,7 @@ def heavy_calculation_part(num_iterations): result += math.sin(i) * math.cos(j) * math.tan(k) return result - @ray.remote + @ray.remote(num_gpus=number_of_gpus / 2) def heavy_calculation(num_iterations): results = ray.get( [heavy_calculation_part.remote(num_iterations // 30) for _ in range(30)] diff --git a/tests/e2e/mnist.py b/tests/e2e/mnist.py index 2971d9c9..55ed91ea 100644 --- a/tests/e2e/mnist.py +++ b/tests/e2e/mnist.py @@ -32,6 +32,9 @@ print("MASTER_ADDR: is ", os.getenv("MASTER_ADDR")) print("MASTER_PORT: is ", os.getenv("MASTER_PORT")) +print("ACCELERATOR: is ", os.getenv("ACCELERATOR")) +ACCELERATOR = os.getenv("ACCELERATOR") + class LitMNIST(LightningModule): def __init__(self, data_dir=PATH_DATASETS, hidden_size=64, learning_rate=2e-4): @@ -149,7 +152,7 @@ def test_dataloader(self): # Initialize a trainer trainer = Trainer( - accelerator="auto", + accelerator=ACCELERATOR, # devices=1 if torch.cuda.is_available() else None, # limiting got iPython runs max_epochs=3, callbacks=[TQDMProgressBar(refresh_rate=20)], diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py index 2aa5da16..39bd25fd 100644 --- a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -24,9 +24,18 @@ def test_mnist_ray_cluster_sdk_kind(self): self.setup_method() create_namespace(self) create_kueue_resources(self) - self.run_mnist_raycluster_sdk_kind() + self.run_mnist_raycluster_sdk_kind(accelerator="cpu") - def run_mnist_raycluster_sdk_kind(self): + @pytest.mark.nvidia_gpu + def test_mnist_ray_cluster_sdk_kind_nvidia_gpu(self): + self.setup_method() + create_namespace(self) + create_kueue_resources(self) + self.run_mnist_raycluster_sdk_kind(accelerator="gpu", number_of_gpus=1) + + def run_mnist_raycluster_sdk_kind( + self, accelerator, gpu_resource_name="nvidia.com/gpu", number_of_gpus=0 + ): ray_image = get_ray_image() cluster = Cluster( @@ -36,11 +45,11 @@ def run_mnist_raycluster_sdk_kind(self): num_workers=1, head_cpus="500m", head_memory=2, - min_cpus="500m", - max_cpus=1, - min_memory=1, - max_memory=2, - num_gpus=0, + worker_cpu_requests="500m", + worker_cpu_limits=1, + worker_memory_requests=1, + worker_memory_limits=4, + worker_extended_resource_requests={gpu_resource_name: number_of_gpus}, image=ray_image, write_to_file=True, verify_tls=False, @@ -58,11 +67,11 @@ def run_mnist_raycluster_sdk_kind(self): cluster.details() - self.assert_jobsubmit_withoutlogin_kind(cluster) + self.assert_jobsubmit_withoutlogin_kind(cluster, accelerator, number_of_gpus) # Assertions - def assert_jobsubmit_withoutlogin_kind(self, cluster): + def assert_jobsubmit_withoutlogin_kind(self, cluster, accelerator, number_of_gpus): ray_dashboard = cluster.cluster_dashboard_uri() client = RayJobClient(address=ray_dashboard, verify=False) @@ -71,7 +80,9 @@ def assert_jobsubmit_withoutlogin_kind(self, cluster): runtime_env={ "working_dir": "./tests/e2e/", "pip": "./tests/e2e/mnist_pip_requirements.txt", + "env_vars": {"ACCELERATOR": accelerator}, }, + entrypoint_num_gpus=number_of_gpus, ) print(f"Submitted job with ID: {submission_id}") done = False diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index af5fcc1f..356d56f9 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -25,9 +25,18 @@ def 
test_mnist_ray_cluster_sdk_kind(self): self.setup_method() create_namespace(self) create_kueue_resources(self) - self.run_mnist_raycluster_sdk_kind() + self.run_mnist_raycluster_sdk_kind(accelerator="cpu") - def run_mnist_raycluster_sdk_kind(self): + @pytest.mark.nvidia_gpu + def test_mnist_ray_cluster_sdk_kind_nvidia_gpu(self): + self.setup_method() + create_namespace(self) + create_kueue_resources(self) + self.run_mnist_raycluster_sdk_kind(accelerator="gpu", number_of_gpus=1) + + def run_mnist_raycluster_sdk_kind( + self, accelerator, gpu_resource_name="nvidia.com/gpu", number_of_gpus=0 + ): ray_image = get_ray_image() cluster = Cluster( @@ -40,7 +49,8 @@ def run_mnist_raycluster_sdk_kind(self): worker_cpu_requests="500m", worker_cpu_limits=1, worker_memory_requests=1, - worker_memory_limits=2, + worker_memory_limits=4, + worker_extended_resource_requests={gpu_resource_name: number_of_gpus}, image=ray_image, write_to_file=True, verify_tls=False, @@ -57,11 +67,11 @@ def run_mnist_raycluster_sdk_kind(self): cluster.details() - self.assert_jobsubmit_withoutlogin_kind(cluster) + self.assert_jobsubmit_withoutlogin_kind(cluster, accelerator, number_of_gpus) # Assertions - def assert_jobsubmit_withoutlogin_kind(self, cluster): + def assert_jobsubmit_withoutlogin_kind(self, cluster, accelerator, number_of_gpus): ray_dashboard = cluster.cluster_dashboard_uri() client = RayJobClient(address=ray_dashboard, verify=False) @@ -70,7 +80,9 @@ def assert_jobsubmit_withoutlogin_kind(self, cluster): runtime_env={ "working_dir": "./tests/e2e/", "pip": "./tests/e2e/mnist_pip_requirements.txt", + "env_vars": {"ACCELERATOR": accelerator}, }, + entrypoint_num_gpus=number_of_gpus, ) print(f"Submitted job with ID: {submission_id}") done = False diff --git a/tests/e2e/support.py b/tests/e2e/support.py index d8a06bb7..3eb24153 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -90,7 +90,7 @@ def create_cluster_queue(self, cluster_queue, flavor): "resources": [ {"name": "cpu", "nominalQuota": 9}, {"name": "memory", "nominalQuota": "36Gi"}, - {"name": "nvidia.com/gpu", "nominalQuota": 0}, + {"name": "nvidia.com/gpu", "nominalQuota": 1}, ], } ], From 166185d5896f691e5d7b4863633440fbe0d8d376 Mon Sep 17 00:00:00 2001 From: Srihari Venkataramaiah Date: Mon, 15 Jul 2024 10:59:38 +0530 Subject: [PATCH 292/496] Increase Cluster configuration memory for upgrade tests (#585) --- tests/upgrade/raycluster_sdk_upgrade_test.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index ff98e28f..e5fd1876 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -52,8 +52,8 @@ def run_mnist_raycluster_sdk_oauth(self): head_memory=2, worker_cpu_requests=1, worker_cpu_limits=1, - worker_memory_requests=1, - worker_memory_limits=2, + worker_memory_requests=4, + worker_memory_limits=4, image=ray_image, write_to_file=True, verify_tls=False, From c46efe2540a99040794cb796b339cc735f1d1a20 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 16 Jul 2024 08:59:24 +0100 Subject: [PATCH 293/496] Remove dependabot --- .github/dependabot.yml | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index 6a7695c0..00000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,6 +0,0 @@ -version: 2 -updates: - - package-ecosystem: "pip" - directory: "/" - 
schedule: - interval: "weekly" From e7a45ba69bb2f49bf05c16b72f33669d993a8f92 Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Mon, 15 Jul 2024 20:37:17 +0530 Subject: [PATCH 294/496] Update cluster configuration to increase worker memory and remove ray-img input parameter given in cluster configuration for e2e tests --- tests/e2e/local_interactive_sdk_kind_test.py | 5 +---- tests/e2e/local_interactive_sdk_oauth_test.py | 5 +---- tests/e2e/mnist_raycluster_sdk_aw_kind_test.py | 3 --- tests/e2e/mnist_raycluster_sdk_kind_test.py | 3 --- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 5 +---- 5 files changed, 3 insertions(+), 18 deletions(-) diff --git a/tests/e2e/local_interactive_sdk_kind_test.py b/tests/e2e/local_interactive_sdk_kind_test.py index 4479b41c..6693117b 100644 --- a/tests/e2e/local_interactive_sdk_kind_test.py +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -37,8 +37,6 @@ def test_local_interactives_nvidia_gpu(self): def run_local_interactives( self, gpu_resource_name="nvidia.com/gpu", number_of_gpus=0 ): - ray_image = get_ray_image() - cluster_name = "test-ray-cluster-li" cluster = Cluster( @@ -51,9 +49,8 @@ def run_local_interactives( worker_cpu_requests="500m", worker_cpu_limits=1, worker_memory_requests=1, - worker_memory_limits=2, + worker_memory_limits=4, worker_extended_resource_requests={gpu_resource_name: number_of_gpus}, - image=ray_image, write_to_file=True, verify_tls=False, ) diff --git a/tests/e2e/local_interactive_sdk_oauth_test.py b/tests/e2e/local_interactive_sdk_oauth_test.py index 4177413e..0e5c0204 100644 --- a/tests/e2e/local_interactive_sdk_oauth_test.py +++ b/tests/e2e/local_interactive_sdk_oauth_test.py @@ -28,8 +28,6 @@ def test_local_interactives(self): self.run_local_interactives() def run_local_interactives(self): - ray_image = get_ray_image() - auth = TokenAuthentication( token=run_oc_command(["whoami", "--show-token=true"]), server=run_oc_command(["whoami", "--show-server=true"]), @@ -46,9 +44,8 @@ def run_local_interactives(self): num_workers=1, worker_cpu_requests=1, worker_cpu_limits=1, - worker_memory_requests=4, + worker_memory_requests=1, worker_memory_limits=4, - image=ray_image, verify_tls=False, ) ) diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py index 39bd25fd..d2d0995b 100644 --- a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -36,8 +36,6 @@ def test_mnist_ray_cluster_sdk_kind_nvidia_gpu(self): def run_mnist_raycluster_sdk_kind( self, accelerator, gpu_resource_name="nvidia.com/gpu", number_of_gpus=0 ): - ray_image = get_ray_image() - cluster = Cluster( ClusterConfiguration( name="mnist", @@ -50,7 +48,6 @@ def run_mnist_raycluster_sdk_kind( worker_memory_requests=1, worker_memory_limits=4, worker_extended_resource_requests={gpu_resource_name: number_of_gpus}, - image=ray_image, write_to_file=True, verify_tls=False, appwrapper=True, diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index 356d56f9..2635fecd 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -37,8 +37,6 @@ def test_mnist_ray_cluster_sdk_kind_nvidia_gpu(self): def run_mnist_raycluster_sdk_kind( self, accelerator, gpu_resource_name="nvidia.com/gpu", number_of_gpus=0 ): - ray_image = get_ray_image() - cluster = Cluster( ClusterConfiguration( name="mnist", @@ -51,7 +49,6 @@ def run_mnist_raycluster_sdk_kind( worker_memory_requests=1, 
worker_memory_limits=4, worker_extended_resource_requests={gpu_resource_name: number_of_gpus}, - image=ray_image, write_to_file=True, verify_tls=False, ) diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 4e85e9f1..0ddab720 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -28,8 +28,6 @@ def test_mnist_ray_cluster_sdk_auth(self): self.run_mnist_raycluster_sdk_oauth() def run_mnist_raycluster_sdk_oauth(self): - ray_image = get_ray_image() - auth = TokenAuthentication( token=run_oc_command(["whoami", "--show-token=true"]), server=run_oc_command(["whoami", "--show-server=true"]), @@ -47,8 +45,7 @@ def run_mnist_raycluster_sdk_oauth(self): worker_cpu_requests="500m", worker_cpu_limits=1, worker_memory_requests=1, - worker_memory_limits=2, - image=ray_image, + worker_memory_limits=4, write_to_file=True, verify_tls=False, ) From 4dca262d8ac3c0ec07f89dc4832c52f0c4bed3a9 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 16 Jul 2024 10:59:00 +0100 Subject: [PATCH 295/496] Add validation for Cluster configuration parameters --- src/codeflare_sdk/cluster/config.py | 88 +++++++++++++++++++---------- 1 file changed, 59 insertions(+), 29 deletions(-) diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 6a522fbc..610d53c4 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -18,10 +18,10 @@ Cluster object. """ -from dataclasses import dataclass, field import pathlib -import typing import warnings +from dataclasses import dataclass, field, fields +from typing import Dict, List, Optional, Union, get_args, get_origin dir = pathlib.Path(__file__).parent.parent.resolve() @@ -73,36 +73,37 @@ class ClusterConfiguration: """ name: str - namespace: str = None - head_info: list = field(default_factory=list) - head_cpus: typing.Union[int, str] = 2 - head_memory: typing.Union[int, str] = 8 - head_gpus: int = None # Deprecating - head_extended_resource_requests: typing.Dict[str, int] = field(default_factory=dict) - machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] - worker_cpu_requests: typing.Union[int, str] = 1 - worker_cpu_limits: typing.Union[int, str] = 1 - min_cpus: typing.Union[int, str] = None # Deprecating - max_cpus: typing.Union[int, str] = None # Deprecating + namespace: Optional[str] = None + head_info: List[str] = field(default_factory=list) + head_cpus: Union[int, str] = 2 + head_memory: Union[int, str] = 8 + head_gpus: Optional[int] = None # Deprecating + head_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + machine_types: List[str] = field( + default_factory=list + ) # ["m4.xlarge", "g4dn.xlarge"] + worker_cpu_requests: Union[int, str] = 1 + worker_cpu_limits: Union[int, str] = 1 + min_cpus: Optional[Union[int, str]] = None # Deprecating + max_cpus: Optional[Union[int, str]] = None # Deprecating num_workers: int = 1 - worker_memory_requests: typing.Union[int, str] = 2 - worker_memory_limits: typing.Union[int, str] = 2 - min_memory: typing.Union[int, str] = None # Deprecating - max_memory: typing.Union[int, str] = None # Deprecating - num_gpus: int = None # Deprecating + worker_memory_requests: Union[int, str] = 2 + worker_memory_limits: Union[int, str] = 2 + min_memory: Optional[Union[int, str]] = None # Deprecating + max_memory: Optional[Union[int, str]] = None # Deprecating + num_gpus: Optional[int] = None # Deprecating template: str 
= f"{dir}/templates/base-template.yaml" appwrapper: bool = False - envs: dict = field(default_factory=dict) + envs: Dict[str, str] = field(default_factory=dict) image: str = "" - image_pull_secrets: list = field(default_factory=list) + image_pull_secrets: List[str] = field(default_factory=list) write_to_file: bool = False verify_tls: bool = True - labels: dict = field(default_factory=dict) - worker_extended_resource_requests: typing.Dict[str, int] = field( - default_factory=dict - ) - extended_resource_mapping: typing.Dict[str, str] = field(default_factory=dict) + labels: Dict[str, str] = field(default_factory=dict) + worker_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + extended_resource_mapping: Dict[str, str] = field(default_factory=dict) overwrite_default_resource_mapping: bool = False + local_queue: Optional[str] = None def __post_init__(self): if not self.verify_tls: @@ -110,6 +111,7 @@ def __post_init__(self): "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) + self._validate_types() self._memory_to_string() self._str_mem_no_unit_add_GB() self._memory_to_resource() @@ -139,9 +141,7 @@ def _combine_extended_resource_mapping(self): **self.extended_resource_mapping, } - def _validate_extended_resource_requests( - self, extended_resources: typing.Dict[str, int] - ): + def _validate_extended_resource_requests(self, extended_resources: Dict[str, int]): for k in extended_resources.keys(): if k not in self.extended_resource_mapping.keys(): raise ValueError( @@ -206,4 +206,34 @@ def _memory_to_resource(self): warnings.warn("max_memory is being deprecated, use worker_memory_limits") self.worker_memory_limits = f"{self.max_memory}G" - local_queue: str = None + def _validate_types(self): + """Validate the types of all fields in the ClusterConfiguration dataclass.""" + for field_info in fields(self): + value = getattr(self, field_info.name) + expected_type = field_info.type + if not self._is_type(value, expected_type): + raise TypeError( + f"'{field_info.name}' should be of type {expected_type}" + ) + + @staticmethod + def _is_type(value, expected_type): + """Check if the value matches the expected type.""" + + def check_type(value, expected_type): + origin_type = get_origin(expected_type) + args = get_args(expected_type) + if origin_type is Union: + return any(check_type(value, union_type) for union_type in args) + if origin_type is list: + return all(check_type(elem, args[0]) for elem in value) + if origin_type is dict: + return all( + check_type(k, args[0]) and check_type(v, args[1]) + for k, v in value.items() + ) + if origin_type is tuple: + return all(check_type(elem, etype) for elem, etype in zip(value, args)) + return isinstance(value, expected_type) + + return check_type(value, expected_type) From 9c1e65d2460ca0cd85a35a4031ab4917f3e672ea Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 16 Jul 2024 15:16:37 +0100 Subject: [PATCH 296/496] Unit test to validate Cluster configuration parameters --- tests/unit_test.py | 6 ++++++ tests/unit_test_support.py | 20 ++++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/tests/unit_test.py b/tests/unit_test.py index 2decade2..bb14d2b2 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -67,6 +67,7 @@ from tests.unit_test_support import ( createClusterWithConfig, createClusterConfig, + createClusterWrongType, ) import codeflare_sdk.utils.kube_api_helpers @@ -268,6 +269,11 @@ def test_config_creation(): assert config.appwrapper == True +def 
test_config_creation_wrong_type(): + with pytest.raises(TypeError): + config = createClusterWrongType() + + def test_cluster_creation(mocker): # Create AppWrapper containing a Ray Cluster with no local queue specified mocker.patch("kubernetes.client.ApisApi.get_api_versions") diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 36c7d871..25e206c5 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -31,3 +31,23 @@ def createClusterWithConfig(mocker): ) cluster = Cluster(createClusterConfig()) return cluster + + +def createClusterWrongType(): + config = ClusterConfiguration( + name="unit-test-cluster", + namespace="ns", + num_workers=2, + worker_cpu_requests=[], + worker_cpu_limits=4, + worker_memory_requests=5, + worker_memory_limits=6, + worker_extended_resource_requests={"nvidia.com/gpu": 7}, + appwrapper=True, + machine_types=[True, False], + image_pull_secrets=["unit-test-pull-secret"], + image="quay.io/rhoai/ray:2.23.0-py39-cu121", + write_to_file=True, + labels={1: 1}, + ) + return config From 88577c7253861592bb005d1b1e817cd9561bc16b Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Thu, 18 Jul 2024 16:10:20 +0530 Subject: [PATCH 297/496] Updated e2e tests to support S3 compatible storage bucket from whicyh to download MNISt datasets for disconnected automation --- .pre-commit-config.yaml | 1 + docs/e2e.md | 27 ++- tests/e2e/local_interactive_sdk_oauth_test.py | 3 + tests/e2e/minio_deployment.yaml | 163 ++++++++++++++++++ tests/e2e/mnist.py | 91 +++++++++- tests/e2e/mnist_pip_requirements.txt | 1 + .../e2e/mnist_raycluster_sdk_aw_kind_test.py | 3 +- tests/e2e/mnist_raycluster_sdk_kind_test.py | 2 +- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 8 +- tests/e2e/support.py | 50 +++++- tests/upgrade/raycluster_sdk_upgrade_test.py | 1 + 11 files changed, 331 insertions(+), 19 deletions(-) create mode 100644 tests/e2e/minio_deployment.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 89e037cd..7928084d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,6 +7,7 @@ repos: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml + args: [--allow-multiple-documents] - id: check-added-large-files - repo: https://github.com/psf/black rev: 23.3.0 diff --git a/docs/e2e.md b/docs/e2e.md index 039749d4..f3d0c940 100644 --- a/docs/e2e.md +++ b/docs/e2e.md @@ -108,8 +108,25 @@ Currently the SDK doesn't support tolerations, so e2e tests can't be executed on ``` poetry run pytest -v -s ./tests/e2e -m openshift --timeout=1200 ``` - - If the cluster doesn't have NVidia GPU support or GPU nodes have taint then we need to disable NVidia GPU tests by providing proper marker: - ``` - poetry install --with test,docs - poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_kind_test.py -m 'not nvidia_gpu' - ``` + +## On OpenShift Disconnected clusters + +- In addition to setup phase mentioned above in case of Openshift cluster, Disconnected environment requires following pre-requisites : + - Mirror Image registry : + - Image mirror registry is used to host set of container images required locally for the applications and services. This ensures to pull images without needing an external network connection. It also ensures continuous operation and deployment capabilities in a network-isolated environment. + - PYPI Mirror Index : + - When trying to install Python packages in a disconnected environment, the pip command might fail because the connection cannot install packages from external URLs. 
This issue can be resolved by setting up PIP Mirror Index on separate endpoint in same environment. + - S3 compatible storage : + - Some of our distributed training examples require an external storage solution so that all nodes can access the same data in disconnected environment (For example: common-datasets and model files). + - Minio S3 compatible storage type instance can be deployed in disconnected environment using `/tests/e2e/minio_deployment.yaml` or using support methods in e2e test suite. + - The following are environment variables for configuring PIP index URl for accessing the common-python packages required and the S3 or Minio storage for your Ray Train script or interactive session. + ``` + export RAY_IMAGE=quay.io/project-codeflare/ray@sha256: (prefer image digest over image tag in disocnnected environment) + PIP_INDEX_URL=https:///root/pypi/+simple/ \ + PIP_TRUSTED_HOST= \ + AWS_DEFAULT_ENDPOINT= \ + AWS_ACCESS_KEY_ID= \ + AWS_SECRET_ACCESS_KEY= \ + AWS_STORAGE_BUCKET= + AWS_STORAGE_BUCKET_MNIST_DIR= + ``` diff --git a/tests/e2e/local_interactive_sdk_oauth_test.py b/tests/e2e/local_interactive_sdk_oauth_test.py index 0e5c0204..b5229deb 100644 --- a/tests/e2e/local_interactive_sdk_oauth_test.py +++ b/tests/e2e/local_interactive_sdk_oauth_test.py @@ -28,6 +28,8 @@ def test_local_interactives(self): self.run_local_interactives() def run_local_interactives(self): + ray_image = get_ray_image() + auth = TokenAuthentication( token=run_oc_command(["whoami", "--show-token=true"]), server=run_oc_command(["whoami", "--show-server=true"]), @@ -46,6 +48,7 @@ def run_local_interactives(self): worker_cpu_limits=1, worker_memory_requests=1, worker_memory_limits=4, + image=ray_image, verify_tls=False, ) ) diff --git a/tests/e2e/minio_deployment.yaml b/tests/e2e/minio_deployment.yaml new file mode 100644 index 00000000..86d4ef01 --- /dev/null +++ b/tests/e2e/minio_deployment.yaml @@ -0,0 +1,163 @@ +--- +kind: PersistentVolumeClaim +apiVersion: v1 +metadata: + name: minio-pvc +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 20Gi + volumeMode: Filesystem +--- +kind: Secret +apiVersion: v1 +metadata: + name: minio-secret +stringData: + # change the username and password to your own values. 
+ # ensure that the user is at least 3 characters long and the password at least 8 + minio_root_user: minio + minio_root_password: minio123 +--- +kind: Deployment +apiVersion: apps/v1 +metadata: + name: minio +spec: + replicas: 1 + selector: + matchLabels: + app: minio + template: + metadata: + creationTimestamp: null + labels: + app: minio + spec: + volumes: + - name: data + persistentVolumeClaim: + claimName: minio-pvc + containers: + - resources: + limits: + cpu: 250m + memory: 1Gi + requests: + cpu: 20m + memory: 100Mi + readinessProbe: + tcpSocket: + port: 9000 + initialDelaySeconds: 5 + timeoutSeconds: 1 + periodSeconds: 5 + successThreshold: 1 + failureThreshold: 3 + terminationMessagePath: /dev/termination-log + name: minio + livenessProbe: + tcpSocket: + port: 9000 + initialDelaySeconds: 30 + timeoutSeconds: 1 + periodSeconds: 5 + successThreshold: 1 + failureThreshold: 3 + env: + - name: MINIO_ROOT_USER + valueFrom: + secretKeyRef: + name: minio-secret + key: minio_root_user + - name: MINIO_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: minio-secret + key: minio_root_password + ports: + - containerPort: 9000 + protocol: TCP + - containerPort: 9090 + protocol: TCP + imagePullPolicy: IfNotPresent + volumeMounts: + - name: data + mountPath: /data + subPath: minio + terminationMessagePolicy: File + image: >- + quay.io/minio/minio:RELEASE.2024-06-22T05-26-45Z + # In case of disconnected environment, use image digest instead of tag + # For example : /minio/minio@sha256:6b3abf2f59286b985bfde2b23e37230b466081eda5dccbf971524d54c8e406b5 + args: + - server + - /data + - --console-address + - :9090 + restartPolicy: Always + terminationGracePeriodSeconds: 30 + dnsPolicy: ClusterFirst + securityContext: {} + schedulerName: default-scheduler + strategy: + type: Recreate + revisionHistoryLimit: 10 + progressDeadlineSeconds: 600 +--- +kind: Service +apiVersion: v1 +metadata: + name: minio-service +spec: + ipFamilies: + - IPv4 + ports: + - name: api + protocol: TCP + port: 9000 + targetPort: 9000 + - name: ui + protocol: TCP + port: 9090 + targetPort: 9090 + internalTrafficPolicy: Cluster + type: ClusterIP + ipFamilyPolicy: SingleStack + sessionAffinity: None + selector: + app: minio +--- +kind: Route +apiVersion: route.openshift.io/v1 +metadata: + name: minio-api +spec: + to: + kind: Service + name: minio-service + weight: 100 + port: + targetPort: api + wildcardPolicy: None + tls: + termination: edge + insecureEdgeTerminationPolicy: Redirect +--- +kind: Route +apiVersion: route.openshift.io/v1 +metadata: + name: minio-ui +spec: + to: + kind: Service + name: minio-service + weight: 100 + port: + targetPort: ui + wildcardPolicy: None + tls: + termination: edge + insecureEdgeTerminationPolicy: Redirect diff --git a/tests/e2e/mnist.py b/tests/e2e/mnist.py index 55ed91ea..4c382f67 100644 --- a/tests/e2e/mnist.py +++ b/tests/e2e/mnist.py @@ -15,6 +15,7 @@ import os import torch +import requests from pytorch_lightning import LightningModule, Trainer from pytorch_lightning.callbacks.progress import TQDMProgressBar from torch import nn @@ -23,9 +24,15 @@ from torchmetrics import Accuracy from torchvision import transforms from torchvision.datasets import MNIST +import gzip +import shutil +from minio import Minio + PATH_DATASETS = os.environ.get("PATH_DATASETS", ".") BATCH_SIZE = 256 if torch.cuda.is_available() else 64 + +local_mnist_path = os.path.dirname(os.path.abspath(__file__)) # %% print("prior to running the trainer") @@ -35,6 +42,25 @@ print("ACCELERATOR: is ", os.getenv("ACCELERATOR")) 
ACCELERATOR = os.getenv("ACCELERATOR") +STORAGE_BUCKET_EXISTS = "AWS_DEFAULT_ENDPOINT" in os.environ +print("STORAGE_BUCKET_EXISTS: ", STORAGE_BUCKET_EXISTS) + +print( + f'Storage_Bucket_Default_Endpoint : is {os.environ.get("AWS_DEFAULT_ENDPOINT")}' + if "AWS_DEFAULT_ENDPOINT" in os.environ + else "" +) +print( + f'Storage_Bucket_Name : is {os.environ.get("AWS_STORAGE_BUCKET")}' + if "AWS_STORAGE_BUCKET" in os.environ + else "" +) +print( + f'Storage_Bucket_Mnist_Directory : is {os.environ.get("AWS_STORAGE_BUCKET_MNIST_DIR")}' + if "AWS_STORAGE_BUCKET_MNIST_DIR" in os.environ + else "" +) + class LitMNIST(LightningModule): def __init__(self, data_dir=PATH_DATASETS, hidden_size=64, learning_rate=2e-4): @@ -114,19 +140,74 @@ def configure_optimizers(self): def prepare_data(self): # download print("Downloading MNIST dataset...") - MNIST(self.data_dir, train=True, download=True) - MNIST(self.data_dir, train=False, download=True) + + if ( + STORAGE_BUCKET_EXISTS + and os.environ.get("AWS_DEFAULT_ENDPOINT") != "" + and os.environ.get("AWS_DEFAULT_ENDPOINT") != None + ): + print("Using storage bucket to download datasets...") + + dataset_dir = os.path.join(self.data_dir, "MNIST/raw") + endpoint = os.environ.get("AWS_DEFAULT_ENDPOINT") + access_key = os.environ.get("AWS_ACCESS_KEY_ID") + secret_key = os.environ.get("AWS_SECRET_ACCESS_KEY") + bucket_name = os.environ.get("AWS_STORAGE_BUCKET") + + client = Minio( + endpoint, + access_key=access_key, + secret_key=secret_key, + cert_check=False, + ) + + if not os.path.exists(dataset_dir): + os.makedirs(dataset_dir) + else: + print(f"Directory '{dataset_dir}' already exists") + + # To download datasets from storage bucket's specific directory, use prefix to provide directory name + prefix = os.environ.get("AWS_STORAGE_BUCKET_MNIST_DIR") + # download all files from prefix folder of storage bucket recursively + for item in client.list_objects(bucket_name, prefix=prefix, recursive=True): + file_name = item.object_name[len(prefix) + 1 :] + dataset_file_path = os.path.join(dataset_dir, file_name) + if not os.path.exists(dataset_file_path): + client.fget_object(bucket_name, item.object_name, dataset_file_path) + else: + print(f"File-path '{dataset_file_path}' already exists") + # Unzip files + with gzip.open(dataset_file_path, "rb") as f_in: + with open(dataset_file_path.split(".")[:-1][0], "wb") as f_out: + shutil.copyfileobj(f_in, f_out) + # delete zip file + os.remove(dataset_file_path) + unzipped_filepath = dataset_file_path.split(".")[0] + if os.path.exists(unzipped_filepath): + print( + f"Unzipped and saved dataset file to path - {unzipped_filepath}" + ) + download_datasets = False + + else: + print("Using default MNIST mirror reference to download datasets...") + download_datasets = True + + MNIST(self.data_dir, train=True, download=download_datasets) + MNIST(self.data_dir, train=False, download=download_datasets) def setup(self, stage=None): # Assign train/val datasets for use in dataloaders if stage == "fit" or stage is None: - mnist_full = MNIST(self.data_dir, train=True, transform=self.transform) + mnist_full = MNIST( + self.data_dir, train=True, transform=self.transform, download=False + ) self.mnist_train, self.mnist_val = random_split(mnist_full, [55000, 5000]) # Assign test dataset for use in dataloader(s) if stage == "test" or stage is None: self.mnist_test = MNIST( - self.data_dir, train=False, transform=self.transform + self.data_dir, train=False, transform=self.transform, download=False ) def train_dataloader(self): @@ -145,7 +226,7 @@ 
def test_dataloader(self): # Init DataLoader from MNIST Dataset -model = LitMNIST() +model = LitMNIST(data_dir=local_mnist_path) print("GROUP: ", int(os.environ.get("GROUP_WORLD_SIZE", 1))) print("LOCAL: ", int(os.environ.get("LOCAL_WORLD_SIZE", 1))) diff --git a/tests/e2e/mnist_pip_requirements.txt b/tests/e2e/mnist_pip_requirements.txt index 4c9d5fcb..907e9a52 100644 --- a/tests/e2e/mnist_pip_requirements.txt +++ b/tests/e2e/mnist_pip_requirements.txt @@ -1,3 +1,4 @@ pytorch_lightning==1.9.5 torchmetrics==0.9.1 torchvision==0.12.0 +minio diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py index d2d0995b..012098a4 100644 --- a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -19,6 +19,7 @@ def setup_method(self): def teardown_method(self): delete_namespace(self) + delete_kueue_resources(self) def test_mnist_ray_cluster_sdk_kind(self): self.setup_method() @@ -77,7 +78,7 @@ def assert_jobsubmit_withoutlogin_kind(self, cluster, accelerator, number_of_gpu runtime_env={ "working_dir": "./tests/e2e/", "pip": "./tests/e2e/mnist_pip_requirements.txt", - "env_vars": {"ACCELERATOR": accelerator}, + "env_vars": get_setup_env_variables(ACCELERATOR=accelerator), }, entrypoint_num_gpus=number_of_gpus, ) diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index 2635fecd..2623b36c 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -77,7 +77,7 @@ def assert_jobsubmit_withoutlogin_kind(self, cluster, accelerator, number_of_gpu runtime_env={ "working_dir": "./tests/e2e/", "pip": "./tests/e2e/mnist_pip_requirements.txt", - "env_vars": {"ACCELERATOR": accelerator}, + "env_vars": get_setup_env_variables(ACCELERATOR=accelerator), }, entrypoint_num_gpus=number_of_gpus, ) diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 0ddab720..bc052bea 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -28,6 +28,8 @@ def test_mnist_ray_cluster_sdk_auth(self): self.run_mnist_raycluster_sdk_oauth() def run_mnist_raycluster_sdk_oauth(self): + ray_image = get_ray_image() + auth = TokenAuthentication( token=run_oc_command(["whoami", "--show-token=true"]), server=run_oc_command(["whoami", "--show-server=true"]), @@ -42,10 +44,11 @@ def run_mnist_raycluster_sdk_oauth(self): num_workers=1, head_cpus="500m", head_memory=2, - worker_cpu_requests="500m", + worker_cpu_requests=1, worker_cpu_limits=1, worker_memory_requests=1, worker_memory_limits=4, + image=ray_image, write_to_file=True, verify_tls=False, ) @@ -73,6 +76,7 @@ def assert_jobsubmit_withoutLogin(self, cluster): "runtime_env": { "working_dir": "./tests/e2e/", "pip": "./tests/e2e/mnist_pip_requirements.txt", + "env_vars": get_setup_env_variables(), }, } try: @@ -100,7 +104,9 @@ def assert_jobsubmit_withlogin(self, cluster): runtime_env={ "working_dir": "./tests/e2e/", "pip": "./tests/e2e/mnist_pip_requirements.txt", + "env_vars": get_setup_env_variables(), }, + entrypoint_num_cpus=1, ) print(f"Submitted job with ID: {submission_id}") done = False diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 3eb24153..c8346909 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -12,17 +12,55 @@ def get_ray_image(): return os.getenv("RAY_IMAGE", default_ray_image) +def get_setup_env_variables(**kwargs): + env_vars = dict() + + # 
Use input parameters provided for this function as environment variables + for key, value in kwargs.items(): + env_vars[str(key)] = value + + # Use specified pip index url instead of default(https://pypi.org/simple) if related environment variables exists + if ( + "PIP_INDEX_URL" in os.environ + and os.environ.get("PIP_INDEX_URL") != None + and os.environ.get("PIP_INDEX_URL") != "" + ): + env_vars["PIP_INDEX_URL"] = os.environ.get("PIP_INDEX_URL") + env_vars["PIP_TRUSTED_HOST"] = os.environ.get("PIP_TRUSTED_HOST") + else: + env_vars["PIP_INDEX_URL"] = "https://pypi.org/simple/" + env_vars["PIP_TRUSTED_HOST"] = "pypi.org" + + # Use specified storage bucket reference from which to download datasets + if ( + "AWS_DEFAULT_ENDPOINT" in os.environ + and os.environ.get("AWS_DEFAULT_ENDPOINT") != None + and os.environ.get("AWS_DEFAULT_ENDPOINT") != "" + ): + env_vars["AWS_DEFAULT_ENDPOINT"] = os.environ.get("AWS_DEFAULT_ENDPOINT") + env_vars["AWS_ACCESS_KEY_ID"] = os.environ.get("AWS_ACCESS_KEY_ID") + env_vars["AWS_SECRET_ACCESS_KEY"] = os.environ.get("AWS_SECRET_ACCESS_KEY") + env_vars["AWS_STORAGE_BUCKET"] = os.environ.get("AWS_STORAGE_BUCKET") + env_vars["AWS_STORAGE_BUCKET_MNIST_DIR"] = os.environ.get( + "AWS_STORAGE_BUCKET_MNIST_DIR" + ) + return env_vars + + def random_choice(): alphabet = string.ascii_lowercase + string.digits return "".join(random.choices(alphabet, k=5)) def create_namespace(self): - self.namespace = f"test-ns-{random_choice()}" - namespace_body = client.V1Namespace( - metadata=client.V1ObjectMeta(name=self.namespace) - ) - self.api_instance.create_namespace(namespace_body) + try: + self.namespace = f"test-ns-{random_choice()}" + namespace_body = client.V1Namespace( + metadata=client.V1ObjectMeta(name=self.namespace) + ) + self.api_instance.create_namespace(namespace_body) + except Exception as e: + return RuntimeError(e) def create_new_resource_flavor(self): @@ -60,7 +98,7 @@ def initialize_kubernetes_client(self): config.load_kube_config() # Initialize Kubernetes client self.api_instance = client.CoreV1Api() - self.custom_api = kubernetes.client.CustomObjectsApi(self.api_instance.api_client) + self.custom_api = client.CustomObjectsApi(self.api_instance.api_client) def run_oc_command(args): diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index e5fd1876..87f61f15 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -121,6 +121,7 @@ def assert_jobsubmit_withlogin(self, cluster): runtime_env={ "working_dir": "./tests/e2e/", "pip": "./tests/e2e/mnist_pip_requirements.txt", + "env_vars": get_setup_env_variables(), }, ) print(f"Submitted job with ID: {submission_id}") From 304a1e766036205739825615e5c8b0399f5602b3 Mon Sep 17 00:00:00 2001 From: Shilpa Chugh Date: Mon, 22 Jul 2024 13:50:14 +0530 Subject: [PATCH 298/496] Add unit tests for RHOAI ray image --- tests/test-case-custom-image.yaml | 156 ++++++++++++++++++++++++++++++ tests/unit_test.py | 63 ++++++++++-- tests/unit_test_support.py | 1 - 3 files changed, 211 insertions(+), 9 deletions(-) create mode 100644 tests/test-case-custom-image.yaml diff --git a/tests/test-case-custom-image.yaml b/tests/test-case-custom-image.yaml new file mode 100644 index 00000000..8a417a58 --- /dev/null +++ b/tests/test-case-custom-image.yaml @@ -0,0 +1,156 @@ +apiVersion: ray.io/v1 +kind: RayCluster +metadata: + annotations: + app.kubernetes.io/managed-by: test-prefix + labels: + controller-tools.k8s.io: '1.0' + 
kueue.x-k8s.io/queue-name: local-queue-default + testlabel: test + testlabel2: test + name: unit-test-cluster-custom-image + namespace: ns +spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + enableIngress: false + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + resources: '"{}"' + serviceType: ClusterIP + template: + spec: + containers: + - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + requests: + cpu: 2 + memory: 8G + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: + - name: unit-test-pull-secret + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + rayVersion: 2.23.0 + workerGroupSpecs: + - groupName: small-group-unit-test-cluster-custom-image + maxReplicas: 2 + minReplicas: 2 + rayStartParams: + block: 'true' + num-gpus: '7' + resources: '"{}"' + replicas: 2 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 4 + memory: 6G + nvidia.com/gpu: 7 + requests: + cpu: 3 + memory: 5G + nvidia.com/gpu: 7 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: + - name: unit-test-pull-secret + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert diff --git a/tests/unit_test.py b/tests/unit_test.py index bb14d2b2..2709894e 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -262,7 +262,6 @@ def test_config_creation(): assert config.worker_cpu_requests == 3 and config.worker_cpu_limits == 4 assert config.worker_memory_requests == "5G" and config.worker_memory_limits == "6G" assert 
config.worker_extended_resource_requests == {"nvidia.com/gpu": 7} - assert config.image == "quay.io/rhoai/ray:2.23.0-py39-cu121" assert config.template == f"{parent}/src/codeflare_sdk/templates/base-template.yaml" assert config.machine_types == ["cpu.small", "gpu.large"] assert config.image_pull_secrets == ["unit-test-pull-secret"] @@ -415,7 +414,6 @@ def test_cluster_creation_no_mcad_local_queue(mocker): worker_extended_resource_requests={"nvidia.com/gpu": 7}, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=True, appwrapper=False, local_queue="local-queue-default", @@ -443,7 +441,6 @@ def test_default_cluster_creation(mocker): ) default_config = ClusterConfiguration( name="unit-test-default-cluster", - image="quay.io/rhoai/ray:2.23.0-py39-cu121", appwrapper=True, ) cluster = Cluster(default_config) @@ -459,6 +456,61 @@ def test_default_cluster_creation(mocker): assert cluster.config.namespace == "opendatahub" +def test_cluster_creation_with_custom_image(mocker): + # With written resources + # Create Ray Cluster with local queue specified + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + mocker.patch("os.environ.get", return_value="test-prefix") + config = createClusterConfig() + config.name = "unit-test-cluster-custom-image" + config.appwrapper = False + config.image = "quay.io/project-codeflare/ray:2.20.0-py39-cu118" + config.local_queue = "local-queue-default" + config.labels = {"testlabel": "test", "testlabel2": "test"} + cluster = Cluster(config) + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-custom-image.yaml" + assert cluster.app_wrapper_name == "unit-test-cluster-custom-image" + assert filecmp.cmp( + f"{aw_dir}unit-test-cluster-custom-image.yaml", + f"{parent}/tests/test-case-custom-image.yaml", + shallow=True, + ) + # With resources loaded in memory + config = ClusterConfiguration( + name="unit-test-cluster-custom-image", + namespace="ns", + num_workers=2, + worker_cpu_requests=3, + worker_cpu_limits=4, + worker_memory_requests=5, + worker_memory_limits=6, + worker_extended_resource_requests={"nvidia.com/gpu": 7}, + machine_types=["cpu.small", "gpu.large"], + image_pull_secrets=["unit-test-pull-secret"], + image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", + write_to_file=True, + appwrapper=False, + local_queue="local-queue-default", + labels={"testlabel": "test", "testlabel2": "test"}, + ) + cluster = Cluster(config) + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-custom-image.yaml" + assert cluster.app_wrapper_name == "unit-test-cluster-custom-image" + assert filecmp.cmp( + f"{aw_dir}unit-test-cluster-custom-image.yaml", + f"{parent}/tests/test-case-custom-image.yaml", + shallow=True, + ) + + def test_gen_names_with_name(mocker): mocker.patch.object( uuid, "uuid4", return_value=uuid.UUID("00000000-0000-0000-0000-000000000001") @@ -792,7 +844,6 @@ def test_ray_job_wrapping(mocker): return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) cluster = cluster = createClusterWithConfig(mocker) - cluster.config.image = "quay.io/rhoai/ray:2.23.0-py39-cu121" mocker.patch( 
"ray.job_submission.JobSubmissionClient._check_connection_and_version_with_url", return_value="None", @@ -910,7 +961,6 @@ def test_ray_details(mocker, capsys): ClusterConfiguration( name="raytest2", namespace="ns", - image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=True, appwrapper=True, local_queue="local_default_queue", @@ -2313,7 +2363,6 @@ def test_cluster_status(mocker): ClusterConfiguration( name="test", namespace="ns", - image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=True, appwrapper=True, local_queue="local_default_queue", @@ -2408,7 +2457,6 @@ def test_wait_ready(mocker, capsys): ClusterConfiguration( name="test", namespace="ns", - image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=True, appwrapper=True, local_queue="local-queue-default", @@ -2635,7 +2683,6 @@ def throw_if_getting_raycluster(group, version, namespace, plural): cluster = Cluster( ClusterConfiguration( "test_cluster", - image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=False, ) ) diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 25e206c5..9fcdd5a5 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -17,7 +17,6 @@ def createClusterConfig(): appwrapper=True, machine_types=["cpu.small", "gpu.large"], image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/rhoai/ray:2.23.0-py39-cu121", write_to_file=True, ) return config From f100ba16049677fb4feb904fc013158882fe347d Mon Sep 17 00:00:00 2001 From: Kevin Date: Fri, 19 Jul 2024 18:10:33 -0400 Subject: [PATCH 299/496] rm default namespace from demo notebooks Signed-off-by: Kevin --- .../additional-demos/hf_interactive.ipynb | 1039 ++++++----- .../additional-demos/local_interactive.ipynb | 7 +- .../additional-demos/ray_job_client.ipynb | 1 - demo-notebooks/guided-demos/0_basic_ray.ipynb | 1 - .../guided-demos/1_cluster_job_client.ipynb | 1 - .../guided-demos/2_basic_interactive.ipynb | 6 +- .../notebook-ex-outputs/0_basic_ray.ipynb | 45 +- .../1_cluster_job_client.ipynb | 1 - .../2_basic_interactive.ipynb | 1580 ++++++++--------- .../preview_nbs/0_basic_ray.ipynb | 1 - .../preview_nbs/1_cluster_job_client.ipynb | 1 - .../preview_nbs/2_basic_interactive.ipynb | 6 +- 12 files changed, 1337 insertions(+), 1352 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index ad552451..fe4cd429 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -90,7 +90,6 @@ "# Create our cluster and submit\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(name='hfgputest', \n", - " namespace=\"default\", # Update to your namespace\n", " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=1,\n", " num_workers=1,\n", @@ -153,13 +152,13 @@ ], "text/plain": [ "╭─────────────────────────╮\n", - "│ \u001B[3m \u001B[0m\u001B[1;3m 🚀 List of CodeFlare\u001B[0m\u001B[3m \u001B[0m │\n", - "│ \u001B[3m \u001B[0m\u001B[1;3mclusters in queue🚀\u001B[0m\u001B[3m \u001B[0m │\n", + "│ \u001b[3m \u001b[0m\u001b[1;3m 🚀 List of CodeFlare\u001b[0m\u001b[3m \u001b[0m │\n", + "│ \u001b[3m \u001b[0m\u001b[1;3mclusters in queue🚀\u001b[0m\u001b[3m \u001b[0m │\n", "│ +-----------+---------+ │\n", - "│ |\u001B[1m \u001B[0m\u001B[1mName \u001B[0m\u001B[1m 
\u001B[0m|\u001B[1m \u001B[0m\u001B[1mStatus \u001B[0m\u001B[1m \u001B[0m| │\n", + "│ |\u001b[1m \u001b[0m\u001b[1mName \u001b[0m\u001b[1m \u001b[0m|\u001b[1m \u001b[0m\u001b[1mStatus \u001b[0m\u001b[1m \u001b[0m| │\n", "│ +===========+=========+ │\n", - "│ |\u001B[36m \u001B[0m\u001B[36mhfgputest\u001B[0m\u001B[36m \u001B[0m|\u001B[35m \u001B[0m\u001B[35mpending\u001B[0m\u001B[35m \u001B[0m| │\n", - "│ |\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m|\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m| │\n", + "│ |\u001b[36m \u001b[0m\u001b[36mhfgputest\u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35mpending\u001b[0m\u001b[35m \u001b[0m| │\n", + "│ |\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m| │\n", "│ +-----------+---------+ │\n", "╰─────────────────────────╯\n" ] @@ -240,22 +239,22 @@ "\n" ], "text/plain": [ - "\u001B[3m \u001B[0m\u001B[1;3m 🚀 List of CodeFlare clusters 🚀\u001B[0m\u001B[3m \u001B[0m\n", - "\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\n", + "\u001b[3m \u001b[0m\u001b[1;3m 🚀 List of CodeFlare clusters 🚀\u001b[0m\u001b[3m \u001b[0m\n", + "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", " ╭────────────────────────────────────────────────────────────────╮ \n", - " │ \u001B[1;37;42mOwner\u001B[0m │ \n", - " │ \u001B[1;4mhfgputest\u001B[0m Active ✅ │ \n", + " │ \u001b[1;37;42mOwner\u001b[0m │ \n", + " │ \u001b[1;4mhfgputest\u001b[0m Active ✅ │ \n", " │ │ \n", - " │ \u001B[1mURI:\u001B[0m ray://hfgputest-head-svc.default.svc:10001 │ \n", + " │ \u001b[1mURI:\u001b[0m ray://hfgputest-head-svc.default.svc:10001 │ \n", " │ │ \n", - " │ \u001B]8;id=552692;ray-dashboard-hfgputest-default.apps.prepfullinstall.psap.aws.rhperfscale.org\u001B\\\u001B[4;34mDashboard🔗\u001B[0m\u001B]8;;\u001B\\ │ \n", + " │ \u001b]8;id=552692;ray-dashboard-hfgputest-default.apps.prepfullinstall.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", " │ │ \n", - " │ \u001B[3m Cluster Resources \u001B[0m │ \n", + " │ \u001b[3m Cluster Resources \u001b[0m │ \n", " │ ╭─ Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001B[1m \u001B[0m\u001B[1mMin\u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mMax\u001B[0m\u001B[1m \u001B[0m │ │ \u001B[1m \u001B[0m\u001B[1mMemory \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mCPU \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mGPU \u001B[0m\u001B[1m \u001B[0m │ │ \n", - " │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", - " │ │ \u001B[36m \u001B[0m\u001B[36m1 \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m1 \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m16G~16G \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m8 \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m4 \u001B[0m\u001B[35m \u001B[0m │ │ \n", - " │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", + " │ │ \u001b[1m 
\u001b[0m\u001b[1mMin\u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mMax\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", + " │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001b[36m \u001b[0m\u001b[36m1 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m16G~16G \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m8 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m4 \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", " │ ╰────────────╯ ╰──────────────────────────────────────╯ │ \n", " ╰────────────────────────────────────────────────────────────────╯ \n" ] @@ -469,7 +468,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Downloading and preparing dataset imdb/plain_text to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1...\n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Downloading and preparing dataset imdb/plain_text to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1...\n" ] }, { @@ -645,7 +644,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Dataset imdb downloaded and prepared to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1. Subsequent calls will reuse this data.\n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Dataset imdb downloaded and prepared to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1. 
Subsequent calls will reuse this data.\n" ] }, { @@ -767,13 +766,13 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m len of train Dataset({\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m num_rows: 100\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m }) and test Dataset({\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m num_rows: 100\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m })\n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m len of train Dataset({\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m num_rows: 100\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m }) and test Dataset({\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m num_rows: 100\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m })\n" ] }, { @@ -787,54 +786,54 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m To disable this warning, you can either:\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \t- Avoid using `tokenizers` before the fork if possible\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:55:58 (running for 00:00:05.07)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 6.4/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m To disable this warning, you can either:\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \t- Avoid using `tokenizers` before the fork if possible\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:55:58 (running for 00:00:05.07)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 6.4/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m 2022-11-04 07:56:02,047\tINFO torch.py:346 -- Setting up process group for: env:// [rank=0, world_size=4]\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m 2022-11-04 07:56:02,045\tINFO torch.py:346 -- Setting up process group for: env:// [rank=2, world_size=4]\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m 2022-11-04 07:56:02,047\tINFO torch.py:346 -- Setting up process group for: env:// [rank=1, world_size=4]\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m 2022-11-04 07:56:02,048\tINFO torch.py:346 -- Setting up process group for: env:// [rank=3, world_size=4]\n" + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m 2022-11-04 07:56:02,047\tINFO torch.py:346 -- Setting up process group for: env:// [rank=0, world_size=4]\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m 2022-11-04 07:56:02,045\tINFO torch.py:346 -- Setting up process group for: env:// [rank=2, world_size=4]\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m 2022-11-04 07:56:02,047\tINFO torch.py:346 -- Setting up process group for: env:// [rank=1, world_size=4]\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m 2022-11-04 07:56:02,048\tINFO torch.py:346 -- Setting up process group for: env:// [rank=3, world_size=4]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:03 (running for 00:00:10.07)\n", - 
"\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 7.2/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:03 (running for 00:00:10.07)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 7.2/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" ] }, { @@ -881,20 +880,20 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:08 (running for 00:00:15.07)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 7.5/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn 
pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:08 (running for 00:00:15.07)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 7.5/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" ] }, { @@ -911,510 +910,510 @@ "Downloading: 95%|█████████▌| 255M/268M [00:04<00:00, 65.7MB/s]\n", "Downloading: 98%|█████████▊| 262M/268M [00:04<00:00, 65.8MB/s]\n", "Downloading: 100%|██████████| 268M/268M [00:04<00:00, 63.9MB/s]\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_projector.bias', 'vocab_layer_norm.bias', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_transform.weight', 'vocab_projector.weight']\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.bias', 'classifier.bias', 'classifier.weight', 'pre_classifier.weight']\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.bias', 'vocab_projector.bias', 'vocab_projector.weight', 'vocab_layer_norm.weight']\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight']\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_projector.weight', 'vocab_transform.weight', 'vocab_projector.bias', 'vocab_transform.bias', 'vocab_layer_norm.bias', 'vocab_layer_norm.weight']\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'pre_classifier.bias', 'pre_classifier.weight', 'classifier.weight']\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_layer_norm.weight', 'vocab_projector.weight', 'vocab_transform.weight', 'vocab_projector.bias', 'vocab_layer_norm.bias', 'vocab_transform.bias']\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'classifier.weight', 'pre_classifier.bias', 'pre_classifier.weight']\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m warnings.warn(\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. 
Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m warnings.warn(\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m warnings.warn(\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m warnings.warn(\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m ***** Running training *****\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Num examples = 6250\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Num Epochs = 1\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Instantaneous batch size per device = 16\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Total train batch size (w. parallel, distributed & accumulation) = 64\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Gradient Accumulation steps = 1\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Total optimization steps = 391\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Number of trainable parameters = 66955010\n" + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_projector.bias', 'vocab_layer_norm.bias', 'vocab_transform.bias', 'vocab_layer_norm.weight', 'vocab_transform.weight', 'vocab_projector.weight']\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['pre_classifier.bias', 'classifier.bias', 'classifier.weight', 'pre_classifier.weight']\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_transform.weight', 'vocab_transform.bias', 'vocab_layer_norm.bias', 'vocab_projector.bias', 'vocab_projector.weight', 'vocab_layer_norm.weight']\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'pre_classifier.weight', 'pre_classifier.bias', 'classifier.weight']\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_projector.weight', 'vocab_transform.weight', 'vocab_projector.bias', 'vocab_transform.bias', 'vocab_layer_norm.bias', 'vocab_layer_norm.weight']\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. 
initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'pre_classifier.bias', 'pre_classifier.weight', 'classifier.weight']\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m Some weights of the model checkpoint at distilbert-base-uncased were not used when initializing DistilBertForSequenceClassification: ['vocab_layer_norm.weight', 'vocab_projector.weight', 'vocab_transform.weight', 'vocab_projector.bias', 'vocab_layer_norm.bias', 'vocab_transform.bias']\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m - This IS expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m - This IS NOT expected if you are initializing DistilBertForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m Some weights of DistilBertForSequenceClassification were not initialized from the model checkpoint at distilbert-base-uncased and are newly initialized: ['classifier.bias', 'classifier.weight', 'pre_classifier.bias', 'pre_classifier.weight']\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m warnings.warn(\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. 
Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m warnings.warn(\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m warnings.warn(\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m /tmp/ray/session_2022-11-04_07-51-23_507232_7/runtime_resources/pip/4a36d7bd0bbff8fccea52f9c0d942dd63707933f/virtualenv/lib/python3.8/site-packages/transformers/optimization.py:306: FutureWarning: This implementation of AdamW is deprecated and will be removed in a future version. Use the PyTorch implementation torch.optim.AdamW instead, or set `no_deprecation_warning=True` to disable this warning\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m warnings.warn(\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m ***** Running training *****\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Num examples = 6250\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Num Epochs = 1\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Instantaneous batch size per device = 16\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Total train batch size (w. 
parallel, distributed & accumulation) = 64\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Gradient Accumulation steps = 1\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Total optimization steps = 391\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Number of trainable parameters = 66955010\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:13 (running for 00:00:20.08)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 12.3/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:13 (running for 00:00:20.08)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 12.3/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. 
This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001B[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001B[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001B[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n" + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=184, ip=10.129.66.16)\u001b[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. 
(function operator())\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=183, ip=10.129.66.16)\u001b[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=185, ip=10.129.66.16)\u001b[0m [W reducer.cpp:1251] Warning: find_unused_parameters=True was specified in DDP constructor, but did not find any unused parameters in the forward pass. This flag results in an extra traversal of the autograd graph every iteration, which can adversely affect performance. If your model indeed never has any unused parameters in the forward pass, consider turning this flag off. Note that this warning may be a false positive if your model has flow control causing later iterations to have unused parameters. (function operator())\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:18 (running for 00:00:25.08)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:23 (running for 00:00:30.08)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - 
"\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:28 (running for 00:00:35.09)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:33 (running for 00:00:40.09)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:38 (running for 00:00:45.10)\n", - "\u001B[2m\u001B[36m(train_fn 
pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:43 (running for 00:00:50.10)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:48 (running for 00:00:55.10)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 
|--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:53 (running for 00:01:00.10)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:56:59 (running for 00:01:05.11)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:04 (running for 00:01:10.11)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling 
algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:09 (running for 00:01:15.11)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:14 (running for 00:01:20.12)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | 
HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:19 (running for 00:01:25.12)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:24 (running for 00:01:30.12)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:29 (running for 00:01:35.13)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB 
heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:34 (running for 00:01:40.13)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:39 (running for 00:01:45.13)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 
+--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:44 (running for 00:01:50.13)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:49 (running for 00:01:55.14)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:54 (running for 00:02:00.14)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: 
/home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:57:59 (running for 00:02:05.15)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 13.7/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:18 (running for 00:00:25.08)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + 
"\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:23 (running for 00:00:30.08)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:28 (running for 00:00:35.09)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:33 (running for 00:00:40.09)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 
RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:38 (running for 00:00:45.10)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:43 (running for 00:00:50.10)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + 
"\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:48 (running for 00:00:55.10)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:53 (running for 00:01:00.10)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:56:59 (running for 00:01:05.11)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 
+--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:04 (running for 00:01:10.11)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:09 (running for 00:01:15.11)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:14 
(running for 00:01:20.12)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:19 (running for 00:01:25.12)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:24 (running for 00:01:30.12)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc 
|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:29 (running for 00:01:35.13)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:34 (running for 00:01:40.13)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:39 (running for 00:01:45.13)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + 
"\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:44 (running for 00:01:50.13)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:49 (running for 00:01:55.14)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + 
"\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:54 (running for 00:02:00.14)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:57:59 (running for 00:02:05.15)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 13.7/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Saving model checkpoint to /tmp/hf_imdb/test/checkpoint-391\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Configuration saved in /tmp/hf_imdb/test/checkpoint-391/config.json\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Model 
weights saved in /tmp/hf_imdb/test/checkpoint-391/pytorch_model.bin\n" + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Saving model checkpoint to /tmp/hf_imdb/test/checkpoint-391\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Configuration saved in /tmp/hf_imdb/test/checkpoint-391/config.json\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Model weights saved in /tmp/hf_imdb/test/checkpoint-391/pytorch_model.bin\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result for HuggingFaceTrainer_c7d60_00000:\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _time_this_iter_s: 118.07144260406494\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _timestamp: 1667573883\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _training_iteration: 1\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m date: 2022-11-04_07-58-03\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m done: false\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m epoch: 1.0\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m experiment_id: 7bc6ab25d0414fcbb589bcb5d0f29b99\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m hostname: hfgputest-worker-small-group-hfgputest-q4758\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m iterations_since_restore: 1\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m node_ip: 10.129.66.16\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m pid: 146\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m should_checkpoint: true\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m step: 391\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_since_restore: 124.55581378936768\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_this_iter_s: 124.55581378936768\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_total_s: 124.55581378936768\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m timestamp: 1667573883\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m timesteps_since_restore: 0\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_loss: 0.2760564701636429\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_runtime: 109.7668\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_samples_per_second: 56.939\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_steps_per_second: 3.562\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m training_iteration: 1\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m trial_id: c7d60_00000\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m warmup_time: 0.003995656967163086\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m {'train_runtime': 109.7668, 'train_samples_per_second': 56.939, 'train_steps_per_second': 3.562, 'train_loss': 0.2760564701636429, 'epoch': 1.0}\n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result for HuggingFaceTrainer_c7d60_00000:\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _time_this_iter_s: 118.07144260406494\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _timestamp: 1667573883\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _training_iteration: 1\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m date: 2022-11-04_07-58-03\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m done: false\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m epoch: 1.0\n", + 
"\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m experiment_id: 7bc6ab25d0414fcbb589bcb5d0f29b99\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m hostname: hfgputest-worker-small-group-hfgputest-q4758\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m iterations_since_restore: 1\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m node_ip: 10.129.66.16\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m pid: 146\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m should_checkpoint: true\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m step: 391\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_since_restore: 124.55581378936768\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_this_iter_s: 124.55581378936768\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_total_s: 124.55581378936768\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m timestamp: 1667573883\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m timesteps_since_restore: 0\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_loss: 0.2760564701636429\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_runtime: 109.7668\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_samples_per_second: 56.939\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_steps_per_second: 3.562\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m training_iteration: 1\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m trial_id: c7d60_00000\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m warmup_time: 0.003995656967163086\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m {'train_runtime': 109.7668, 'train_samples_per_second': 56.939, 'train_steps_per_second': 3.562, 'train_loss': 0.2760564701636429, 'epoch': 1.0}\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m \n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m \n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m Training completed. Do not forget to share your model on huggingface.co/models =)\n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m \n", - "\u001B[2m\u001B[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001B[0m \n" + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m \n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m \n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m Training completed. 
Do not forget to share your model on huggingface.co/models =)\n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m \n", + "\u001b[2m\u001b[36m(BaseWorkerMixin pid=182, ip=10.129.66.16)\u001b[0m \n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:58:13 (running for 00:02:19.36)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 16.0/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc | iter | total time (s) | train_runtime | train_samples_per_second | train_steps_per_second |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 10.129.66.16:146 | 1 | 124.556 | 109.767 | 56.939 | 3.562 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:58:13 (running for 00:02:19.36)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 16.0/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 5.0/10 CPUs, 4.0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc | iter | total time (s) | train_runtime | train_samples_per_second | train_steps_per_second |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | RUNNING | 
10.129.66.16:146 | 1 | 124.556 | 109.767 | 56.939 | 3.562 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+----------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 2022-11-04 07:58:13,248\tWARNING util.py:214 -- The `process_trial_save` operation took 9.709 s, which may be a performance bottleneck.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 2022-11-04 07:58:13,248\tWARNING trial_runner.py:856 -- Consider turning off forced head-worker trial checkpoint syncs by setting sync_on_checkpoint=False. Note that this may result in faulty trial restoration if a failure occurs while the checkpoint is being synced from the worker to the head node.\n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 2022-11-04 07:58:13,248\tWARNING util.py:214 -- The `process_trial_save` operation took 9.709 s, which may be a performance bottleneck.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 2022-11-04 07:58:13,248\tWARNING trial_runner.py:856 -- Consider turning off forced head-worker trial checkpoint syncs by setting sync_on_checkpoint=False. Note that this may result in faulty trial restoration if a failure occurs while the checkpoint is being synced from the worker to the head node.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result for HuggingFaceTrainer_c7d60_00000:\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _time_this_iter_s: 118.07144260406494\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _timestamp: 1667573883\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m _training_iteration: 1\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m date: 2022-11-04_07-58-03\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m done: true\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m epoch: 1.0\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m experiment_id: 7bc6ab25d0414fcbb589bcb5d0f29b99\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m experiment_tag: '0'\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m hostname: hfgputest-worker-small-group-hfgputest-q4758\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m iterations_since_restore: 1\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m node_ip: 10.129.66.16\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m pid: 146\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m should_checkpoint: true\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m step: 391\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_since_restore: 124.55581378936768\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_this_iter_s: 124.55581378936768\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m time_total_s: 124.55581378936768\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m timestamp: 1667573883\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m timesteps_since_restore: 0\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_loss: 0.2760564701636429\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_runtime: 109.7668\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_samples_per_second: 56.939\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m train_steps_per_second: 
3.562\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m training_iteration: 1\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m trial_id: c7d60_00000\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m warmup_time: 0.003995656967163086\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Current time: 2022-11-04 07:58:16 (running for 00:02:22.40)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Memory usage on this node: 9.1/240.1 GiB\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Resources requested: 0/10 CPUs, 0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m Number of trials: 1/1 (1 TERMINATED)\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | Trial name | status | loc | iter | total time (s) | train_runtime | train_samples_per_second | train_steps_per_second |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m |--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m | HuggingFaceTrainer_c7d60_00000 | TERMINATED | 10.129.66.16:146 | 1 | 124.556 | 109.767 | 56.939 | 3.562 |\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m +--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m \n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result for HuggingFaceTrainer_c7d60_00000:\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _time_this_iter_s: 118.07144260406494\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _timestamp: 1667573883\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m _training_iteration: 1\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m date: 2022-11-04_07-58-03\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m done: true\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m epoch: 1.0\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m experiment_id: 7bc6ab25d0414fcbb589bcb5d0f29b99\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m experiment_tag: '0'\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m hostname: hfgputest-worker-small-group-hfgputest-q4758\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m iterations_since_restore: 1\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m node_ip: 10.129.66.16\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m pid: 146\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m should_checkpoint: true\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m step: 391\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_since_restore: 124.55581378936768\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_this_iter_s: 124.55581378936768\n", + 
"\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m time_total_s: 124.55581378936768\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m timestamp: 1667573883\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m timesteps_since_restore: 0\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_loss: 0.2760564701636429\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_runtime: 109.7668\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_samples_per_second: 56.939\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m train_steps_per_second: 3.562\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m training_iteration: 1\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m trial_id: c7d60_00000\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m warmup_time: 0.003995656967163086\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Current time: 2022-11-04 07:58:16 (running for 00:02:22.40)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Memory usage on this node: 9.1/240.1 GiB\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Resources requested: 0/10 CPUs, 0/4 GPUs, 0.0/22.35 GiB heap, 0.0/6.59 GiB objects\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2022-11-04_07-55-53\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Number of trials: 1/1 (1 TERMINATED)\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | Trial name | status | loc | iter | total time (s) | train_runtime | train_samples_per_second | train_steps_per_second |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m |--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m | HuggingFaceTrainer_c7d60_00000 | TERMINATED | 10.129.66.16:146 | 1 | 124.556 | 109.767 | 56.939 | 3.562 |\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m +--------------------------------+------------+------------------+--------+------------------+-----------------+----------------------------+--------------------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 2022-11-04 07:58:16,286\tWARNING util.py:214 -- The `process_trial_save` operation took 2.161 s, which may be a performance bottleneck.\n", - "\u001B[2m\u001B[36m(train_fn pid=250)\u001B[0m 2022-11-04 07:58:16,398\tINFO tune.py:747 -- Total run time: 142.70 seconds (142.40 seconds for the tuning loop).\n" + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 2022-11-04 07:58:16,286\tWARNING util.py:214 -- The `process_trial_save` operation took 2.161 s, which may be a performance bottleneck.\n", + "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m 2022-11-04 07:58:16,398\tINFO tune.py:747 -- Total run time: 142.70 seconds (142.40 seconds for the tuning loop).\n" ] } ], diff --git 
a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 1d3c83ad..8ea08834 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -50,10 +50,9 @@ "source": [ "# Create and submit our cluster\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", - "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"hfgputest-1\"\n", "\n", - "cluster = Cluster(ClusterConfiguration(namespace=namespace,\n", + "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=0,\n", @@ -118,8 +117,8 @@ "source": [ "from codeflare_sdk import generate_cert\n", "\n", - "generate_cert.generate_tls_cert(cluster_name, namespace)\n", - "generate_cert.export_env(cluster_name, namespace)" + "generate_cert.generate_tls_cert(cluster_name, cluster.config.namespace)\n", + "generate_cert.export_env(cluster_name, cluster.config.namespace)" ] }, { diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index c452fb31..4b9434bc 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -45,7 +45,6 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " namespace='default', # Update to your namespace\n", " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=0,\n", " num_workers=2,\n", diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 0cd3419f..3f0f62e4 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -62,7 +62,6 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest', \n", - " namespace='default', # Update to your namespace\n", " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=0,\n", " num_workers=2,\n", diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index de8fafdd..00576024 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -44,7 +44,6 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " namespace='default', # Update to your namespace\n", " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=1,\n", " num_workers=2,\n", diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index b6a13b8c..0692caa4 100644 --- 
a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -57,11 +57,9 @@ "source": [ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", - "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", - " namespace=namespace,\n", " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=1,\n", " num_workers=2,\n", @@ -137,8 +135,8 @@ "source": [ "from codeflare_sdk import generate_cert\n", "# Create required TLS cert and export the environment variables to enable TLS\n", - "generate_cert.generate_tls_cert(cluster_name, namespace)\n", - "generate_cert.export_env(cluster_name, namespace)" + "generate_cert.generate_tls_cert(cluster_name, cluster.config.namespace)\n", + "generate_cert.export_env(cluster_name, cluster.config.namespace)" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 646e2424..8338ce74 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -70,7 +70,6 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", - " namespace='default', # Update to your namespace\n", " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=0,\n", " num_workers=2,\n", @@ -134,13 +133,13 @@ ], "text/plain": [ "╭───────────────────────╮\n", - "│ \u001B[3m \u001B[0m\u001B[1;3m 🚀 Cluster Queue\u001B[0m\u001B[3m \u001B[0m │\n", - "│ \u001B[3m \u001B[0m\u001B[1;3mStatus 🚀\u001B[0m\u001B[3m \u001B[0m │\n", + "│ \u001b[3m \u001b[0m\u001b[1;3m 🚀 Cluster Queue\u001b[0m\u001b[3m \u001b[0m │\n", + "│ \u001b[3m \u001b[0m\u001b[1;3mStatus 🚀\u001b[0m\u001b[3m \u001b[0m │\n", "│ +---------+---------+ │\n", - "│ |\u001B[1m \u001B[0m\u001B[1mName \u001B[0m\u001B[1m \u001B[0m|\u001B[1m \u001B[0m\u001B[1mStatus \u001B[0m\u001B[1m \u001B[0m| │\n", + "│ |\u001b[1m \u001b[0m\u001b[1mName \u001b[0m\u001b[1m \u001b[0m|\u001b[1m \u001b[0m\u001b[1mStatus \u001b[0m\u001b[1m \u001b[0m| │\n", "│ +=========+=========+ │\n", - "│ |\u001B[36m \u001B[0m\u001B[36mraytest\u001B[0m\u001B[36m \u001B[0m|\u001B[35m \u001B[0m\u001B[35mpending\u001B[0m\u001B[35m \u001B[0m| │\n", - "│ |\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m|\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m| │\n", + "│ |\u001b[36m \u001b[0m\u001b[36mraytest\u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35mpending\u001b[0m\u001b[35m \u001b[0m| │\n", + "│ |\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m| │\n", "│ +---------+---------+ │\n", "╰───────────────────────╯\n" ] @@ -205,15 +204,15 @@ "\n" ], "text/plain": [ - "\u001B[3m \u001B[0m\u001B[1;3m 🚀 CodeFlare Cluster Status 🚀\u001B[0m\u001B[3m \u001B[0m\n", - "\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\n", + "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Status 🚀\u001b[0m\u001b[3m 
\u001b[0m\n", + "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", " ╭──────────────────────────────────────────────────────────────╮ \n", - " │ \u001B[1;37;42mName\u001B[0m │ \n", - " │ \u001B[1;4mraytest\u001B[0m Active ✅ │ \n", + " │ \u001b[1;37;42mName\u001b[0m │ \n", + " │ \u001b[1;4mraytest\u001b[0m Active ✅ │ \n", " │ │ \n", - " │ \u001B[1mURI:\u001B[0m ray://raytest-head-svc.default.svc:10001 │ \n", + " │ \u001b[1mURI:\u001b[0m ray://raytest-head-svc.default.svc:10001 │ \n", " │ │ \n", - " │ \u001B]8;id=630217;ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001B\\\u001B[4;34mDashboard🔗\u001B[0m\u001B]8;;\u001B\\ │ \n", + " │ \u001b]8;id=630217;ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", " │ │ \n", " ╰──────────────────────────────────────────────────────────────╯ \n" ] @@ -274,22 +273,22 @@ "\n" ], "text/plain": [ - "\u001B[3m \u001B[0m\u001B[1;3m 🚀 CodeFlare Cluster Details 🚀\u001B[0m\u001B[3m \u001B[0m\n", - "\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\n", + "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", + "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", " ╭───────────────────────────────────────────────────────────────╮ \n", - " │ \u001B[1;37;42mName\u001B[0m │ \n", - " │ \u001B[1;4mraytest\u001B[0m Active ✅ │ \n", + " │ \u001b[1;37;42mName\u001b[0m │ \n", + " │ \u001b[1;4mraytest\u001b[0m Active ✅ │ \n", " │ │ \n", - " │ \u001B[1mURI:\u001B[0m ray://raytest-head-svc.default.svc:10001 │ \n", + " │ \u001b[1mURI:\u001b[0m ray://raytest-head-svc.default.svc:10001 │ \n", " │ │ \n", - " │ \u001B]8;id=623965;http://ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001B\\\u001B[4;34mDashboard🔗\u001B[0m\u001B]8;;\u001B\\ │ \n", + " │ \u001b]8;id=623965;http://ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", " │ │ \n", - " │ \u001B[3m Cluster Resources \u001B[0m │ \n", + " │ \u001b[3m Cluster Resources \u001b[0m │ \n", " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001B[1m \u001B[0m\u001B[1m# Workers\u001B[0m\u001B[1m \u001B[0m │ │ \u001B[1m \u001B[0m\u001B[1mMemory \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mCPU \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mGPU \u001B[0m\u001B[1m \u001B[0m │ │ \n", - " │ │ \u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", - " │ │ \u001B[35m \u001B[0m\u001B[35m2 \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m4~4 \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m1 \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m0 \u001B[0m\u001B[35m \u001B[0m │ │ \n", - " │ │ \u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", + " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU 
\u001b[0m\u001b[1m \u001b[0m │ │ \n", + " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m4~4 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m0 \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", " ╰───────────────────────────────────────────────────────────────╯ \n" ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index b2e954e2..e354f36d 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -44,7 +44,6 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " namespace='default', # Update to your namespace\n", " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=1,\n", " num_workers=2,\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 443ea063..4e28e53b 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -65,11 +65,9 @@ "source": [ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", - "namespace = \"default\" # Update to your namespace\n", "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", - " namespace=namespace,\n", " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=1,\n", " num_workers=2,\n", @@ -134,22 +132,22 @@ "\n" ], "text/plain": [ - "\u001B[3m \u001B[0m\u001B[1;3m 🚀 CodeFlare Cluster Details 🚀\u001B[0m\u001B[3m \u001B[0m\n", - "\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\n", + "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", + "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", " ╭──────────────────────────────────────────────────────────────────────╮ \n", - " │ \u001B[1;37;42mName\u001B[0m │ \n", - " │ \u001B[1;4minteractivetest\u001B[0m Active ✅ │ \n", + " │ \u001b[1;37;42mName\u001b[0m │ \n", + " │ \u001b[1;4minteractivetest\u001b[0m Active ✅ │ \n", " │ │ \n", - " │ \u001B[1mURI:\u001B[0m ray://interactivetest-head-svc.default.svc:10001 │ \n", + " │ \u001b[1mURI:\u001b[0m ray://interactivetest-head-svc.default.svc:10001 │ \n", " │ │ \n", - " 
│ \u001B]8;id=970589;http://ray-dashboard-interactivetest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001B\\\u001B[4;34mDashboard🔗\u001B[0m\u001B]8;;\u001B\\ │ \n", + " │ \u001b]8;id=970589;http://ray-dashboard-interactivetest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", " │ │ \n", - " │ \u001B[3m Cluster Resources \u001B[0m │ \n", + " │ \u001b[3m Cluster Resources \u001b[0m │ \n", " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001B[1m \u001B[0m\u001B[1m# Workers\u001B[0m\u001B[1m \u001B[0m │ │ \u001B[1m \u001B[0m\u001B[1mMemory \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mCPU \u001B[0m\u001B[1m \u001B[0m\u001B[1m \u001B[0m\u001B[1mGPU \u001B[0m\u001B[1m \u001B[0m │ │ \n", - " │ │ \u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", - " │ │ \u001B[35m \u001B[0m\u001B[35m2 \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m8~8 \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m2 \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m1 \u001B[0m\u001B[35m \u001B[0m │ │ \n", - " │ │ \u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[36m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m\u001B[35m \u001B[0m │ │ \n", + " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", + " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m8~8 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m │ │ \n", + " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", " ╰──────────────────────────────────────────────────────────────────────╯ \n" ] @@ -221,8 +219,8 @@ "source": [ "from codeflare_sdk import generate_cert\n", "# Create required TLS cert and export the environment variables to enable TLS\n", - "generate_cert.generate_tls_cert(cluster_name, namespace)\n", - "generate_cert.export_env(cluster_name, namespace)" + "generate_cert.generate_tls_cert(cluster_name, cluster.config.namespace)\n", + "generate_cert.export_env(cluster_name, cluster.config.namespace)" ] }, { @@ -381,7 +379,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Downloading and preparing dataset imdb/plain_text to 
/home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/d613c88cf8fa3bab83b4ded3713f1f74830d1100e171db75bbddb80b3345c9c0...\n" + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Downloading and preparing dataset imdb/plain_text to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/d613c88cf8fa3bab83b4ded3713f1f74830d1100e171db75bbddb80b3345c9c0...\n" ] }, { @@ -494,7 +492,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Dataset imdb downloaded and prepared to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/d613c88cf8fa3bab83b4ded3713f1f74830d1100e171db75bbddb80b3345c9c0. Subsequent calls will reuse this data.\n" + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Dataset imdb downloaded and prepared to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/d613c88cf8fa3bab83b4ded3713f1f74830d1100e171db75bbddb80b3345c9c0. Subsequent calls will reuse this data.\n" ] }, { @@ -613,13 +611,13 @@ "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m len of train Dataset({\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m num_rows: 100\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m }) and test Dataset({\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m num_rows: 100\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m })\n" + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m len of train Dataset({\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m num_rows: 100\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m }) and test Dataset({\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m features: ['text', 'label', 'input_ids', 'attention_mask'],\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m num_rows: 100\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m })\n" ] }, { @@ -627,106 +625,106 @@ "output_type": "stream", "text": [ " \n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m 2023-08-09 14:51:50,865\tWARNING dataset.py:253 -- \u001B[33mImportant: Ray Data requires schemas for all datasets in Ray 2.5. This means that standalone Python objects are no longer supported. In addition, the default batch format is fixed to NumPy. To revert to legacy behavior temporarily, set the environment variable RAY_DATA_STRICT_MODE=0 on all cluster processes.\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Learn more here: https://docs.ray.io/en/master/data/faq.html#migrating-to-strict-mode\u001B[0m\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m /tmp/ipykernel_265/307576807.py:57: DeprecationWarning: `HuggingFaceTrainer`, `HuggingFacePredictor` and `HuggingFaceCheckpoint` have been renamed to `TransformersTrainer`, `TransformersPredictor` and `TransformersCheckpoint` respectively. Update your code to use the new import paths. This will raise an exception in the future.\n" + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m 2023-08-09 14:51:50,865\tWARNING dataset.py:253 -- \u001b[33mImportant: Ray Data requires schemas for all datasets in Ray 2.5. This means that standalone Python objects are no longer supported. 
In addition, the default batch format is fixed to NumPy. To revert to legacy behavior temporarily, set the environment variable RAY_DATA_STRICT_MODE=0 on all cluster processes.\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Learn more here: https://docs.ray.io/en/master/data/faq.html#migrating-to-strict-mode\u001b[0m\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m /tmp/ipykernel_265/307576807.py:57: DeprecationWarning: `HuggingFaceTrainer`, `HuggingFacePredictor` and `HuggingFaceCheckpoint` have been renamed to `TransformersTrainer`, `TransformersPredictor` and `TransformersCheckpoint` respectively. Update your code to use the new import paths. This will raise an exception in the future.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m huggingface/tokenizers: The current process just got forked, after parallelism has already been used. Disabling parallelism to avoid deadlocks...\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m To disable this warning, you can either:\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \t- Avoid using `tokenizers` before the fork if possible\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Current time: 2023-08-09 14:51:51 (running for 00:00:00.12)\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Logical resource usage: 0/6 CPUs, 0/2 GPUs\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Number of trials: 1/1 (1 PENDING)\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-------+\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m |--------------------------------+----------+-------|\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | HuggingFaceTrainer_f2621_00000 | PENDING | |\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-------+\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n" + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m huggingface/tokenizers: The current process just got forked, after parallelism has already been used. 
Disabling parallelism to avoid deadlocks...\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m To disable this warning, you can either:\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \t- Avoid using `tokenizers` before the fork if possible\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \t- Explicitly set the environment variable TOKENIZERS_PARALLELISM=(true | false)\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Current time: 2023-08-09 14:51:51 (running for 00:00:00.12)\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Logical resource usage: 0/6 CPUs, 0/2 GPUs\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Number of trials: 1/1 (1 PENDING)\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-------+\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m |--------------------------------+----------+-------|\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | HuggingFaceTrainer_f2621_00000 | PENDING | |\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-------+\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:55,978\tWARNING dataset.py:253 -- \u001B[33mImportant: Ray Data requires schemas for all datasets in Ray 2.5. This means that standalone Python objects are no longer supported. In addition, the default batch format is fixed to NumPy. To revert to legacy behavior temporarily, set the environment variable RAY_DATA_STRICT_MODE=0 on all cluster processes.\n", - "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m \n", - "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m Learn more here: https://docs.ray.io/en/master/data/faq.html#migrating-to-strict-mode\u001B[0m\n" + "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:55,978\tWARNING dataset.py:253 -- \u001b[33mImportant: Ray Data requires schemas for all datasets in Ray 2.5. This means that standalone Python objects are no longer supported. In addition, the default batch format is fixed to NumPy. 
To revert to legacy behavior temporarily, set the environment variable RAY_DATA_STRICT_MODE=0 on all cluster processes.\n", + "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m \n", + "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m Learn more here: https://docs.ray.io/en/master/data/faq.html#migrating-to-strict-mode\u001b[0m\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Current time: 2023-08-09 14:51:56 (running for 00:00:05.16)\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Logical resource usage: 1.0/6 CPUs, 2.0/2 GPUs\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-----------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m |--------------------------------+----------+-----------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | HuggingFaceTrainer_f2621_00000 | RUNNING | 10.130.4.19:196 |\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-----------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n" + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Current time: 2023-08-09 14:51:56 (running for 00:00:05.16)\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Logical resource usage: 1.0/6 CPUs, 2.0/2 GPUs\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-----------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m |--------------------------------+----------+-----------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | HuggingFaceTrainer_f2621_00000 | RUNNING | 10.130.4.19:196 |\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-----------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:57,260\tINFO backend_executor.py:137 -- Starting distributed worker processes: ['235 (10.130.4.19)', '232 (10.129.4.19)']\n", - "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:58,957\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", - "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:58,957\tINFO 
streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", - "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:58,958\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", - "\u001B[2m\u001B[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:58,969\tINFO streaming_executor.py:149 -- Shutting down .\n", - "\u001B[2m\u001B[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001B[0m 2023-08-09 14:51:58,912\tINFO config.py:86 -- Setting up process group for: env:// [rank=0, world_size=2]\n" + "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:57,260\tINFO backend_executor.py:137 -- Starting distributed worker processes: ['235 (10.130.4.19)', '232 (10.129.4.19)']\n", + "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,957\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", + "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,957\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", + "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,958\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", + "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,969\tINFO streaming_executor.py:149 -- Shutting down .\n", + "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,912\tINFO config.py:86 -- Setting up process group for: env:// [rank=0, world_size=2]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m == Status ==\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Current time: 2023-08-09 14:52:01 (running for 00:00:10.18)\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Using FIFO scheduling algorithm.\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Logical resource usage: 1.0/6 CPUs, 2.0/2 GPUs\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-----------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | Trial name | status | loc |\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m |--------------------------------+----------+-----------------|\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m | HuggingFaceTrainer_f2621_00000 | RUNNING | 10.130.4.19:196 |\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m +--------------------------------+----------+-----------------+\n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n", - "\u001B[2m\u001B[36m(train_fn pid=425)\u001B[0m \n" + 
"\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m == Status ==\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Current time: 2023-08-09 14:52:01 (running for 00:00:10.18)\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Using FIFO scheduling algorithm.\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Logical resource usage: 1.0/6 CPUs, 2.0/2 GPUs\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-----------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | Trial name | status | loc |\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m |--------------------------------+----------+-----------------|\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | HuggingFaceTrainer_f2621_00000 | RUNNING | 10.130.4.19:196 |\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-----------------+\n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n", + "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n" ] }, { "name": "stderr", "output_type": "stream", "text": [ - "\u001B[2m\u001B[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001B[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", - "\u001B[2m\u001B[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001B[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", - "\u001B[2m\u001B[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001B[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", - "\u001B[2m\u001B[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001B[0m 2023-08-09 14:52:01,274\tINFO streaming_executor.py:149 -- Shutting down .\n", - "\u001B[2m\u001B[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001B[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", - "\u001B[2m\u001B[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001B[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", - "\u001B[2m\u001B[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001B[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", - "\u001B[2m\u001B[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001B[0m 2023-08-09 14:52:01,263\tINFO streaming_executor.py:149 -- Shutting down .\n", + "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", + "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 
14:52:01,262\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", + "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", + "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,274\tINFO streaming_executor.py:149 -- Shutting down .\n", + "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", + "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", + "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", + "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,263\tINFO streaming_executor.py:149 -- Shutting down .\n", "Downloading (…)lve/main/config.json: 100%|██████████| 483/483 [00:00<00:00, 151kB/s]\n", "Downloading (…)lve/main/config.json: 100%|██████████| 483/483 [00:00<00:00, 146kB/s]\n", "Downloading model.safetensors: 0%| | 0.00/268M [00:00 Date: Thu, 25 Jul 2024 11:27:25 +0100 Subject: [PATCH 300/496] adding tls cert creation and updating num workers in hf notebook --- .../additional-demos/hf_interactive.ipynb | 26 +++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index fe4cd429..1c9f7cd1 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -89,7 +89,8 @@ "source": [ "# Create our cluster and submit\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", - "cluster = Cluster(ClusterConfiguration(name='hfgputest', \n", + "cluster_name= \"hfgputest\"\n", + "cluster = Cluster(ClusterConfiguration(name=cluster_name, \n", " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=1,\n", " num_workers=1,\n", @@ -287,6 +288,27 @@ "ray_cluster_uri = cluster.cluster_uri()" ] }, + { + "cell_type": "markdown", + "id": "64d65c3c", + "metadata": {}, + "source": [ + "Now we can connect directly to our Ray cluster via the Ray python client:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "60276d86", + "metadata": {}, + "outputs": [], + "source": [ + "from codeflare_sdk import generate_cert\n", + "# Create required TLS cert and export the environment variables to enable TLS\n", + "generate_cert.generate_tls_cert(cluster_name, cluster.config.namespace)\n", + "generate_cert.export_env(cluster_name, 
cluster.config.namespace)" + ] + }, { "cell_type": "markdown", "id": "44dba6a0-8275-4726-8911-6b6ec467b6a3", @@ -432,7 +454,7 @@ "\n", " ray_trainer = TorchTrainer(\n", " train_func,\n", - " scaling_config=ScalingConfig(num_workers=3, use_gpu=True),\n", + " scaling_config=ScalingConfig(num_workers=2, use_gpu=True),\n", " # Configure persistent storage that is accessible across \n", " # all worker nodes.\n", " # Uncomment and update the RunConfig below to include your storage details.\n", From ecf04c32d5816fb5005793ebe5d411a7d5a7feb7 Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Thu, 18 Jul 2024 11:16:50 +0200 Subject: [PATCH 301/496] Run PR check for guided notebooks --- .../resources/minio_remote_config_cell.json | 20 + .github/resources/wait_for_job_cell.json | 20 + .github/workflows/guided_notebook_tests.yaml | 381 ++++++++++++++++++ demo-notebooks/guided-demos/0_basic_ray.ipynb | 4 +- .../guided-demos/1_cluster_job_client.ipynb | 4 +- .../guided-demos/2_basic_interactive.ipynb | 22 +- demo-notebooks/guided-demos/mnist_fashion.py | 12 +- tests/e2e/minio_deployment.yaml | 37 +- 8 files changed, 455 insertions(+), 45 deletions(-) create mode 100644 .github/resources/minio_remote_config_cell.json create mode 100644 .github/resources/wait_for_job_cell.json create mode 100644 .github/workflows/guided_notebook_tests.yaml diff --git a/.github/resources/minio_remote_config_cell.json b/.github/resources/minio_remote_config_cell.json new file mode 100644 index 00000000..e36c4b18 --- /dev/null +++ b/.github/resources/minio_remote_config_cell.json @@ -0,0 +1,20 @@ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "@ray.remote\n", + "def get_minio_run_config():\n", + " import s3fs\n", + " import pyarrow\n", + " s3_fs = s3fs.S3FileSystem(\n", + " key = \"minio\",\n", + " secret = \"minio123\",\n", + " endpoint_url = \"http://minio-service.default.svc.cluster.local:9000\"\n", + " )\n", + " custom_fs = pyarrow.fs.PyFileSystem(pyarrow.fs.FSSpecHandler(s3_fs))\n", + " run_config = ray.train.RunConfig(storage_path='training', storage_filesystem=custom_fs)\n", + " return run_config" + ] + } diff --git a/.github/resources/wait_for_job_cell.json b/.github/resources/wait_for_job_cell.json new file mode 100644 index 00000000..eb8805bd --- /dev/null +++ b/.github/resources/wait_for_job_cell.json @@ -0,0 +1,20 @@ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from time import sleep\n", + "\n", + "finished = False\n", + "while not finished:\n", + " sleep(5)\n", + " status = client.get_job_status(submission_id)\n", + " finished = (status == \"SUCCEEDED\" or status == \"FAILED\" or status == \"STOPPED\")\n", + " print(status)\n", + "print(\"Job status \" + status)\n", + "print(\"Logs: \")\n", + "print(client.get_job_logs(submission_id))\n", + "assert status == \"SUCCEEDED\", \"Job failed or was stopped!\"" + ] + } diff --git a/.github/workflows/guided_notebook_tests.yaml b/.github/workflows/guided_notebook_tests.yaml new file mode 100644 index 00000000..299c67ef --- /dev/null +++ b/.github/workflows/guided_notebook_tests.yaml @@ -0,0 +1,381 @@ +name: Guided notebooks tests + +on: + pull_request: + types: [ labeled ] + +concurrency: + group: ${{ github.head_ref }}-${{ github.workflow }} + cancel-in-progress: true + +env: + CODEFLARE_OPERATOR_IMG: "quay.io/project-codeflare/codeflare-operator:dev" + +jobs: + verify-0_basic_ray: + if: ${{ github.event.label.name == 'test-guided-notebooks' }} + runs-on: 
ubuntu-20.04-4core + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Checkout common repo code + uses: actions/checkout@v4 + with: + repository: 'project-codeflare/codeflare-common' + ref: 'main' + path: 'common' + + - name: Checkout CodeFlare operator repository + uses: actions/checkout@v4 + with: + repository: project-codeflare/codeflare-operator + path: codeflare-operator + + - name: Set Go + uses: actions/setup-go@v5 + with: + go-version-file: './codeflare-operator/go.mod' + cache-dependency-path: "./codeflare-operator/go.sum" + + - name: Set up gotestfmt + uses: gotesttools/gotestfmt-action@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up specific Python version + uses: actions/setup-python@v5 + with: + python-version: '3.9' + cache: 'pip' # caching pip dependencies + + - name: Setup and start KinD cluster + uses: ./common/github-actions/kind + + - name: Deploy CodeFlare stack + id: deploy + run: | + cd codeflare-operator + echo Setting up CodeFlare stack + make setup-e2e + echo Deploying CodeFlare operator + make deploy -e IMG="${CODEFLARE_OPERATOR_IMG}" -e ENV="e2e" + kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators codeflare-operator-manager + cd .. + + - name: Setup Guided notebooks execution + run: | + echo "Installing papermill and dependencies..." + pip install poetry papermill ipython ipykernel + # Disable virtualenv due to problems using packaged in virtualenv in papermill + poetry config virtualenvs.create false + + echo "Installing SDK..." + poetry install --with test,docs + + - name: Run 0_basic_ray.ipynb + run: | + set -euo pipefail + + # Remove login/logout cells, as KinD doesn't support authentication using token + jq -r 'del(.cells[] | select(.source[] | contains("Create authentication object for user permissions")))' 0_basic_ray.ipynb > 0_basic_ray.ipynb.tmp && mv 0_basic_ray.ipynb.tmp 0_basic_ray.ipynb + jq -r 'del(.cells[] | select(.source[] | contains("auth.logout()")))' 0_basic_ray.ipynb > 0_basic_ray.ipynb.tmp && mv 0_basic_ray.ipynb.tmp 0_basic_ray.ipynb + # Set explicit namespace as SDK need it (currently) to resolve local queues + sed -i "s/head_memory=2,/head_memory=2, namespace='default',/" 0_basic_ray.ipynb + # Run notebook + poetry run papermill 0_basic_ray.ipynb 0_basic_ray_out.ipynb --log-output --execution-timeout 600 + working-directory: demo-notebooks/guided-demos + + - name: Print CodeFlare operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing CodeFlare operator logs" + kubectl logs -n openshift-operators --tail -1 -l app.kubernetes.io/name=codeflare-operator | tee ${TEMP_DIR}/codeflare-operator.log + + - name: Print Kueue operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing Kueue operator logs" + KUEUE_CONTROLLER_POD=$(kubectl get pods -n kueue-system | grep kueue-controller | awk '{print $1}') + kubectl logs -n kueue-system --tail -1 ${KUEUE_CONTROLLER_POD} | tee ${TEMP_DIR}/kueue.log + + - name: Print KubeRay operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing KubeRay operator logs" + kubectl logs -n ray-system --tail -1 -l app.kubernetes.io/name=kuberay | tee ${TEMP_DIR}/kuberay.log + + - name: Export all KinD pod logs + uses: ./common/github-actions/kind-export-logs + if: always() && steps.deploy.outcome == 'success' + with: + output-directory: ${TEMP_DIR} + + - name: Upload logs + uses: actions/upload-artifact@v4 
+ if: always() && steps.deploy.outcome == 'success' + with: + name: logs-0_basic_ray + retention-days: 10 + path: | + ${{ env.TEMP_DIR }}/**/*.log + + verify-1_cluster_job_client: + if: ${{ github.event.label.name == 'test-guided-notebooks' }} + runs-on: ubuntu-20.04-4core-gpu + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Checkout common repo code + uses: actions/checkout@v4 + with: + repository: 'project-codeflare/codeflare-common' + ref: 'main' + path: 'common' + + - name: Checkout CodeFlare operator repository + uses: actions/checkout@v4 + with: + repository: project-codeflare/codeflare-operator + path: codeflare-operator + + - name: Set Go + uses: actions/setup-go@v5 + with: + go-version-file: './codeflare-operator/go.mod' + cache-dependency-path: "./codeflare-operator/go.sum" + + - name: Set up gotestfmt + uses: gotesttools/gotestfmt-action@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up specific Python version + uses: actions/setup-python@v5 + with: + python-version: '3.9' + cache: 'pip' # caching pip dependencies + + - name: Setup NVidia GPU environment for KinD + uses: ./common/github-actions/nvidia-gpu-setup + + - name: Setup and start KinD cluster + uses: ./common/github-actions/kind + + - name: Install NVidia GPU operator for KinD + uses: ./common/github-actions/nvidia-gpu-operator + with: + enable-time-slicing: 'true' + + - name: Deploy CodeFlare stack + id: deploy + run: | + cd codeflare-operator + echo Setting up CodeFlare stack + make setup-e2e + echo Deploying CodeFlare operator + make deploy -e IMG="${CODEFLARE_OPERATOR_IMG}" -e ENV="e2e" + kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators codeflare-operator-manager + cd .. + + - name: Setup Guided notebooks execution + run: | + echo "Installing papermill and dependencies..." + pip install poetry papermill ipython ipykernel + # Disable virtualenv due to problems using packaged in virtualenv in papermill + poetry config virtualenvs.create false + + echo "Installing SDK..." + poetry install --with test,docs + + - name: Run 1_cluster_job_client.ipynb + run: | + set -euo pipefail + + # Remove login/logout cells, as KinD doesn't support authentication using token + jq -r 'del(.cells[] | select(.source[] | contains("Create authentication object for user permissions")))' 1_cluster_job_client.ipynb > 1_cluster_job_client.ipynb.tmp && mv 1_cluster_job_client.ipynb.tmp 1_cluster_job_client.ipynb + jq -r 'del(.cells[] | select(.source[] | contains("auth.logout()")))' 1_cluster_job_client.ipynb > 1_cluster_job_client.ipynb.tmp && mv 1_cluster_job_client.ipynb.tmp 1_cluster_job_client.ipynb + # Replace async logs with waiting for job to finish, async logs don't work properly in papermill + JOB_WAIT=$(jq -r '.' 
${GITHUB_WORKSPACE}/.github/resources/wait_for_job_cell.json) + jq --argjson job_wait "$JOB_WAIT" -r '(.cells[] | select(.source[] | contains("async for lines in client.tail_job_logs"))) |= $job_wait' 1_cluster_job_client.ipynb > 1_cluster_job_client.ipynb.tmp && mv 1_cluster_job_client.ipynb.tmp 1_cluster_job_client.ipynb + # Set explicit namespace as SDK need it (currently) to resolve local queues + sed -i "s/head_cpus=1,/head_cpus=1, namespace='default',/" 1_cluster_job_client.ipynb + # Run notebook + poetry run papermill 1_cluster_job_client.ipynb 1_cluster_job_client_out.ipynb --log-output --execution-timeout 1200 + working-directory: demo-notebooks/guided-demos + + - name: Print CodeFlare operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing CodeFlare operator logs" + kubectl logs -n openshift-operators --tail -1 -l app.kubernetes.io/name=codeflare-operator | tee ${TEMP_DIR}/codeflare-operator.log + + - name: Print Kueue operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing Kueue operator logs" + KUEUE_CONTROLLER_POD=$(kubectl get pods -n kueue-system | grep kueue-controller | awk '{print $1}') + kubectl logs -n kueue-system --tail -1 ${KUEUE_CONTROLLER_POD} | tee ${TEMP_DIR}/kueue.log + + - name: Print KubeRay operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing KubeRay operator logs" + kubectl logs -n ray-system --tail -1 -l app.kubernetes.io/name=kuberay | tee ${TEMP_DIR}/kuberay.log + + - name: Export all KinD pod logs + uses: ./common/github-actions/kind-export-logs + if: always() && steps.deploy.outcome == 'success' + with: + output-directory: ${TEMP_DIR} + + - name: Upload logs + uses: actions/upload-artifact@v4 + if: always() && steps.deploy.outcome == 'success' + with: + name: logs-1_cluster_job_client + retention-days: 10 + path: | + ${{ env.TEMP_DIR }}/**/*.log + + verify-2_basic_interactive: + if: ${{ github.event.label.name == 'test-guided-notebooks' }} + runs-on: ubuntu-20.04-4core-gpu + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Checkout common repo code + uses: actions/checkout@v4 + with: + repository: 'project-codeflare/codeflare-common' + ref: 'main' + path: 'common' + + - name: Checkout CodeFlare operator repository + uses: actions/checkout@v4 + with: + repository: project-codeflare/codeflare-operator + path: codeflare-operator + + - name: Set Go + uses: actions/setup-go@v5 + with: + go-version-file: './codeflare-operator/go.mod' + cache-dependency-path: "./codeflare-operator/go.sum" + + - name: Set up gotestfmt + uses: gotesttools/gotestfmt-action@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up specific Python version + uses: actions/setup-python@v5 + with: + python-version: '3.9' + cache: 'pip' # caching pip dependencies + + - name: Setup NVidia GPU environment for KinD + uses: ./common/github-actions/nvidia-gpu-setup + + - name: Setup and start KinD cluster + uses: ./common/github-actions/kind + + - name: Install NVidia GPU operator for KinD + uses: ./common/github-actions/nvidia-gpu-operator + with: + enable-time-slicing: 'true' + + - name: Deploy CodeFlare stack + id: deploy + run: | + cd codeflare-operator + echo Setting up CodeFlare stack + make setup-e2e + echo Deploying CodeFlare operator + make deploy -e IMG="${CODEFLARE_OPERATOR_IMG}" -e ENV="e2e" + kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators 
codeflare-operator-manager + cd .. + + - name: Install MINIO + run: | + kubectl apply -f ./tests/e2e/minio_deployment.yaml + kubectl wait --timeout=120s --for=condition=Available=true deployment -n default minio + + - name: Setup Guided notebooks execution + run: | + echo "Installing papermill and dependencies..." + pip install poetry papermill ipython ipykernel + # Disable virtualenv due to problems using packaged in virtualenv in papermill + poetry config virtualenvs.create false + + echo "Installing SDK..." + poetry install --with test,docs + + - name: Run 2_basic_interactive.ipynb + run: | + set -euo pipefail + + # Remove login/logout cells, as KinD doesn't support authentication using token + jq -r 'del(.cells[] | select(.source[] | contains("Create authentication object for user permissions")))' 2_basic_interactive.ipynb > 2_basic_interactive.ipynb.tmp && mv 2_basic_interactive.ipynb.tmp 2_basic_interactive.ipynb + jq -r 'del(.cells[] | select(.source[] | contains("auth.logout()")))' 2_basic_interactive.ipynb > 2_basic_interactive.ipynb.tmp && mv 2_basic_interactive.ipynb.tmp 2_basic_interactive.ipynb + # Rewrite cluster_uri() to local_client_url() to retrieve client URL available out of cluster, as the test is executed outside of cluster + sed -i "s/cluster_uri()/local_client_url()/" 2_basic_interactive.ipynb + # Set explicit namespace as SDK need it (currently) to resolve local queues + sed -i "s/head_cpus=1,/head_cpus=1, namespace='default',/" 2_basic_interactive.ipynb + # Add MINIO related modules to runtime environment + sed -i "s/\\\\\"transformers/\\\\\"s3fs\\\\\", \\\\\"pyarrow\\\\\", \\\\\"transformers/" 2_basic_interactive.ipynb + # Replace markdown cell with remote configuration for MINIO + MINIO_CONFIG=$(jq -r '.' ${GITHUB_WORKSPACE}/.github/resources/minio_remote_config_cell.json) + jq --argjson minio_config "$MINIO_CONFIG" -r '(.cells[] | select(.source[] | contains("Now that we are connected"))) |= $minio_config' 2_basic_interactive.ipynb > 2_basic_interactive.ipynb.tmp && mv 2_basic_interactive.ipynb.tmp 2_basic_interactive.ipynb + # Configure persistent storage for Ray trainer + sed -i -E "s/# run_config.*\)/, run_config=ray.get(get_minio_run_config.remote())/" 2_basic_interactive.ipynb + # Run notebook + poetry run papermill 2_basic_interactive.ipynb 2_basic_interactive_out.ipynb --log-output --execution-timeout 1200 + env: + GRPC_DNS_RESOLVER: "native" + working-directory: demo-notebooks/guided-demos + + - name: Print CodeFlare operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing CodeFlare operator logs" + kubectl logs -n openshift-operators --tail -1 -l app.kubernetes.io/name=codeflare-operator | tee ${TEMP_DIR}/codeflare-operator.log + + - name: Print Kueue operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing Kueue operator logs" + KUEUE_CONTROLLER_POD=$(kubectl get pods -n kueue-system | grep kueue-controller | awk '{print $1}') + kubectl logs -n kueue-system --tail -1 ${KUEUE_CONTROLLER_POD} | tee ${TEMP_DIR}/kueue.log + + - name: Print KubeRay operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing KubeRay operator logs" + kubectl logs -n ray-system --tail -1 -l app.kubernetes.io/name=kuberay | tee ${TEMP_DIR}/kuberay.log + + - name: Export all KinD pod logs + uses: ./common/github-actions/kind-export-logs + if: always() && steps.deploy.outcome == 'success' + with: + output-directory: ${TEMP_DIR} + + - name: Upload logs + uses: 
actions/upload-artifact@v4 + if: always() && steps.deploy.outcome == 'success' + with: + name: logs-2_basic_interactive + retention-days: 10 + path: | + ${{ env.TEMP_DIR }}/**/*.log diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 3f0f62e4..11f3a3b2 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -62,10 +62,12 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest', \n", + " head_cpus='500m',\n", + " head_memory=2,\n", " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=0,\n", " num_workers=2,\n", - " min_cpus=1,\n", + " min_cpus='250m',\n", " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 00576024..bd5d6965 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -44,10 +44,12 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", + " head_cpus=1,\n", + " head_memory=4,\n", " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=1,\n", " num_workers=2,\n", - " min_cpus=1,\n", + " min_cpus='250m',\n", " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 0692caa4..f95edc9d 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -60,13 +60,15 @@ "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", + " head_cpus=1,\n", + " head_memory=6,\n", " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=1,\n", " num_workers=2,\n", - " min_cpus=2,\n", - " max_cpus=2,\n", - " min_memory=8,\n", - " max_memory=8,\n", + " min_cpus='250m',\n", + " max_cpus=1,\n", + " min_memory=4,\n", + " max_memory=6,\n", " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", @@ -251,7 +253,17 @@ "\n", " ray_trainer = TorchTrainer(\n", " train_func,\n", - " scaling_config=ScalingConfig(num_workers=3, use_gpu=True),\n", + " scaling_config=ScalingConfig(\n", + " # num_workers = number of worker nodes with the ray head node included\n", + " num_workers=3,\n", + " use_gpu=True,\n", + " resources_per_worker={\n", + " \"CPU\": 1,\n", + " },\n", + " trainer_resources={\n", + " \"CPU\": 0,\n", + " }\n", + " )\n", " # Configure persistent storage that is accessible across \n", " # all worker nodes.\n", " # Uncomment and update the RunConfig below to include your storage details.\n", diff --git a/demo-notebooks/guided-demos/mnist_fashion.py b/demo-notebooks/guided-demos/mnist_fashion.py index 85cd6e64..ba5b2636 100644 --- 
a/demo-notebooks/guided-demos/mnist_fashion.py +++ b/demo-notebooks/guided-demos/mnist_fashion.py @@ -78,8 +78,16 @@ def train_func_distributed(): trainer = TorchTrainer( train_func_distributed, scaling_config=ScalingConfig( - num_workers=3, use_gpu=use_gpu - ), # num_workers = number of worker nodes with the ray head node included + # num_workers = number of worker nodes with the ray head node included + num_workers=3, + use_gpu=use_gpu, + resources_per_worker={ + "CPU": 1, + }, + trainer_resources={ + "CPU": 0, + }, + ), ) results = trainer.fit() diff --git a/tests/e2e/minio_deployment.yaml b/tests/e2e/minio_deployment.yaml index 86d4ef01..b2cdc54a 100644 --- a/tests/e2e/minio_deployment.yaml +++ b/tests/e2e/minio_deployment.yaml @@ -88,10 +88,7 @@ spec: mountPath: /data subPath: minio terminationMessagePolicy: File - image: >- - quay.io/minio/minio:RELEASE.2024-06-22T05-26-45Z - # In case of disconnected environment, use image digest instead of tag - # For example : /minio/minio@sha256:6b3abf2f59286b985bfde2b23e37230b466081eda5dccbf971524d54c8e406b5 + image: quay.io/minio/minio:RELEASE.2024-06-22T05-26-45Z args: - server - /data @@ -129,35 +126,3 @@ spec: sessionAffinity: None selector: app: minio ---- -kind: Route -apiVersion: route.openshift.io/v1 -metadata: - name: minio-api -spec: - to: - kind: Service - name: minio-service - weight: 100 - port: - targetPort: api - wildcardPolicy: None - tls: - termination: edge - insecureEdgeTerminationPolicy: Redirect ---- -kind: Route -apiVersion: route.openshift.io/v1 -metadata: - name: minio-ui -spec: - to: - kind: Service - name: minio-service - weight: 100 - port: - targetPort: ui - wildcardPolicy: None - tls: - termination: edge - insecureEdgeTerminationPolicy: Redirect From 62ce155d51b2f530a0bb035a3984fbb8216e1060 Mon Sep 17 00:00:00 2001 From: Kevin Date: Tue, 23 Jul 2024 13:38:19 -0400 Subject: [PATCH 302/496] mv demo nbs into src folder and add fn for cloning to current dir Signed-off-by: Kevin --- README.md | 4 +-- src/codeflare_sdk/__init__.py | 1 + .../additional-demos/hf_interactive.ipynb | 0 .../additional-demos/local_interactive.ipynb | 0 .../additional-demos/ray_job_client.ipynb | 0 .../guided-demos/0_basic_ray.ipynb | 0 .../guided-demos/1_cluster_job_client.ipynb | 0 .../guided-demos/2_basic_interactive.ipynb | 0 .../guided-demos/download_mnist_datasets.py | 0 .../demo-notebooks}/guided-demos/mnist.py | 0 .../guided-demos/mnist_disconnected.py | 0 .../guided-demos/mnist_fashion.py | 0 .../notebook-ex-outputs/0_basic_ray.ipynb | 0 .../1_cluster_job_client.ipynb | 0 .../2_basic_interactive.ipynb | 0 .../notebook-ex-outputs/interactivetest.yaml | 0 .../notebook-ex-outputs/jobtest.yaml | 0 .../guided-demos/notebook-ex-outputs/mnist.py | 0 .../notebook-ex-outputs/raytest.yaml | 0 .../notebook-ex-outputs/requirements.txt | 0 .../preview_nbs/0_basic_ray.ipynb | 0 .../preview_nbs/1_cluster_job_client.ipynb | 0 .../preview_nbs/2_basic_interactive.ipynb | 0 .../guided-demos/preview_nbs/mnist.py | 0 .../guided-demos/preview_nbs/requirements.txt | 0 .../guided-demos/requirements.txt | 0 src/codeflare_sdk/utils/demos.py | 27 +++++++++++++++++++ 27 files changed, 30 insertions(+), 2 deletions(-) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/additional-demos/hf_interactive.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/additional-demos/local_interactive.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/additional-demos/ray_job_client.ipynb (100%) rename {demo-notebooks => 
src/codeflare_sdk/demo-notebooks}/guided-demos/0_basic_ray.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/1_cluster_job_client.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/2_basic_interactive.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/download_mnist_datasets.py (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/mnist.py (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/mnist_disconnected.py (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/mnist_fashion.py (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/notebook-ex-outputs/interactivetest.yaml (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/notebook-ex-outputs/jobtest.yaml (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/notebook-ex-outputs/mnist.py (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/notebook-ex-outputs/raytest.yaml (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/notebook-ex-outputs/requirements.txt (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/preview_nbs/0_basic_ray.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/preview_nbs/1_cluster_job_client.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/preview_nbs/2_basic_interactive.ipynb (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/preview_nbs/mnist.py (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/preview_nbs/requirements.txt (100%) rename {demo-notebooks => src/codeflare_sdk/demo-notebooks}/guided-demos/requirements.txt (100%) create mode 100644 src/codeflare_sdk/utils/demos.py diff --git a/README.md b/README.md index 31177840..3b3c8b14 100644 --- a/README.md +++ b/README.md @@ -7,8 +7,8 @@ An intuitive, easy-to-use python interface for batch resource requesting, access For guided demos and basics walkthroughs, check out the following links: -- Guided demo notebooks available [here](https://github.com/project-codeflare/codeflare-sdk/tree/main/demo-notebooks/guided-demos), and copies of the notebooks with [expected output](https://github.com/project-codeflare/codeflare-sdk/tree/main/demo-notebooks/guided-demos/notebook-ex-outputs) also available -- Note that these notebooks will work with the latest `codeflare-sdk` PyPI release. 
For testing and experimentation with `main` branch, please use the [preview notebooks](https://github.com/project-codeflare/codeflare-sdk/tree/main/demo-notebooks/guided-demos/preview_nbs) +- Guided demo notebooks available [here](https://github.com/project-codeflare/codeflare-sdk/tree/main/src/demo-notebooks/guided-demos), and copies of the notebooks with [expected output](https://github.com/project-codeflare/codeflare-sdk/tree/main/src/demo-notebooks/guided-demos/notebook-ex-outputs) also available +- these demos can be copied into your current working directory when using the `codeflare-sdk` by using the `codeflare_sdk.copy_demo_nbs()` function - Additionally, we have a [video walkthrough](https://www.youtube.com/watch?v=U76iIfd9EmE) of these basic demos from June, 2023 Full documentation can be found [here](https://project-codeflare.github.io/codeflare-sdk/detailed-documentation) diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index 358ff4f2..0390a3d2 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -19,6 +19,7 @@ from .job import RayJobClient from .utils import generate_cert +from .utils.demos import copy_demo_nbs from importlib.metadata import version, PackageNotFoundError diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/src/codeflare_sdk/demo-notebooks/additional-demos/hf_interactive.ipynb similarity index 100% rename from demo-notebooks/additional-demos/hf_interactive.ipynb rename to src/codeflare_sdk/demo-notebooks/additional-demos/hf_interactive.ipynb diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/src/codeflare_sdk/demo-notebooks/additional-demos/local_interactive.ipynb similarity index 100% rename from demo-notebooks/additional-demos/local_interactive.ipynb rename to src/codeflare_sdk/demo-notebooks/additional-demos/local_interactive.ipynb diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/src/codeflare_sdk/demo-notebooks/additional-demos/ray_job_client.ipynb similarity index 100% rename from demo-notebooks/additional-demos/ray_job_client.ipynb rename to src/codeflare_sdk/demo-notebooks/additional-demos/ray_job_client.ipynb diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/0_basic_ray.ipynb similarity index 100% rename from demo-notebooks/guided-demos/0_basic_ray.ipynb rename to src/codeflare_sdk/demo-notebooks/guided-demos/0_basic_ray.ipynb diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/1_cluster_job_client.ipynb similarity index 100% rename from demo-notebooks/guided-demos/1_cluster_job_client.ipynb rename to src/codeflare_sdk/demo-notebooks/guided-demos/1_cluster_job_client.ipynb diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/2_basic_interactive.ipynb similarity index 100% rename from demo-notebooks/guided-demos/2_basic_interactive.ipynb rename to src/codeflare_sdk/demo-notebooks/guided-demos/2_basic_interactive.ipynb diff --git a/demo-notebooks/guided-demos/download_mnist_datasets.py b/src/codeflare_sdk/demo-notebooks/guided-demos/download_mnist_datasets.py similarity index 100% rename from demo-notebooks/guided-demos/download_mnist_datasets.py rename to src/codeflare_sdk/demo-notebooks/guided-demos/download_mnist_datasets.py diff --git a/demo-notebooks/guided-demos/mnist.py 
b/src/codeflare_sdk/demo-notebooks/guided-demos/mnist.py similarity index 100% rename from demo-notebooks/guided-demos/mnist.py rename to src/codeflare_sdk/demo-notebooks/guided-demos/mnist.py diff --git a/demo-notebooks/guided-demos/mnist_disconnected.py b/src/codeflare_sdk/demo-notebooks/guided-demos/mnist_disconnected.py similarity index 100% rename from demo-notebooks/guided-demos/mnist_disconnected.py rename to src/codeflare_sdk/demo-notebooks/guided-demos/mnist_disconnected.py diff --git a/demo-notebooks/guided-demos/mnist_fashion.py b/src/codeflare_sdk/demo-notebooks/guided-demos/mnist_fashion.py similarity index 100% rename from demo-notebooks/guided-demos/mnist_fashion.py rename to src/codeflare_sdk/demo-notebooks/guided-demos/mnist_fashion.py diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb similarity index 100% rename from demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb rename to src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb similarity index 100% rename from demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb rename to src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb similarity index 100% rename from demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb rename to src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml similarity index 100% rename from demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml rename to src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml similarity index 100% rename from demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml rename to src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/mnist.py b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/mnist.py similarity index 100% rename from demo-notebooks/guided-demos/notebook-ex-outputs/mnist.py rename to src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/mnist.py diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml similarity index 100% rename from demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml rename to src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt similarity index 100% rename from demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt rename to 
src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb similarity index 100% rename from demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb rename to src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb similarity index 100% rename from demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb rename to src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb similarity index 100% rename from demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb rename to src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb diff --git a/demo-notebooks/guided-demos/preview_nbs/mnist.py b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/mnist.py similarity index 100% rename from demo-notebooks/guided-demos/preview_nbs/mnist.py rename to src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/mnist.py diff --git a/demo-notebooks/guided-demos/preview_nbs/requirements.txt b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/requirements.txt similarity index 100% rename from demo-notebooks/guided-demos/preview_nbs/requirements.txt rename to src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/requirements.txt diff --git a/demo-notebooks/guided-demos/requirements.txt b/src/codeflare_sdk/demo-notebooks/guided-demos/requirements.txt similarity index 100% rename from demo-notebooks/guided-demos/requirements.txt rename to src/codeflare_sdk/demo-notebooks/guided-demos/requirements.txt diff --git a/src/codeflare_sdk/utils/demos.py b/src/codeflare_sdk/utils/demos.py new file mode 100644 index 00000000..2571a23d --- /dev/null +++ b/src/codeflare_sdk/utils/demos.py @@ -0,0 +1,27 @@ +import pathlib +import shutil + +package_dir = pathlib.Path(__file__).parent.parent.resolve() +demo_dir = f"{package_dir}/demo-notebooks" + + +def copy_demo_nbs(dir: str = "./demo-notebooks", overwrite: bool = False): + """ + Copy the demo notebooks from the package to the current working directory + + overwrite=True will overwrite any files that exactly match files written by copy_demo_nbs in the target directory. + Any files that exist in the directory that don't match these values will remain untouched. + + Args: + dir (str): The directory to copy the demo notebooks to. Defaults to "./demo-notebooks". overwrite (bool): + overwrite (bool): Whether to overwrite files in the directory if it already exists. Defaults to False. + Raises: + FileExistsError: If the directory already exists. + """ + # does dir exist already? + if overwrite is False and pathlib.Path(dir).exists(): + raise FileExistsError( + f"Directory {dir} already exists. Please remove it or provide a different location." 
+ ) + + shutil.copytree(demo_dir, dir, dirs_exist_ok=True) From 029683fc1953728428ed196f66dc18e7b63c6d36 Mon Sep 17 00:00:00 2001 From: Shilpa Chugh Date: Thu, 18 Jul 2024 13:03:00 +0530 Subject: [PATCH 303/496] Make ray image parameter as optional --- docs/cluster-configuration.md | 3 ++- .../additional-demos/hf_interactive.ipynb | 6 +++--- .../additional-demos/local_interactive.ipynb | 6 +++--- .../additional-demos/ray_job_client.ipynb | 13 ++++++++++++- .../demo-notebooks/guided-demos/0_basic_ray.ipynb | 6 +++--- .../guided-demos/1_cluster_job_client.ipynb | 13 ++++++++++++- .../guided-demos/2_basic_interactive.ipynb | 14 +++++++------- .../notebook-ex-outputs/0_basic_ray.ipynb | 6 +++--- .../notebook-ex-outputs/1_cluster_job_client.ipynb | 13 ++++++++++++- .../notebook-ex-outputs/2_basic_interactive.ipynb | 6 +++--- .../guided-demos/preview_nbs/0_basic_ray.ipynb | 6 +++--- .../preview_nbs/1_cluster_job_client.ipynb | 13 ++++++++++++- .../preview_nbs/2_basic_interactive.ipynb | 6 +++--- 13 files changed, 78 insertions(+), 33 deletions(-) diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index c3af6343..40132894 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -19,11 +19,12 @@ cluster = Cluster(ClusterConfiguration( min_memory=2, # Default 2 max_memory=2, # Default 2 num_gpus=0, # Default 0 - image="quay.io/rhoai/ray:2.23.0-py39-cu121", # Mandatory Field + # image="", # Optional Field machine_types=["m5.xlarge", "g4dn.xlarge"], labels={"exampleLabel": "example", "secondLabel": "example"}, )) ``` +Note: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. If you have your own Ray image which suits your purposes, specify it in image field to override the default image. The `labels={"exampleLabel": "example"}` parameter can be used to apply additional labels to the RayCluster resource. diff --git a/src/codeflare_sdk/demo-notebooks/additional-demos/hf_interactive.ipynb b/src/codeflare_sdk/demo-notebooks/additional-demos/hf_interactive.ipynb index 1c9f7cd1..a8ab8946 100644 --- a/src/codeflare_sdk/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/src/codeflare_sdk/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -68,8 +68,8 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding Ray Cluster).\n", "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." 
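With `image` now optional, a cluster definition can either rely on that default community image or override it explicitly. A minimal sketch of both styles, using parameter names from the notebook cells in this patch (the custom image reference is a hypothetical placeholder, and the resource values are illustrative):

```python
from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration

# Relies on the default community image (quay.io/rhoai/ray:2.23.0-py39-cu121).
cluster = Cluster(ClusterConfiguration(
    name="raytest",
    num_workers=2,
    min_cpus=1,
    max_cpus=1,
    min_memory=4,
    max_memory=4,
))

# Overrides the default by filling in the optional image field.
custom_cluster = Cluster(ClusterConfiguration(
    name="raytest-custom",
    num_workers=2,
    image="quay.io/example/my-ray:2.23.0",  # hypothetical custom Ray image
))
```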
] }, { @@ -98,7 +98,7 @@ " max_cpus=8, \n", " min_memory=16, \n", " max_memory=16, \n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", " ))" diff --git a/src/codeflare_sdk/demo-notebooks/additional-demos/local_interactive.ipynb b/src/codeflare_sdk/demo-notebooks/additional-demos/local_interactive.ipynb index 8ea08834..d7e4ac8c 100644 --- a/src/codeflare_sdk/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/src/codeflare_sdk/demo-notebooks/additional-demos/local_interactive.ipynb @@ -35,8 +35,8 @@ "metadata": {}, "source": [ "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, { @@ -61,7 +61,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", " ))" diff --git a/src/codeflare_sdk/demo-notebooks/additional-demos/ray_job_client.ipynb b/src/codeflare_sdk/demo-notebooks/additional-demos/ray_job_client.ipynb index 4b9434bc..28efde9a 100644 --- a/src/codeflare_sdk/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/src/codeflare_sdk/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -35,6 +35,16 @@ "auth.login()" ] }, + { + "cell_type": "markdown", + "id": "18de2d65", + "metadata": {}, + "source": [ + "\n", + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + ] + }, { "cell_type": "code", "execution_count": null, @@ -52,8 +62,9 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/0_basic_ray.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/0_basic_ray.ipynb index 11f3a3b2..bbade0cc 100644 --- a/src/codeflare_sdk/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/src/codeflare_sdk/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -47,8 +47,8 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." 
+ "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, { @@ -71,7 +71,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index bd5d6965..61e360ef 100644 --- a/src/codeflare_sdk/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/src/codeflare_sdk/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -34,6 +34,17 @@ "auth.login()" ] }, + { + "cell_type": "markdown", + "id": "bc27f84c", + "metadata": {}, + "source": [ + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", + "\n", + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + ] + }, { "cell_type": "code", "execution_count": null, @@ -53,7 +64,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/2_basic_interactive.ipynb index f95edc9d..5ce94a21 100644 --- a/src/codeflare_sdk/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/src/codeflare_sdk/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -44,8 +44,8 @@ "source": [ "Once again, let's start by running through the same cluster setup as before:\n", "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." 
] }, { @@ -65,11 +65,11 @@ " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=1,\n", " num_workers=2,\n", - " min_cpus='250m',\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=6,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " min_cpus=2,\n", + " max_cpus=2,\n", + " min_memory=8,\n", + " max_memory=8,\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 8338ce74..31ffb730 100644 --- a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -47,8 +47,8 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, { @@ -77,7 +77,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index e354f36d..c117c239 100644 --- a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -34,6 +34,17 @@ "auth.login()" ] }, + { + "cell_type": "markdown", + "id": "bc27f84c", + "metadata": {}, + "source": [ + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", + "\n", + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." 
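These notebooks are the same ones packaged into the SDK by patch 302 above, so the `copy_demo_nbs` helper introduced there is the intended way to pull a local, editable copy of them. A minimal usage sketch (the custom target directory is illustrative):

```python
from codeflare_sdk import copy_demo_nbs

# Copy the packaged demo notebooks into ./demo-notebooks (the default target).
copy_demo_nbs()

# Copy into a custom directory, overwriting files previously written by the helper;
# without overwrite=True, an existing target directory raises FileExistsError.
copy_demo_nbs(dir="./my-demos", overwrite=True)  # "./my-demos" is an illustrative path
```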
+ ] + }, { "cell_type": "code", "execution_count": null, @@ -51,7 +62,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 4e28e53b..56640a5b 100644 --- a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -44,8 +44,8 @@ "source": [ "Once again, let's start by running through the same cluster setup as before:\n", "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, { @@ -75,7 +75,7 @@ " max_cpus=2,\n", " min_memory=8,\n", " max_memory=8,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 8923759a..edf4fd5f 100644 --- a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -47,8 +47,8 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." 
] }, { @@ -69,7 +69,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index 20191434..692c4d37 100644 --- a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -34,6 +34,17 @@ "auth.login()" ] }, + { + "cell_type": "markdown", + "id": "bc27f84c", + "metadata": {}, + "source": [ + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", + "\n", + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + ] + }, { "cell_type": "code", "execution_count": null, @@ -51,7 +62,7 @@ " max_cpus=1,\n", " min_memory=4,\n", " max_memory=4,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources\n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 776add1a..91605499 100644 --- a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -44,8 +44,8 @@ "source": [ "Once again, let's start by running through the same cluster setup as before:\n", "\n", - "NOTE: We must specify the `image` which will be used in our RayCluster, we recommend you bring your own image which suits your purposes. \n", - "The example here is a community image." + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." 
] }, { @@ -67,7 +67,7 @@ " max_cpus=2,\n", " min_memory=8,\n", " max_memory=8,\n", - " image=\"quay.io/rhoai/ray:2.23.0-py39-cu121\",\n", + " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" From 5262e26aa4c828341c4df91e34bec9fc7b51fb44 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 26 Jul 2024 16:13:46 +0100 Subject: [PATCH 304/496] Fixed automated documentation --- .github/workflows/release.yaml | 7 ++++--- .../additional-demos/hf_interactive.ipynb | 0 .../additional-demos/local_interactive.ipynb | 0 .../additional-demos/ray_job_client.ipynb | 0 .../guided-demos/0_basic_ray.ipynb | 0 .../guided-demos/1_cluster_job_client.ipynb | 0 .../guided-demos/2_basic_interactive.ipynb | 0 .../guided-demos/download_mnist_datasets.py | 0 .../guided-demos/mnist.py | 0 .../guided-demos/mnist_disconnected.py | 0 .../guided-demos/mnist_fashion.py | 0 .../guided-demos/notebook-ex-outputs/0_basic_ray.ipynb | 0 .../notebook-ex-outputs/1_cluster_job_client.ipynb | 0 .../notebook-ex-outputs/2_basic_interactive.ipynb | 0 .../guided-demos/notebook-ex-outputs/interactivetest.yaml | 0 .../guided-demos/notebook-ex-outputs/jobtest.yaml | 0 .../guided-demos/notebook-ex-outputs/mnist.py | 0 .../guided-demos/notebook-ex-outputs/raytest.yaml | 0 .../guided-demos/notebook-ex-outputs/requirements.txt | 0 .../guided-demos/preview_nbs/0_basic_ray.ipynb | 0 .../guided-demos/preview_nbs/1_cluster_job_client.ipynb | 0 .../guided-demos/preview_nbs/2_basic_interactive.ipynb | 0 .../guided-demos/preview_nbs/mnist.py | 0 .../guided-demos/preview_nbs/requirements.txt | 0 .../guided-demos/requirements.txt | 0 25 files changed, 4 insertions(+), 3 deletions(-) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/additional-demos/hf_interactive.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/additional-demos/local_interactive.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/additional-demos/ray_job_client.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/0_basic_ray.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/1_cluster_job_client.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/2_basic_interactive.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/download_mnist_datasets.py (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/mnist.py (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/mnist_disconnected.py (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/mnist_fashion.py (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/notebook-ex-outputs/interactivetest.yaml (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/notebook-ex-outputs/jobtest.yaml (100%) rename {src/codeflare_sdk/demo-notebooks => 
demo-notebooks}/guided-demos/notebook-ex-outputs/mnist.py (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/notebook-ex-outputs/raytest.yaml (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/notebook-ex-outputs/requirements.txt (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/preview_nbs/0_basic_ray.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/preview_nbs/1_cluster_job_client.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/preview_nbs/2_basic_interactive.ipynb (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/preview_nbs/mnist.py (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/preview_nbs/requirements.txt (100%) rename {src/codeflare_sdk/demo-notebooks => demo-notebooks}/guided-demos/requirements.txt (100%) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 0d589d95..641a148d 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -50,11 +50,12 @@ jobs: run: poetry version "${{ github.event.inputs.release-version }}" - name: Run poetry install run: poetry install --with docs - - name: Run poetry build - run: poetry build - name: Create new documentation run: poetry run pdoc --html -o docs/detailed-documentation src/codeflare_sdk && pushd docs/detailed-documentation && rm -rf cluster job utils && mv codeflare_sdk/* . && rm -rf codeflare_sdk && popd && find docs/detailed-documentation -type f -name "*.html" -exec bash -c "echo '' >> {}" \; - + - name: Copy demo notebooks into SDK package + run: cp demo-notebooks src/codeflare_sdk/demo-notebooks + - name: Run poetry build + run: poetry build - name: Commit changes in docs uses: stefanzweifel/git-auto-commit-action@v4 with: diff --git a/src/codeflare_sdk/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/additional-demos/hf_interactive.ipynb rename to demo-notebooks/additional-demos/hf_interactive.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/additional-demos/local_interactive.ipynb rename to demo-notebooks/additional-demos/local_interactive.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/additional-demos/ray_job_client.ipynb rename to demo-notebooks/additional-demos/ray_job_client.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/0_basic_ray.ipynb rename to demo-notebooks/guided-demos/0_basic_ray.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/1_cluster_job_client.ipynb rename to demo-notebooks/guided-demos/1_cluster_job_client.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/2_basic_interactive.ipynb 
b/demo-notebooks/guided-demos/2_basic_interactive.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/2_basic_interactive.ipynb rename to demo-notebooks/guided-demos/2_basic_interactive.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/download_mnist_datasets.py b/demo-notebooks/guided-demos/download_mnist_datasets.py similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/download_mnist_datasets.py rename to demo-notebooks/guided-demos/download_mnist_datasets.py diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/mnist.py b/demo-notebooks/guided-demos/mnist.py similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/mnist.py rename to demo-notebooks/guided-demos/mnist.py diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/mnist_disconnected.py b/demo-notebooks/guided-demos/mnist_disconnected.py similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/mnist_disconnected.py rename to demo-notebooks/guided-demos/mnist_disconnected.py diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/mnist_fashion.py b/demo-notebooks/guided-demos/mnist_fashion.py similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/mnist_fashion.py rename to demo-notebooks/guided-demos/mnist_fashion.py diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb rename to demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb rename to demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb rename to demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml rename to demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml rename to demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/mnist.py b/demo-notebooks/guided-demos/notebook-ex-outputs/mnist.py similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/mnist.py rename to demo-notebooks/guided-demos/notebook-ex-outputs/mnist.py diff --git 
a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml rename to demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt rename to demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb rename to demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb rename to demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb rename to demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/mnist.py b/demo-notebooks/guided-demos/preview_nbs/mnist.py similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/mnist.py rename to demo-notebooks/guided-demos/preview_nbs/mnist.py diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/requirements.txt b/demo-notebooks/guided-demos/preview_nbs/requirements.txt similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/preview_nbs/requirements.txt rename to demo-notebooks/guided-demos/preview_nbs/requirements.txt diff --git a/src/codeflare_sdk/demo-notebooks/guided-demos/requirements.txt b/demo-notebooks/guided-demos/requirements.txt similarity index 100% rename from src/codeflare_sdk/demo-notebooks/guided-demos/requirements.txt rename to demo-notebooks/guided-demos/requirements.txt From 184dc0f782ab5fd820ba5fbfbe68a8d8790a279b Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 26 Jul 2024 16:48:58 +0100 Subject: [PATCH 305/496] Fixed copy demo notebooks in automated documentation --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 641a148d..15063f23 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -53,7 +53,7 @@ jobs: - name: Create new documentation run: poetry run pdoc --html -o docs/detailed-documentation src/codeflare_sdk && pushd docs/detailed-documentation && rm -rf cluster job utils && mv codeflare_sdk/* . 
&& rm -rf codeflare_sdk && popd && find docs/detailed-documentation -type f -name "*.html" -exec bash -c "echo '' >> {}" \; - name: Copy demo notebooks into SDK package - run: cp demo-notebooks src/codeflare_sdk/demo-notebooks + run: cp -r demo-notebooks src/codeflare_sdk/demo-notebooks - name: Run poetry build run: poetry build - name: Commit changes in docs From 1b6bbc36c65780be8e5e86830ed19f7b9be6da9f Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Fri, 26 Jul 2024 15:51:55 +0000 Subject: [PATCH 306/496] Changes in docs for release: v0.18.0 --- .../cluster/cluster.html | 327 ++++-------- .../cluster/config.html | 497 +++++++++++++++--- .../detailed-documentation/cluster/model.html | 25 +- docs/detailed-documentation/index.html | 11 +- docs/detailed-documentation/utils/demos.html | 138 +++++ .../utils/generate_yaml.html | 456 +++++++++------- docs/detailed-documentation/utils/index.html | 5 + .../utils/pretty_print.html | 4 +- 8 files changed, 974 insertions(+), 489 deletions(-) create mode 100644 docs/detailed-documentation/utils/demos.html diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html index 1e49cf6d..5c8688eb 100644 --- a/docs/detailed-documentation/cluster/cluster.html +++ b/docs/detailed-documentation/cluster/cluster.html @@ -61,6 +61,7 @@

    Module codeflare_sdk.cluster.cluster

    from ..utils import pretty_print from ..utils.generate_yaml import ( generate_appwrapper, + head_worker_gpu_count_from_cluster, ) from ..utils.kube_api_helpers import _kube_api_error_handling from ..utils.generate_yaml import is_openshift_cluster @@ -135,16 +136,6 @@

    Module codeflare_sdk.cluster.cluster

    ) return self._job_submission_client - def validate_image_config(self): - """ - Validates that the image configuration is not empty. - - :param image: The image string to validate - :raises ValueError: If the image is not specified - """ - if self.config.image == "" or self.config.image == None: - raise ValueError("Image must be specified in the ClusterConfiguration") - def create_app_wrapper(self): """ Called upon cluster object creation, creates an AppWrapper yaml based on @@ -160,51 +151,7 @@

    Module codeflare_sdk.cluster.cluster

    f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." ) - # Validate image configuration - self.validate_image_config() - - # Before attempting to create the cluster AW, let's evaluate the ClusterConfig - - name = self.config.name - namespace = self.config.namespace - head_cpus = self.config.head_cpus - head_memory = self.config.head_memory - head_gpus = self.config.head_gpus - min_cpu = self.config.min_cpus - max_cpu = self.config.max_cpus - min_memory = self.config.min_memory - max_memory = self.config.max_memory - gpu = self.config.num_gpus - workers = self.config.num_workers - template = self.config.template - image = self.config.image - appwrapper = self.config.appwrapper - env = self.config.envs - image_pull_secrets = self.config.image_pull_secrets - write_to_file = self.config.write_to_file - local_queue = self.config.local_queue - labels = self.config.labels - return generate_appwrapper( - name=name, - namespace=namespace, - head_cpus=head_cpus, - head_memory=head_memory, - head_gpus=head_gpus, - min_cpu=min_cpu, - max_cpu=max_cpu, - min_memory=min_memory, - max_memory=max_memory, - gpu=gpu, - workers=workers, - template=template, - image=image, - appwrapper=appwrapper, - env=env, - image_pull_secrets=image_pull_secrets, - write_to_file=write_to_file, - local_queue=local_queue, - labels=labels, - ) + return generate_appwrapper(self) # creates a new cluster with the provided or default spec def up(self): @@ -350,7 +297,7 @@

    Module codeflare_sdk.cluster.cluster

    if print_to_console: # overriding the number of gpus with requested - cluster.worker_gpu = self.config.num_gpus + _, cluster.worker_gpu = head_worker_gpu_count_from_cluster(self) pretty_print.print_cluster_status(cluster) elif print_to_console: if status == CodeFlareClusterStatus.UNKNOWN: @@ -488,6 +435,29 @@

    Module codeflare_sdk.cluster.cluster

    """ return self.job_client.get_job_logs(job_id) + @staticmethod + def _head_worker_extended_resources_from_rc_dict(rc: Dict) -> Tuple[dict, dict]: + head_extended_resources, worker_extended_resources = {}, {} + for resource in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"].keys(): + if resource in ["memory", "cpu"]: + continue + worker_extended_resources[resource] = rc["spec"]["workerGroupSpecs"][0][ + "template" + ]["spec"]["containers"][0]["resources"]["limits"][resource] + + for resource in rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["limits"].keys(): + if resource in ["memory", "cpu"]: + continue + head_extended_resources[resource] = rc["spec"]["headGroupSpec"]["template"][ + "spec" + ]["containers"][0]["resources"]["limits"][resource] + + return head_extended_resources, worker_extended_resources + def from_k8_cluster_object( rc, appwrapper=True, @@ -501,28 +471,30 @@

    Module codeflare_sdk.cluster.cluster

    else [] ) + ( + head_extended_resources, + worker_extended_resources, + ) = Cluster._head_worker_extended_resources_from_rc_dict(rc) + cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], machine_types=machine_types, num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["requests"]["cpu"], - max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["cpu"], - min_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["requests"]["memory"], - max_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_memory_requests=rc["spec"]["workerGroupSpecs"][0]["template"][ + "spec" + ]["containers"][0]["resources"]["requests"]["memory"], + worker_memory_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["memory"], - num_gpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["nvidia.com/gpu"] - ), + worker_extended_resource_requests=worker_extended_resources, + head_extended_resource_requests=head_extended_resources, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], @@ -903,6 +875,11 @@

    Module codeflare_sdk.cluster.cluster

    protocol = "https" dashboard_url = f"{protocol}://{ingress.spec.rules[0].host}" + ( + head_extended_resources, + worker_extended_resources, + ) = Cluster._head_worker_extended_resources_from_rc_dict(rc) + return RayCluster( name=rc["metadata"]["name"], status=status, @@ -917,7 +894,7 @@

    Module codeflare_sdk.cluster.cluster

    worker_cpu=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["resources"]["limits"]["cpu"], - worker_gpu=0, # hard to detect currently how many gpus, can override it with what the user asked for + worker_extended_resources=worker_extended_resources, namespace=rc["metadata"]["namespace"], head_cpus=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ "resources" @@ -925,9 +902,7 @@

    Module codeflare_sdk.cluster.cluster

    head_mem=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ "resources" ]["limits"]["memory"], - head_gpu=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["nvidia.com/gpu"], + head_extended_resources=head_extended_resources, dashboard=dashboard_url, ) @@ -949,15 +924,15 @@

    Module codeflare_sdk.cluster.cluster

    name=cluster.config.name, status=cluster.status(print_to_console=False)[0], workers=cluster.config.num_workers, - worker_mem_min=cluster.config.min_memory, - worker_mem_max=cluster.config.max_memory, - worker_cpu=cluster.config.min_cpus, - worker_gpu=cluster.config.num_gpus, + worker_mem_min=cluster.config.worker_memory_requests, + worker_mem_max=cluster.config.worker_memory_limits, + worker_cpu=cluster.config.worker_cpu_requests, + worker_extended_resources=cluster.config.worker_extended_resource_requests, namespace=cluster.config.namespace, dashboard=cluster.cluster_dashboard_uri(), head_cpus=cluster.config.head_cpus, head_mem=cluster.config.head_memory, - head_gpu=cluster.config.head_gpus, + head_extended_resources=cluster.config.head_extended_resource_requests, ) if ray.status == CodeFlareClusterStatus.READY: ray.status = RayClusterStatus.READY @@ -1181,16 +1156,6 @@

    Classes

    ) return self._job_submission_client - def validate_image_config(self): - """ - Validates that the image configuration is not empty. - - :param image: The image string to validate - :raises ValueError: If the image is not specified - """ - if self.config.image == "" or self.config.image == None: - raise ValueError("Image must be specified in the ClusterConfiguration") - def create_app_wrapper(self): """ Called upon cluster object creation, creates an AppWrapper yaml based on @@ -1206,51 +1171,7 @@

    Classes

    f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." ) - # Validate image configuration - self.validate_image_config() - - # Before attempting to create the cluster AW, let's evaluate the ClusterConfig - - name = self.config.name - namespace = self.config.namespace - head_cpus = self.config.head_cpus - head_memory = self.config.head_memory - head_gpus = self.config.head_gpus - min_cpu = self.config.min_cpus - max_cpu = self.config.max_cpus - min_memory = self.config.min_memory - max_memory = self.config.max_memory - gpu = self.config.num_gpus - workers = self.config.num_workers - template = self.config.template - image = self.config.image - appwrapper = self.config.appwrapper - env = self.config.envs - image_pull_secrets = self.config.image_pull_secrets - write_to_file = self.config.write_to_file - local_queue = self.config.local_queue - labels = self.config.labels - return generate_appwrapper( - name=name, - namespace=namespace, - head_cpus=head_cpus, - head_memory=head_memory, - head_gpus=head_gpus, - min_cpu=min_cpu, - max_cpu=max_cpu, - min_memory=min_memory, - max_memory=max_memory, - gpu=gpu, - workers=workers, - template=template, - image=image, - appwrapper=appwrapper, - env=env, - image_pull_secrets=image_pull_secrets, - write_to_file=write_to_file, - local_queue=local_queue, - labels=labels, - ) + return generate_appwrapper(self) # creates a new cluster with the provided or default spec def up(self): @@ -1396,7 +1317,7 @@

    Classes

    if print_to_console: # overriding the number of gpus with requested - cluster.worker_gpu = self.config.num_gpus + _, cluster.worker_gpu = head_worker_gpu_count_from_cluster(self) pretty_print.print_cluster_status(cluster) elif print_to_console: if status == CodeFlareClusterStatus.UNKNOWN: @@ -1534,6 +1455,29 @@

    Classes

    """ return self.job_client.get_job_logs(job_id) + @staticmethod + def _head_worker_extended_resources_from_rc_dict(rc: Dict) -> Tuple[dict, dict]: + head_extended_resources, worker_extended_resources = {}, {} + for resource in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"].keys(): + if resource in ["memory", "cpu"]: + continue + worker_extended_resources[resource] = rc["spec"]["workerGroupSpecs"][0][ + "template" + ]["spec"]["containers"][0]["resources"]["limits"][resource] + + for resource in rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["limits"].keys(): + if resource in ["memory", "cpu"]: + continue + head_extended_resources[resource] = rc["spec"]["headGroupSpec"]["template"][ + "spec" + ]["containers"][0]["resources"]["limits"][resource] + + return head_extended_resources, worker_extended_resources + def from_k8_cluster_object( rc, appwrapper=True, @@ -1547,28 +1491,30 @@

    Classes

    else [] ) + ( + head_extended_resources, + worker_extended_resources, + ) = Cluster._head_worker_extended_resources_from_rc_dict(rc) + cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], machine_types=machine_types, num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["requests"]["cpu"], - max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["cpu"], - min_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["requests"]["memory"], - max_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_memory_requests=rc["spec"]["workerGroupSpecs"][0]["template"][ + "spec" + ]["containers"][0]["resources"]["requests"]["memory"], + worker_memory_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["memory"], - num_gpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["nvidia.com/gpu"] - ), + worker_extended_resource_requests=worker_extended_resources, + head_extended_resource_requests=head_extended_resources, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], @@ -1749,51 +1695,7 @@

    Methods

    f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." ) - # Validate image configuration - self.validate_image_config() - - # Before attempting to create the cluster AW, let's evaluate the ClusterConfig - - name = self.config.name - namespace = self.config.namespace - head_cpus = self.config.head_cpus - head_memory = self.config.head_memory - head_gpus = self.config.head_gpus - min_cpu = self.config.min_cpus - max_cpu = self.config.max_cpus - min_memory = self.config.min_memory - max_memory = self.config.max_memory - gpu = self.config.num_gpus - workers = self.config.num_workers - template = self.config.template - image = self.config.image - appwrapper = self.config.appwrapper - env = self.config.envs - image_pull_secrets = self.config.image_pull_secrets - write_to_file = self.config.write_to_file - local_queue = self.config.local_queue - labels = self.config.labels - return generate_appwrapper( - name=name, - namespace=namespace, - head_cpus=head_cpus, - head_memory=head_memory, - head_gpus=head_gpus, - min_cpu=min_cpu, - max_cpu=max_cpu, - min_memory=min_memory, - max_memory=max_memory, - gpu=gpu, - workers=workers, - template=template, - image=image, - appwrapper=appwrapper, - env=env, - image_pull_secrets=image_pull_secrets, - write_to_file=write_to_file, - local_queue=local_queue, - labels=labels, - )
    + return generate_appwrapper(self)
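With the configuration consolidated on the Cluster object, producing the AppWrapper or RayCluster YAML is a single call. A usage sketch, assuming you are authenticated against a cluster and the namespace exists (with write_to_file=False the YAML comes back as a string):

    from codeflare_sdk.cluster.cluster import Cluster
    from codeflare_sdk.cluster.config import ClusterConfiguration

    cluster = Cluster(ClusterConfiguration(
        name="demo",
        namespace="default",   # assumed namespace
        write_to_file=False,
    ))
    resource_yaml = cluster.create_app_wrapper()  # internally just generate_appwrapper(self)
    print(resource_yaml)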
    @@ -1868,28 +1770,30 @@

    Methods

    else [] ) + ( + head_extended_resources, + worker_extended_resources, + ) = Cluster._head_worker_extended_resources_from_rc_dict(rc) + cluster_config = ClusterConfiguration( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], machine_types=machine_types, num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - min_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["requests"]["cpu"], - max_cpus=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["cpu"], - min_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["requests"]["memory"], - max_memory=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_memory_requests=rc["spec"]["workerGroupSpecs"][0]["template"][ + "spec" + ]["containers"][0]["resources"]["requests"]["memory"], + worker_memory_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["memory"], - num_gpus=int( - rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["nvidia.com/gpu"] - ), + worker_extended_resource_requests=worker_extended_resources, + head_extended_resource_requests=head_extended_resources, image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ 0 ]["image"], @@ -2063,7 +1967,7 @@

    Methods

    if print_to_console: # overriding the number of gpus with requested - cluster.worker_gpu = self.config.num_gpus + _, cluster.worker_gpu = head_worker_gpu_count_from_cluster(self) pretty_print.print_cluster_status(cluster) elif print_to_console: if status == CodeFlareClusterStatus.UNKNOWN: @@ -2124,28 +2028,6 @@

    Methods

    return _kube_api_error_handling(e)
    -
    -def validate_image_config(self) -
    -
    -

    Validates that the image configuration is not empty.

    -

:param image: The image string to validate
-:raises ValueError: If the image is not specified

    -
    - -Expand source code - -
    def validate_image_config(self):
    -    """
    -    Validates that the image configuration is not empty.
    -
    -    :param image: The image string to validate
    -    :raises ValueError: If the image is not specified
    -    """
    -    if self.config.image == "" or self.config.image == None:
    -        raise ValueError("Image must be specified in the ClusterConfiguration")
    -
    -
    def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True)
    @@ -2234,7 +2116,6 @@

    local_client_url
  • status
  • up
  • -
  • validate_image_config
  • wait_ready
  • diff --git a/docs/detailed-documentation/cluster/config.html b/docs/detailed-documentation/cluster/config.html index 60b6dc95..87ad3b77 100644 --- a/docs/detailed-documentation/cluster/config.html +++ b/docs/detailed-documentation/cluster/config.html @@ -50,67 +50,225 @@

    Module codeflare_sdk.cluster.config

    Cluster object. """ -from dataclasses import dataclass, field import pathlib -import typing +import warnings +from dataclasses import dataclass, field, fields +from typing import Dict, List, Optional, Union, get_args, get_origin dir = pathlib.Path(__file__).parent.parent.resolve() +# https://docs.ray.io/en/latest/ray-core/scheduling/accelerators.html +DEFAULT_RESOURCE_MAPPING = { + "nvidia.com/gpu": "GPU", + "intel.com/gpu": "GPU", + "amd.com/gpu": "GPU", + "aws.amazon.com/neuroncore": "neuron_cores", + "google.com/tpu": "TPU", + "habana.ai/gaudi": "HPU", + "huawei.com/Ascend910": "NPU", + "huawei.com/Ascend310": "NPU", +} + @dataclass class ClusterConfiguration: """ This dataclass is used to specify resource requirements and other details, and is passed in as an argument when creating a Cluster object. + + Attributes: + - name: The name of the cluster. + - namespace: The namespace in which the cluster should be created. + - head_info: A list of strings containing information about the head node. + - head_cpus: The number of CPUs to allocate to the head node. + - head_memory: The amount of memory to allocate to the head node. + - head_gpus: The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests) + - head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} + - machine_types: A list of machine types to use for the cluster. + - min_cpus: The minimum number of CPUs to allocate to each worker. + - max_cpus: The maximum number of CPUs to allocate to each worker. + - num_workers: The number of workers to create. + - min_memory: The minimum amount of memory to allocate to each worker. + - max_memory: The maximum amount of memory to allocate to each worker. + - num_gpus: The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests) + - template: The path to the template file to use for the cluster. + - appwrapper: A boolean indicating whether to use an AppWrapper. + - envs: A dictionary of environment variables to set for the cluster. + - image: The image to use for the cluster. + - image_pull_secrets: A list of image pull secrets to use for the cluster. + - write_to_file: A boolean indicating whether to write the cluster configuration to a file. + - verify_tls: A boolean indicating whether to verify TLS when connecting to the cluster. + - labels: A dictionary of labels to apply to the cluster. + - worker_extended_resource_requests: A dictionary of extended resource requests for each worker. ex: {"nvidia.com/gpu": 1} + - extended_resource_mapping: A dictionary of custom resource mappings to map extended resource requests to RayCluster resource names + - overwrite_default_resource_mapping: A boolean indicating whether to overwrite the default resource mapping. 
""" name: str - namespace: str = None - head_info: list = field(default_factory=list) - head_cpus: typing.Union[int, str] = 2 - head_memory: typing.Union[int, str] = 8 - head_gpus: int = 0 - machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] - min_cpus: typing.Union[int, str] = 1 - max_cpus: typing.Union[int, str] = 1 + namespace: Optional[str] = None + head_info: List[str] = field(default_factory=list) + head_cpus: Union[int, str] = 2 + head_memory: Union[int, str] = 8 + head_gpus: Optional[int] = None # Deprecating + head_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + machine_types: List[str] = field( + default_factory=list + ) # ["m4.xlarge", "g4dn.xlarge"] + worker_cpu_requests: Union[int, str] = 1 + worker_cpu_limits: Union[int, str] = 1 + min_cpus: Optional[Union[int, str]] = None # Deprecating + max_cpus: Optional[Union[int, str]] = None # Deprecating num_workers: int = 1 - min_memory: typing.Union[int, str] = 2 - max_memory: typing.Union[int, str] = 2 - num_gpus: int = 0 + worker_memory_requests: Union[int, str] = 2 + worker_memory_limits: Union[int, str] = 2 + min_memory: Optional[Union[int, str]] = None # Deprecating + max_memory: Optional[Union[int, str]] = None # Deprecating + num_gpus: Optional[int] = None # Deprecating template: str = f"{dir}/templates/base-template.yaml" appwrapper: bool = False - envs: dict = field(default_factory=dict) + envs: Dict[str, str] = field(default_factory=dict) image: str = "" - image_pull_secrets: list = field(default_factory=list) + image_pull_secrets: List[str] = field(default_factory=list) write_to_file: bool = False verify_tls: bool = True - labels: dict = field(default_factory=dict) + labels: Dict[str, str] = field(default_factory=dict) + worker_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + extended_resource_mapping: Dict[str, str] = field(default_factory=dict) + overwrite_default_resource_mapping: bool = False + local_queue: Optional[str] = None def __post_init__(self): if not self.verify_tls: print( "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) + + self._validate_types() self._memory_to_string() self._str_mem_no_unit_add_GB() + self._memory_to_resource() + self._cpu_to_resource() + self._gpu_to_resource() + self._combine_extended_resource_mapping() + self._validate_extended_resource_requests(self.head_extended_resource_requests) + self._validate_extended_resource_requests( + self.worker_extended_resource_requests + ) + + def _combine_extended_resource_mapping(self): + if overwritten := set(self.extended_resource_mapping.keys()).intersection( + DEFAULT_RESOURCE_MAPPING.keys() + ): + if self.overwrite_default_resource_mapping: + warnings.warn( + f"Overwriting default resource mapping for {overwritten}", + UserWarning, + ) + else: + raise ValueError( + f"Resource mapping already exists for {overwritten}, set overwrite_default_resource_mapping to True to overwrite" + ) + self.extended_resource_mapping = { + **DEFAULT_RESOURCE_MAPPING, + **self.extended_resource_mapping, + } + + def _validate_extended_resource_requests(self, extended_resources: Dict[str, int]): + for k in extended_resources.keys(): + if k not in self.extended_resource_mapping.keys(): + raise ValueError( + f"extended resource '{k}' not found in extended_resource_mapping, available resources are {list(self.extended_resource_mapping.keys())}, to add more supported resources use extended_resource_mapping. i.e. 
extended_resource_mapping = {{'{k}': 'FOO_BAR'}}" + ) + + def _gpu_to_resource(self): + if self.head_gpus: + warnings.warn( + f"head_gpus is being deprecated, replacing with head_extended_resource_requests['nvidia.com/gpu'] = {self.head_gpus}" + ) + if "nvidia.com/gpu" in self.head_extended_resource_requests: + raise ValueError( + "nvidia.com/gpu already exists in head_extended_resource_requests" + ) + self.head_extended_resource_requests["nvidia.com/gpu"] = self.head_gpus + if self.num_gpus: + warnings.warn( + f"num_gpus is being deprecated, replacing with worker_extended_resource_requests['nvidia.com/gpu'] = {self.num_gpus}" + ) + if "nvidia.com/gpu" in self.worker_extended_resource_requests: + raise ValueError( + "nvidia.com/gpu already exists in worker_extended_resource_requests" + ) + self.worker_extended_resource_requests["nvidia.com/gpu"] = self.num_gpus def _str_mem_no_unit_add_GB(self): if isinstance(self.head_memory, str) and self.head_memory.isdecimal(): self.head_memory = f"{self.head_memory}G" - if isinstance(self.min_memory, str) and self.min_memory.isdecimal(): - self.min_memory = f"{self.min_memory}G" - if isinstance(self.max_memory, str) and self.max_memory.isdecimal(): - self.max_memory = f"{self.max_memory}G" + if ( + isinstance(self.worker_memory_requests, str) + and self.worker_memory_requests.isdecimal() + ): + self.worker_memory_requests = f"{self.worker_memory_requests}G" + if ( + isinstance(self.worker_memory_limits, str) + and self.worker_memory_limits.isdecimal() + ): + self.worker_memory_limits = f"{self.worker_memory_limits}G" def _memory_to_string(self): if isinstance(self.head_memory, int): self.head_memory = f"{self.head_memory}G" - if isinstance(self.min_memory, int): - self.min_memory = f"{self.min_memory}G" - if isinstance(self.max_memory, int): - self.max_memory = f"{self.max_memory}G" + if isinstance(self.worker_memory_requests, int): + self.worker_memory_requests = f"{self.worker_memory_requests}G" + if isinstance(self.worker_memory_limits, int): + self.worker_memory_limits = f"{self.worker_memory_limits}G" + + def _cpu_to_resource(self): + if self.min_cpus: + warnings.warn("min_cpus is being deprecated, use worker_cpu_requests") + self.worker_cpu_requests = self.min_cpus + if self.max_cpus: + warnings.warn("max_cpus is being deprecated, use worker_cpu_limits") + self.worker_cpu_limits = self.max_cpus + + def _memory_to_resource(self): + if self.min_memory: + warnings.warn("min_memory is being deprecated, use worker_memory_requests") + self.worker_memory_requests = f"{self.min_memory}G" + if self.max_memory: + warnings.warn("max_memory is being deprecated, use worker_memory_limits") + self.worker_memory_limits = f"{self.max_memory}G" + + def _validate_types(self): + """Validate the types of all fields in the ClusterConfiguration dataclass.""" + for field_info in fields(self): + value = getattr(self, field_info.name) + expected_type = field_info.type + if not self._is_type(value, expected_type): + raise TypeError( + f"'{field_info.name}' should be of type {expected_type}" + ) + + @staticmethod + def _is_type(value, expected_type): + """Check if the value matches the expected type.""" + + def check_type(value, expected_type): + origin_type = get_origin(expected_type) + args = get_args(expected_type) + if origin_type is Union: + return any(check_type(value, union_type) for union_type in args) + if origin_type is list: + return all(check_type(elem, args[0]) for elem in value) + if origin_type is dict: + return all( + check_type(k, args[0]) and 
check_type(v, args[1]) + for k, v in value.items() + ) + if origin_type is tuple: + return all(check_type(elem, etype) for elem, etype in zip(value, args)) + return isinstance(value, expected_type) - local_queue: str = None
    + return check_type(value, expected_type)
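Taken together, worker_cpu_requests/worker_cpu_limits and worker_memory_requests/worker_memory_limits replace the old min_*/max_* pair, and GPUs move into the extended-resource dictionaries. A hedged example of the new-style configuration (namespace and values are illustrative; the old names still work but emit deprecation warnings):

    from codeflare_sdk.cluster.config import ClusterConfiguration

    config = ClusterConfiguration(
        name="raytest",
        namespace="default",            # assumed namespace
        num_workers=2,
        worker_cpu_requests="250m",
        worker_cpu_limits=1,
        worker_memory_requests=4,       # ints are normalised to "4G" in __post_init__
        worker_memory_limits=6,
        head_extended_resource_requests={"nvidia.com/gpu": 1},
        worker_extended_resource_requests={"nvidia.com/gpu": 1},
        write_to_file=False,
    )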
    @@ -124,11 +282,37 @@

    Classes

    class ClusterConfiguration -(name: str, namespace: str = None, head_info: list = <factory>, head_cpus: Union[int, str] = 2, head_memory: Union[int, str] = 8, head_gpus: int = 0, machine_types: list = <factory>, min_cpus: Union[int, str] = 1, max_cpus: Union[int, str] = 1, num_workers: int = 1, min_memory: Union[int, str] = 2, max_memory: Union[int, str] = 2, num_gpus: int = 0, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', appwrapper: bool = False, envs: dict = <factory>, image: str = '', image_pull_secrets: list = <factory>, write_to_file: bool = False, verify_tls: bool = True, labels: dict = <factory>, local_queue: str = None) +(name: str, namespace: Optional[str] = None, head_info: List[str] = <factory>, head_cpus: Union[int, str] = 2, head_memory: Union[int, str] = 8, head_gpus: Optional[int] = None, head_extended_resource_requests: Dict[str, int] = <factory>, machine_types: List[str] = <factory>, worker_cpu_requests: Union[int, str] = 1, worker_cpu_limits: Union[int, str] = 1, min_cpus: Union[int, str, ForwardRef(None)] = None, max_cpus: Union[int, str, ForwardRef(None)] = None, num_workers: int = 1, worker_memory_requests: Union[int, str] = 2, worker_memory_limits: Union[int, str] = 2, min_memory: Union[int, str, ForwardRef(None)] = None, max_memory: Union[int, str, ForwardRef(None)] = None, num_gpus: Optional[int] = None, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', appwrapper: bool = False, envs: Dict[str, str] = <factory>, image: str = '', image_pull_secrets: List[str] = <factory>, write_to_file: bool = False, verify_tls: bool = True, labels: Dict[str, str] = <factory>, worker_extended_resource_requests: Dict[str, int] = <factory>, extended_resource_mapping: Dict[str, str] = <factory>, overwrite_default_resource_mapping: bool = False, local_queue: Optional[str] = None)

    This dataclass is used to specify resource requirements and other details, and -is passed in as an argument when creating a Cluster object.

    +is passed in as an argument when creating a Cluster object.

    +

    Attributes: +- name: The name of the cluster. +- namespace: The namespace in which the cluster should be created. +- head_info: A list of strings containing information about the head node. +- head_cpus: The number of CPUs to allocate to the head node. +- head_memory: The amount of memory to allocate to the head node. +- head_gpus: The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests) +- head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} +- machine_types: A list of machine types to use for the cluster. +- min_cpus: The minimum number of CPUs to allocate to each worker. +- max_cpus: The maximum number of CPUs to allocate to each worker. +- num_workers: The number of workers to create. +- min_memory: The minimum amount of memory to allocate to each worker. +- max_memory: The maximum amount of memory to allocate to each worker. +- num_gpus: The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests) +- template: The path to the template file to use for the cluster. +- appwrapper: A boolean indicating whether to use an AppWrapper. +- envs: A dictionary of environment variables to set for the cluster. +- image: The image to use for the cluster. +- image_pull_secrets: A list of image pull secrets to use for the cluster. +- write_to_file: A boolean indicating whether to write the cluster configuration to a file. +- verify_tls: A boolean indicating whether to verify TLS when connecting to the cluster. +- labels: A dictionary of labels to apply to the cluster. +- worker_extended_resource_requests: A dictionary of extended resource requests for each worker. ex: {"nvidia.com/gpu": 1} +- extended_resource_mapping: A dictionary of custom resource mappings to map extended resource requests to RayCluster resource names +- overwrite_default_resource_mapping: A boolean indicating whether to overwrite the default resource mapping.

    Expand source code @@ -138,55 +322,200 @@

    Classes

    """ This dataclass is used to specify resource requirements and other details, and is passed in as an argument when creating a Cluster object. + + Attributes: + - name: The name of the cluster. + - namespace: The namespace in which the cluster should be created. + - head_info: A list of strings containing information about the head node. + - head_cpus: The number of CPUs to allocate to the head node. + - head_memory: The amount of memory to allocate to the head node. + - head_gpus: The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests) + - head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} + - machine_types: A list of machine types to use for the cluster. + - min_cpus: The minimum number of CPUs to allocate to each worker. + - max_cpus: The maximum number of CPUs to allocate to each worker. + - num_workers: The number of workers to create. + - min_memory: The minimum amount of memory to allocate to each worker. + - max_memory: The maximum amount of memory to allocate to each worker. + - num_gpus: The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests) + - template: The path to the template file to use for the cluster. + - appwrapper: A boolean indicating whether to use an AppWrapper. + - envs: A dictionary of environment variables to set for the cluster. + - image: The image to use for the cluster. + - image_pull_secrets: A list of image pull secrets to use for the cluster. + - write_to_file: A boolean indicating whether to write the cluster configuration to a file. + - verify_tls: A boolean indicating whether to verify TLS when connecting to the cluster. + - labels: A dictionary of labels to apply to the cluster. + - worker_extended_resource_requests: A dictionary of extended resource requests for each worker. ex: {"nvidia.com/gpu": 1} + - extended_resource_mapping: A dictionary of custom resource mappings to map extended resource requests to RayCluster resource names + - overwrite_default_resource_mapping: A boolean indicating whether to overwrite the default resource mapping. 
""" name: str - namespace: str = None - head_info: list = field(default_factory=list) - head_cpus: typing.Union[int, str] = 2 - head_memory: typing.Union[int, str] = 8 - head_gpus: int = 0 - machine_types: list = field(default_factory=list) # ["m4.xlarge", "g4dn.xlarge"] - min_cpus: typing.Union[int, str] = 1 - max_cpus: typing.Union[int, str] = 1 + namespace: Optional[str] = None + head_info: List[str] = field(default_factory=list) + head_cpus: Union[int, str] = 2 + head_memory: Union[int, str] = 8 + head_gpus: Optional[int] = None # Deprecating + head_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + machine_types: List[str] = field( + default_factory=list + ) # ["m4.xlarge", "g4dn.xlarge"] + worker_cpu_requests: Union[int, str] = 1 + worker_cpu_limits: Union[int, str] = 1 + min_cpus: Optional[Union[int, str]] = None # Deprecating + max_cpus: Optional[Union[int, str]] = None # Deprecating num_workers: int = 1 - min_memory: typing.Union[int, str] = 2 - max_memory: typing.Union[int, str] = 2 - num_gpus: int = 0 + worker_memory_requests: Union[int, str] = 2 + worker_memory_limits: Union[int, str] = 2 + min_memory: Optional[Union[int, str]] = None # Deprecating + max_memory: Optional[Union[int, str]] = None # Deprecating + num_gpus: Optional[int] = None # Deprecating template: str = f"{dir}/templates/base-template.yaml" appwrapper: bool = False - envs: dict = field(default_factory=dict) + envs: Dict[str, str] = field(default_factory=dict) image: str = "" - image_pull_secrets: list = field(default_factory=list) + image_pull_secrets: List[str] = field(default_factory=list) write_to_file: bool = False verify_tls: bool = True - labels: dict = field(default_factory=dict) + labels: Dict[str, str] = field(default_factory=dict) + worker_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + extended_resource_mapping: Dict[str, str] = field(default_factory=dict) + overwrite_default_resource_mapping: bool = False + local_queue: Optional[str] = None def __post_init__(self): if not self.verify_tls: print( "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) + + self._validate_types() self._memory_to_string() self._str_mem_no_unit_add_GB() + self._memory_to_resource() + self._cpu_to_resource() + self._gpu_to_resource() + self._combine_extended_resource_mapping() + self._validate_extended_resource_requests(self.head_extended_resource_requests) + self._validate_extended_resource_requests( + self.worker_extended_resource_requests + ) + + def _combine_extended_resource_mapping(self): + if overwritten := set(self.extended_resource_mapping.keys()).intersection( + DEFAULT_RESOURCE_MAPPING.keys() + ): + if self.overwrite_default_resource_mapping: + warnings.warn( + f"Overwriting default resource mapping for {overwritten}", + UserWarning, + ) + else: + raise ValueError( + f"Resource mapping already exists for {overwritten}, set overwrite_default_resource_mapping to True to overwrite" + ) + self.extended_resource_mapping = { + **DEFAULT_RESOURCE_MAPPING, + **self.extended_resource_mapping, + } + + def _validate_extended_resource_requests(self, extended_resources: Dict[str, int]): + for k in extended_resources.keys(): + if k not in self.extended_resource_mapping.keys(): + raise ValueError( + f"extended resource '{k}' not found in extended_resource_mapping, available resources are {list(self.extended_resource_mapping.keys())}, to add more supported resources use extended_resource_mapping. i.e. 
extended_resource_mapping = {{'{k}': 'FOO_BAR'}}" + ) + + def _gpu_to_resource(self): + if self.head_gpus: + warnings.warn( + f"head_gpus is being deprecated, replacing with head_extended_resource_requests['nvidia.com/gpu'] = {self.head_gpus}" + ) + if "nvidia.com/gpu" in self.head_extended_resource_requests: + raise ValueError( + "nvidia.com/gpu already exists in head_extended_resource_requests" + ) + self.head_extended_resource_requests["nvidia.com/gpu"] = self.head_gpus + if self.num_gpus: + warnings.warn( + f"num_gpus is being deprecated, replacing with worker_extended_resource_requests['nvidia.com/gpu'] = {self.num_gpus}" + ) + if "nvidia.com/gpu" in self.worker_extended_resource_requests: + raise ValueError( + "nvidia.com/gpu already exists in worker_extended_resource_requests" + ) + self.worker_extended_resource_requests["nvidia.com/gpu"] = self.num_gpus def _str_mem_no_unit_add_GB(self): if isinstance(self.head_memory, str) and self.head_memory.isdecimal(): self.head_memory = f"{self.head_memory}G" - if isinstance(self.min_memory, str) and self.min_memory.isdecimal(): - self.min_memory = f"{self.min_memory}G" - if isinstance(self.max_memory, str) and self.max_memory.isdecimal(): - self.max_memory = f"{self.max_memory}G" + if ( + isinstance(self.worker_memory_requests, str) + and self.worker_memory_requests.isdecimal() + ): + self.worker_memory_requests = f"{self.worker_memory_requests}G" + if ( + isinstance(self.worker_memory_limits, str) + and self.worker_memory_limits.isdecimal() + ): + self.worker_memory_limits = f"{self.worker_memory_limits}G" def _memory_to_string(self): if isinstance(self.head_memory, int): self.head_memory = f"{self.head_memory}G" - if isinstance(self.min_memory, int): - self.min_memory = f"{self.min_memory}G" - if isinstance(self.max_memory, int): - self.max_memory = f"{self.max_memory}G" + if isinstance(self.worker_memory_requests, int): + self.worker_memory_requests = f"{self.worker_memory_requests}G" + if isinstance(self.worker_memory_limits, int): + self.worker_memory_limits = f"{self.worker_memory_limits}G" + + def _cpu_to_resource(self): + if self.min_cpus: + warnings.warn("min_cpus is being deprecated, use worker_cpu_requests") + self.worker_cpu_requests = self.min_cpus + if self.max_cpus: + warnings.warn("max_cpus is being deprecated, use worker_cpu_limits") + self.worker_cpu_limits = self.max_cpus + + def _memory_to_resource(self): + if self.min_memory: + warnings.warn("min_memory is being deprecated, use worker_memory_requests") + self.worker_memory_requests = f"{self.min_memory}G" + if self.max_memory: + warnings.warn("max_memory is being deprecated, use worker_memory_limits") + self.worker_memory_limits = f"{self.max_memory}G" + + def _validate_types(self): + """Validate the types of all fields in the ClusterConfiguration dataclass.""" + for field_info in fields(self): + value = getattr(self, field_info.name) + expected_type = field_info.type + if not self._is_type(value, expected_type): + raise TypeError( + f"'{field_info.name}' should be of type {expected_type}" + ) + + @staticmethod + def _is_type(value, expected_type): + """Check if the value matches the expected type.""" + + def check_type(value, expected_type): + origin_type = get_origin(expected_type) + args = get_args(expected_type) + if origin_type is Union: + return any(check_type(value, union_type) for union_type in args) + if origin_type is list: + return all(check_type(elem, args[0]) for elem in value) + if origin_type is dict: + return all( + check_type(k, args[0]) and 
check_type(v, args[1]) + for k, v in value.items() + ) + if origin_type is tuple: + return all(check_type(elem, etype) for elem, etype in zip(value, args)) + return isinstance(value, expected_type) - local_queue: str = None + return check_type(value, expected_type)
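extended_resource_mapping merges on top of DEFAULT_RESOURCE_MAPPING, and every requested extended resource key must resolve through the combined mapping. A sketch of adding a vendor-specific resource name (the resource name below is made up for illustration):

    from codeflare_sdk.cluster.config import ClusterConfiguration

    config = ClusterConfiguration(
        name="custom-accel",
        namespace="default",  # assumed namespace
        extended_resource_mapping={"example.com/accelerator": "GPU"},
        worker_extended_resource_requests={"example.com/accelerator": 1},
    )

    # Re-mapping a key that already exists in DEFAULT_RESOURCE_MAPPING (for example
    # "nvidia.com/gpu") raises ValueError unless overwrite_default_resource_mapping=True,
    # in which case a UserWarning is emitted instead.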

    Class variables

    @@ -194,7 +523,11 @@

    Class variables

    -
    var envs : dict
    +
    var envs : Dict[str, str]
    +
    +
    +
    +
    var extended_resource_mapping : Dict[str, str]
    @@ -202,11 +535,15 @@

    Class variables

    -
    var head_gpus : int
    +
    var head_extended_resource_requests : Dict[str, int]
    +
    +
    +
    +
    var head_gpus : Optional[int]
    -
    var head_info : list
    +
    var head_info : List[str]
    @@ -218,35 +555,35 @@

    Class variables

    -
    var image_pull_secrets : list
    +
    var image_pull_secrets : List[str]
    -
    var labels : dict
    +
    var labels : Dict[str, str]
    -
    var local_queue : str
    +
    var local_queue : Optional[str]
    -
    var machine_types : list
    +
    var machine_types : List[str]
    -
    var max_cpus : Union[int, str]
    +
    var max_cpus : Union[int, str, ForwardRef(None)]
    -
    var max_memory : Union[int, str]
    +
    var max_memory : Union[int, str, ForwardRef(None)]
    -
    var min_cpus : Union[int, str]
    +
    var min_cpus : Union[int, str, ForwardRef(None)]
    -
    var min_memory : Union[int, str]
    +
    var min_memory : Union[int, str, ForwardRef(None)]
    @@ -254,11 +591,11 @@

    Class variables

    -
    var namespace : str
    +
    var namespace : Optional[str]
    -
    var num_gpus : int
    +
    var num_gpus : Optional[int]
    @@ -266,6 +603,10 @@

    Class variables

    +
    var overwrite_default_resource_mapping : bool
    +
    +
    +
    var template : str
    @@ -274,6 +615,26 @@

    Class variables

    +
    var worker_cpu_limits : Union[int, str]
    +
    +
    +
    +
    var worker_cpu_requests : Union[int, str]
    +
    +
    +
    +
    var worker_extended_resource_requests : Dict[str, int]
    +
    +
    +
    +
    var worker_memory_limits : Union[int, str]
    +
    +
    +
    +
    var worker_memory_requests : Union[int, str]
    +
    +
    +
    var write_to_file : bool
    @@ -298,10 +659,12 @@

    Index

    class RayCluster -(name: str, status: RayClusterStatus, head_cpus: int, head_mem: str, head_gpu: int, workers: int, worker_mem_min: str, worker_mem_max: str, worker_cpu: int, worker_gpu: int, namespace: str, dashboard: str) +(name: str, status: RayClusterStatus, head_cpus: int, head_mem: str, workers: int, worker_mem_min: str, worker_mem_max: str, worker_cpu: int, namespace: str, dashboard: str, worker_extended_resources: Dict[str, int] = <factory>, head_extended_resources: Dict[str, int] = <factory>)

    For storing information about a Ray cluster.

    @@ -310,14 +311,14 @@

    Class variables

    status: RayClusterStatus head_cpus: int head_mem: str - head_gpu: int workers: int worker_mem_min: str worker_mem_max: str worker_cpu: int - worker_gpu: int namespace: str - dashboard: str + dashboard: str + worker_extended_resources: typing.Dict[str, int] = field(default_factory=dict) + head_extended_resources: typing.Dict[str, int] = field(default_factory=dict)

    Class variables

    @@ -329,7 +330,7 @@

    Class variables

    -
    var head_gpu : int
    +
    var head_extended_resources : Dict[str, int]
    @@ -353,7 +354,7 @@

    Class variables

    -
    var worker_gpu : int
    +
    var worker_extended_resources : Dict[str, int]
    @@ -471,16 +472,16 @@

    RayCluster

    -
      +
      • dashboard
      • head_cpus
      • -
      • head_gpu
      • +
      • head_extended_resources
      • head_mem
      • name
      • namespace
      • status
      • worker_cpu
      • -
      • worker_gpu
      • +
      • worker_extended_resources
      • worker_mem_max
      • worker_mem_min
      • workers
      • diff --git a/docs/detailed-documentation/index.html b/docs/detailed-documentation/index.html index 21bab3d1..fd74344f 100644 --- a/docs/detailed-documentation/index.html +++ b/docs/detailed-documentation/index.html @@ -46,7 +46,16 @@

        Package codeflare_sdk

        from .job import RayJobClient -from .utils import generate_cert +from .utils import generate_cert +from .utils.demos import copy_demo_nbs + +from importlib.metadata import version, PackageNotFoundError + +try: + __version__ = version("codeflare-sdk") # use metadata associated with built package + +except PackageNotFoundError: + __version__ = "v0.0.0"
    diff --git a/docs/detailed-documentation/utils/demos.html b/docs/detailed-documentation/utils/demos.html new file mode 100644 index 00000000..e0dc5a8e --- /dev/null +++ b/docs/detailed-documentation/utils/demos.html @@ -0,0 +1,138 @@ + + + + + + +codeflare_sdk.utils.demos API documentation + + + + + + + + + + + +
    +
    +
    +

    Module codeflare_sdk.utils.demos

    +
    +
    +
    + +Expand source code + +
    import pathlib
    +import shutil
    +
    +package_dir = pathlib.Path(__file__).parent.parent.resolve()
    +demo_dir = f"{package_dir}/demo-notebooks"
    +
    +
    +def copy_demo_nbs(dir: str = "./demo-notebooks", overwrite: bool = False):
    +    """
    +    Copy the demo notebooks from the package to the current working directory
    +
    +    overwrite=True will overwrite any files that exactly match files written by copy_demo_nbs in the target directory.
    +    Any files that exist in the directory that don't match these values will remain untouched.
    +
    +    Args:
+        dir (str): The directory to copy the demo notebooks to. Defaults to "./demo-notebooks".
    +        overwrite (bool): Whether to overwrite files in the directory if it already exists. Defaults to False.
    +    Raises:
    +        FileExistsError: If the directory already exists.
    +    """
    +    # does dir exist already?
    +    if overwrite is False and pathlib.Path(dir).exists():
    +        raise FileExistsError(
    +            f"Directory {dir} already exists. Please remove it or provide a different location."
    +        )
    +
    +    shutil.copytree(demo_dir, dir, dirs_exist_ok=True)
    +
    +
    +
    +
    +
    +
    +
    +

    Functions

    +
    +
    +def copy_demo_nbs(dir: str = './demo-notebooks', overwrite: bool = False) +
    +
    +

    Copy the demo notebooks from the package to the current working directory

    +

    overwrite=True will overwrite any files that exactly match files written by copy_demo_nbs in the target directory. +Any files that exist in the directory that don't match these values will remain untouched.

    +

    Args

    +
    +
    dir : str
    +
The directory to copy the demo notebooks to. Defaults to "./demo-notebooks".
    +
    overwrite : bool
    +
    Whether to overwrite files in the directory if it already exists. Defaults to False.
    +
    +

    Raises

    +
    +
    FileExistsError
    +
    If the directory already exists.
    +
    +
    + +Expand source code + +
    def copy_demo_nbs(dir: str = "./demo-notebooks", overwrite: bool = False):
    +    """
    +    Copy the demo notebooks from the package to the current working directory
    +
    +    overwrite=True will overwrite any files that exactly match files written by copy_demo_nbs in the target directory.
    +    Any files that exist in the directory that don't match these values will remain untouched.
    +
    +    Args:
+        dir (str): The directory to copy the demo notebooks to. Defaults to "./demo-notebooks".
    +        overwrite (bool): Whether to overwrite files in the directory if it already exists. Defaults to False.
    +    Raises:
    +        FileExistsError: If the directory already exists.
    +    """
    +    # does dir exist already?
    +    if overwrite is False and pathlib.Path(dir).exists():
    +        raise FileExistsError(
    +            f"Directory {dir} already exists. Please remove it or provide a different location."
    +        )
    +
    +    shutil.copytree(demo_dir, dir, dirs_exist_ok=True)
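Usage is a one-liner from the package root; a sketch (the target directory is arbitrary):

    from codeflare_sdk import copy_demo_nbs

    copy_demo_nbs(dir="./my-demos")                   # raises FileExistsError if ./my-demos exists
    copy_demo_nbs(dir="./my-demos", overwrite=True)   # re-copy over files written previously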
    +
    +
    +
    +
    +
    +
    +
    + +
    + + + diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index 1e421762..7b41e3c9 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -48,6 +48,7 @@

    Module codeflare_sdk.utils.generate_yaml

    (in the cluster sub-module) for AppWrapper generation. """ +import json from typing import Optional import typing import yaml @@ -61,6 +62,8 @@

    Module codeflare_sdk.utils.generate_yaml

    from os import urandom from base64 import b64encode from urllib3.util import parse_url +from kubernetes.client.exceptions import ApiException +import codeflare_sdk def read_template(template): @@ -108,16 +111,20 @@

    Module codeflare_sdk.utils.generate_yaml

    return False -def update_names(cluster_yaml, cluster_name, namespace): - meta = cluster_yaml.get("metadata") - meta["name"] = cluster_name - meta["namespace"] = namespace +def update_names( + cluster_yaml: dict, + cluster: "codeflare_sdk.cluster.Cluster", +): + metadata = cluster_yaml.get("metadata") + metadata["name"] = cluster.config.name + metadata["namespace"] = cluster.config.namespace def update_image(spec, image): containers = spec.get("containers") - for container in containers: - container["image"] = image + if image != "": + for container in containers: + container["image"] = image def update_image_pull_secrets(spec, image_pull_secrets): @@ -137,60 +144,118 @@

    Module codeflare_sdk.utils.generate_yaml

    container["env"] = env -def update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu): +def update_resources( + spec, + worker_cpu_requests, + worker_cpu_limits, + worker_memory_requests, + worker_memory_limits, + custom_resources, +): container = spec.get("containers") for resource in container: requests = resource.get("resources").get("requests") if requests is not None: - requests["cpu"] = min_cpu - requests["memory"] = min_memory - requests["nvidia.com/gpu"] = gpu + requests["cpu"] = worker_cpu_requests + requests["memory"] = worker_memory_requests limits = resource.get("resources").get("limits") if limits is not None: - limits["cpu"] = max_cpu - limits["memory"] = max_memory - limits["nvidia.com/gpu"] = gpu + limits["cpu"] = worker_cpu_limits + limits["memory"] = worker_memory_limits + for k in custom_resources.keys(): + limits[k] = custom_resources[k] + requests[k] = custom_resources[k] + + +def head_worker_gpu_count_from_cluster( + cluster: "codeflare_sdk.cluster.Cluster", +) -> typing.Tuple[int, int]: + head_gpus = 0 + worker_gpus = 0 + for k in cluster.config.head_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type == "GPU": + head_gpus += int(cluster.config.head_extended_resource_requests[k]) + for k in cluster.config.worker_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type == "GPU": + worker_gpus += int(cluster.config.worker_extended_resource_requests[k]) + + return head_gpus, worker_gpus + + +FORBIDDEN_CUSTOM_RESOURCE_TYPES = ["GPU", "CPU", "memory"] + + +def head_worker_resources_from_cluster( + cluster: "codeflare_sdk.cluster.Cluster", +) -> typing.Tuple[dict, dict]: + to_return = {}, {} + for k in cluster.config.head_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES: + continue + to_return[0][resource_type] = cluster.config.head_extended_resource_requests[ + k + ] + to_return[0].get(resource_type, 0) + + for k in cluster.config.worker_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES: + continue + to_return[1][resource_type] = cluster.config.worker_extended_resource_requests[ + k + ] + to_return[1].get(resource_type, 0) + return to_return def update_nodes( - cluster_yaml, - appwrapper_name, - min_cpu, - max_cpu, - min_memory, - max_memory, - gpu, - workers, - image, - env, - image_pull_secrets, - head_cpus, - head_memory, - head_gpus, + ray_cluster_dict: dict, + cluster: "codeflare_sdk.cluster.Cluster", ): - head = cluster_yaml.get("spec").get("headGroupSpec") - head["rayStartParams"]["num-gpus"] = str(int(head_gpus)) + head = ray_cluster_dict.get("spec").get("headGroupSpec") + worker = ray_cluster_dict.get("spec").get("workerGroupSpecs")[0] + head_gpus, worker_gpus = head_worker_gpu_count_from_cluster(cluster) + head_resources, worker_resources = head_worker_resources_from_cluster(cluster) + head_resources = json.dumps(head_resources).replace('"', '\\"') + head_resources = f'"{head_resources}"' + worker_resources = json.dumps(worker_resources).replace('"', '\\"') + worker_resources = f'"{worker_resources}"' + head["rayStartParams"]["num-gpus"] = str(head_gpus) + head["rayStartParams"]["resources"] = head_resources - worker = cluster_yaml.get("spec").get("workerGroupSpecs")[0] # Head counts as first worker - worker["replicas"] = 
workers - worker["minReplicas"] = workers - worker["maxReplicas"] = workers - worker["groupName"] = "small-group-" + appwrapper_name - worker["rayStartParams"]["num-gpus"] = str(int(gpu)) + worker["replicas"] = cluster.config.num_workers + worker["minReplicas"] = cluster.config.num_workers + worker["maxReplicas"] = cluster.config.num_workers + worker["groupName"] = "small-group-" + cluster.config.name + worker["rayStartParams"]["num-gpus"] = str(worker_gpus) + worker["rayStartParams"]["resources"] = worker_resources for comp in [head, worker]: spec = comp.get("template").get("spec") - update_image_pull_secrets(spec, image_pull_secrets) - update_image(spec, image) - update_env(spec, env) + update_image_pull_secrets(spec, cluster.config.image_pull_secrets) + update_image(spec, cluster.config.image) + update_env(spec, cluster.config.envs) if comp == head: # TODO: Eventually add head node configuration outside of template update_resources( - spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus + spec, + cluster.config.head_cpus, + cluster.config.head_cpus, + cluster.config.head_memory, + cluster.config.head_memory, + cluster.config.head_extended_resource_requests, ) else: - update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) + update_resources( + spec, + cluster.config.worker_cpu_requests, + cluster.config.worker_cpu_limits, + cluster.config.worker_memory_requests, + cluster.config.worker_memory_limits, + cluster.config.worker_extended_resource_requests, + ) def del_from_list_by_name(l: list, target: typing.List[str]) -> list: @@ -208,8 +273,11 @@

    Module codeflare_sdk.utils.generate_yaml

    namespace=namespace, plural="localqueues", ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) + except ApiException as e: # pragma: no cover + if e.status == 404 or e.status == 403: + return + else: + return _kube_api_error_handling(e) for lq in local_queues["items"]: if ( "annotations" in lq["metadata"] @@ -218,9 +286,6 @@

    Module codeflare_sdk.utils.generate_yaml

    == "true" ): return lq["metadata"]["name"] - raise ValueError( - "Default Local Queue with kueue.x-k8s.io/default-queue: true annotation not found please create a default Local Queue or provide the local_queue name in Cluster Configuration" - ) def local_queue_exists(namespace: str, local_queue_name: str): @@ -245,7 +310,9 @@

    Module codeflare_sdk.utils.generate_yaml

    def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]): lq_name = local_queue or get_default_kueue_name(namespace) - if not local_queue_exists(namespace, lq_name): + if lq_name == None: + return + elif not local_queue_exists(namespace, lq_name): raise ValueError( "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" ) @@ -291,65 +358,32 @@

    Module codeflare_sdk.utils.generate_yaml

    print(f"Written to: {output_file_name}") -def generate_appwrapper( - name: str, - namespace: str, - head_cpus: int, - head_memory: int, - head_gpus: int, - min_cpu: int, - max_cpu: int, - min_memory: int, - max_memory: int, - gpu: int, - workers: int, - template: str, - image: str, - appwrapper: bool, - env, - image_pull_secrets: list, - write_to_file: bool, - local_queue: Optional[str], - labels, -): - cluster_yaml = read_template(template) - appwrapper_name, cluster_name = gen_names(name) - update_names(cluster_yaml, cluster_name, namespace) - update_nodes( +def generate_appwrapper(cluster: "codeflare_sdk.cluster.Cluster"): + cluster_yaml = read_template(cluster.config.template) + appwrapper_name, _ = gen_names(cluster.config.name) + update_names( cluster_yaml, - appwrapper_name, - min_cpu, - max_cpu, - min_memory, - max_memory, - gpu, - workers, - image, - env, - image_pull_secrets, - head_cpus, - head_memory, - head_gpus, + cluster, ) - augment_labels(cluster_yaml, labels) + update_nodes(cluster_yaml, cluster) + augment_labels(cluster_yaml, cluster.config.labels) notebook_annotations(cluster_yaml) - user_yaml = ( - wrap_cluster(cluster_yaml, appwrapper_name, namespace) - if appwrapper + wrap_cluster(cluster_yaml, appwrapper_name, cluster.config.namespace) + if cluster.config.appwrapper else cluster_yaml ) - add_queue_label(user_yaml, namespace, local_queue) + add_queue_label(user_yaml, cluster.config.namespace, cluster.config.local_queue) - if write_to_file: + if cluster.config.write_to_file: directory_path = os.path.expanduser("~/.codeflare/resources/") outfile = os.path.join(directory_path, appwrapper_name + ".yaml") write_user_yaml(user_yaml, outfile) return outfile else: user_yaml = yaml.dump(user_yaml) - print(f"Yaml resources loaded for {name}") + print(f"Yaml resources loaded for {cluster.config.name}") return user_yaml
    @@ -371,7 +405,9 @@

    Functions

    def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]):
         lq_name = local_queue or get_default_kueue_name(namespace)
    -    if not local_queue_exists(namespace, lq_name):
    +    if lq_name == None:
    +        return
    +    elif not local_queue_exists(namespace, lq_name):
             raise ValueError(
                 "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration"
             )
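A behaviour change worth calling out: when no local_queue is passed and the namespace has no default-annotated LocalQueue (or the Kueue API is absent, hence the 403/404 handling above), the resource is now created without a queue label instead of raising. A condensed sketch of the decision, with the label key shown only for illustration:

    from typing import Optional

    def resolve_queue_label(local_queue: Optional[str], default_queue: Optional[str]) -> Optional[dict]:
        # default_queue is None when no LocalQueue carries the default-queue annotation,
        # or when the lookup hits a 403/404 and returns early.
        lq_name = local_queue or default_queue
        if lq_name is None:
            return None  # nothing to label, and no error is raised any more
        # otherwise the existing validation runs and the resource is labelled
        # with the Kueue queue-name label (key assumed here for illustration).
        return {"kueue.x-k8s.io/queue-name": lq_name}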
    @@ -428,7 +464,7 @@ 

    Functions

    -def generate_appwrapper(name: str, namespace: str, head_cpus: int, head_memory: int, head_gpus: int, min_cpu: int, max_cpu: int, min_memory: int, max_memory: int, gpu: int, workers: int, template: str, image: str, appwrapper: bool, env, image_pull_secrets: list, write_to_file: bool, local_queue: Optional[str], labels) +def generate_appwrapper(cluster: codeflare_sdk.cluster.Cluster)
    @@ -436,65 +472,32 @@

    Functions

    Expand source code -
    def generate_appwrapper(
    -    name: str,
    -    namespace: str,
    -    head_cpus: int,
    -    head_memory: int,
    -    head_gpus: int,
    -    min_cpu: int,
    -    max_cpu: int,
    -    min_memory: int,
    -    max_memory: int,
    -    gpu: int,
    -    workers: int,
    -    template: str,
    -    image: str,
    -    appwrapper: bool,
    -    env,
    -    image_pull_secrets: list,
    -    write_to_file: bool,
    -    local_queue: Optional[str],
    -    labels,
    -):
    -    cluster_yaml = read_template(template)
    -    appwrapper_name, cluster_name = gen_names(name)
    -    update_names(cluster_yaml, cluster_name, namespace)
    -    update_nodes(
    +
    def generate_appwrapper(cluster: "codeflare_sdk.cluster.Cluster"):
    +    cluster_yaml = read_template(cluster.config.template)
    +    appwrapper_name, _ = gen_names(cluster.config.name)
    +    update_names(
             cluster_yaml,
    -        appwrapper_name,
    -        min_cpu,
    -        max_cpu,
    -        min_memory,
    -        max_memory,
    -        gpu,
    -        workers,
    -        image,
    -        env,
    -        image_pull_secrets,
    -        head_cpus,
    -        head_memory,
    -        head_gpus,
    +        cluster,
         )
    -    augment_labels(cluster_yaml, labels)
    +    update_nodes(cluster_yaml, cluster)
    +    augment_labels(cluster_yaml, cluster.config.labels)
         notebook_annotations(cluster_yaml)
    -
         user_yaml = (
    -        wrap_cluster(cluster_yaml, appwrapper_name, namespace)
    -        if appwrapper
    +        wrap_cluster(cluster_yaml, appwrapper_name, cluster.config.namespace)
    +        if cluster.config.appwrapper
             else cluster_yaml
         )
     
    -    add_queue_label(user_yaml, namespace, local_queue)
    +    add_queue_label(user_yaml, cluster.config.namespace, cluster.config.local_queue)
     
    -    if write_to_file:
    +    if cluster.config.write_to_file:
             directory_path = os.path.expanduser("~/.codeflare/resources/")
             outfile = os.path.join(directory_path, appwrapper_name + ".yaml")
             write_user_yaml(user_yaml, outfile)
             return outfile
         else:
             user_yaml = yaml.dump(user_yaml)
    -        print(f"Yaml resources loaded for {name}")
    +        print(f"Yaml resources loaded for {cluster.config.name}")
             return user_yaml
    @@ -518,8 +521,11 @@

    Functions

    namespace=namespace, plural="localqueues", ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) + except ApiException as e: # pragma: no cover + if e.status == 404 or e.status == 403: + return + else: + return _kube_api_error_handling(e) for lq in local_queues["items"]: if ( "annotations" in lq["metadata"] @@ -527,10 +533,64 @@

    Functions

    and lq["metadata"]["annotations"]["kueue.x-k8s.io/default-queue"].lower() == "true" ): - return lq["metadata"]["name"] - raise ValueError( - "Default Local Queue with kueue.x-k8s.io/default-queue: true annotation not found please create a default Local Queue or provide the local_queue name in Cluster Configuration" - )
    + return lq["metadata"]["name"] + + +
    +def head_worker_gpu_count_from_cluster(cluster: codeflare_sdk.cluster.Cluster) ‑> Tuple[int, int] +
    +
    +
    +
    + +Expand source code + +
    def head_worker_gpu_count_from_cluster(
    +    cluster: "codeflare_sdk.cluster.Cluster",
    +) -> typing.Tuple[int, int]:
    +    head_gpus = 0
    +    worker_gpus = 0
    +    for k in cluster.config.head_extended_resource_requests.keys():
    +        resource_type = cluster.config.extended_resource_mapping[k]
    +        if resource_type == "GPU":
    +            head_gpus += int(cluster.config.head_extended_resource_requests[k])
    +    for k in cluster.config.worker_extended_resource_requests.keys():
    +        resource_type = cluster.config.extended_resource_mapping[k]
    +        if resource_type == "GPU":
    +            worker_gpus += int(cluster.config.worker_extended_resource_requests[k])
    +
    +    return head_gpus, worker_gpus
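The num-gpus value handed to Ray is no longer read from a single num_gpus field; it is the sum of every extended resource whose mapped type is "GPU". A standalone sketch of that arithmetic, using a subset of the default mapping:

    # Pure computation, no cluster access needed; request values are illustrative.
    head_requests = {"nvidia.com/gpu": 1}
    worker_requests = {"nvidia.com/gpu": 1, "intel.com/gpu": 1}
    mapping = {"nvidia.com/gpu": "GPU", "intel.com/gpu": "GPU"}  # subset of DEFAULT_RESOURCE_MAPPING

    head_gpus = sum(int(v) for k, v in head_requests.items() if mapping[k] == "GPU")
    worker_gpus = sum(int(v) for k, v in worker_requests.items() if mapping[k] == "GPU")
    print(head_gpus, worker_gpus)  # 1 2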
    +
    +
    +
    +def head_worker_resources_from_cluster(cluster: codeflare_sdk.cluster.Cluster) ‑> Tuple[dict, dict] +
    +
    +
    +
    + +Expand source code + +
    def head_worker_resources_from_cluster(
    +    cluster: "codeflare_sdk.cluster.Cluster",
    +) -> typing.Tuple[dict, dict]:
    +    to_return = {}, {}
    +    for k in cluster.config.head_extended_resource_requests.keys():
    +        resource_type = cluster.config.extended_resource_mapping[k]
    +        if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES:
    +            continue
    +        to_return[0][resource_type] = cluster.config.head_extended_resource_requests[
    +            k
    +        ] + to_return[0].get(resource_type, 0)
    +
    +    for k in cluster.config.worker_extended_resource_requests.keys():
    +        resource_type = cluster.config.extended_resource_mapping[k]
    +        if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES:
    +            continue
    +        to_return[1][resource_type] = cluster.config.worker_extended_resource_requests[
    +            k
    +        ] + to_return[1].get(resource_type, 0)
    +    return to_return
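head_worker_resources_from_cluster builds the Ray custom-resource dicts: anything whose mapped type is not GPU, CPU, or memory is keyed by its mapped name and summed. A small sketch with made-up requests:

    # Neuron cores map to "neuron_cores" in the default mapping and so become a Ray
    # custom resource; GPUs are skipped here because they already flow through num-gpus.
    worker_requests = {"aws.amazon.com/neuroncore": 2, "nvidia.com/gpu": 1}
    mapping = {"aws.amazon.com/neuroncore": "neuron_cores", "nvidia.com/gpu": "GPU"}
    forbidden = ["GPU", "CPU", "memory"]

    worker_resources = {}
    for k, v in worker_requests.items():
        rtype = mapping[k]
        if rtype in forbidden:
            continue
        worker_resources[rtype] = v + worker_resources.get(rtype, 0)

    print(worker_resources)  # {'neuron_cores': 2}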
    @@ -672,8 +732,9 @@

    Functions

    def update_image(spec, image):
         containers = spec.get("containers")
    -    for container in containers:
    -        container["image"] = image
    + if image != "": + for container in containers: + container["image"] = image
    @@ -693,7 +754,7 @@

    Functions

    -def update_names(cluster_yaml, cluster_name, namespace) +def update_names(cluster_yaml: dict, cluster: codeflare_sdk.cluster.Cluster)
    @@ -701,14 +762,17 @@

    Functions

    Expand source code -
    def update_names(cluster_yaml, cluster_name, namespace):
    -    meta = cluster_yaml.get("metadata")
    -    meta["name"] = cluster_name
    -    meta["namespace"] = namespace
    +
    def update_names(
    +    cluster_yaml: dict,
    +    cluster: "codeflare_sdk.cluster.Cluster",
    +):
    +    metadata = cluster_yaml.get("metadata")
    +    metadata["name"] = cluster.config.name
    +    metadata["namespace"] = cluster.config.namespace
    -def update_nodes(cluster_yaml, appwrapper_name, min_cpu, max_cpu, min_memory, max_memory, gpu, workers, image, env, image_pull_secrets, head_cpus, head_memory, head_gpus) +def update_nodes(ray_cluster_dict: dict, cluster: codeflare_sdk.cluster.Cluster)
    @@ -717,48 +781,56 @@

    Functions

    Expand source code
    def update_nodes(
    -    cluster_yaml,
    -    appwrapper_name,
    -    min_cpu,
    -    max_cpu,
    -    min_memory,
    -    max_memory,
    -    gpu,
    -    workers,
    -    image,
    -    env,
    -    image_pull_secrets,
    -    head_cpus,
    -    head_memory,
    -    head_gpus,
    +    ray_cluster_dict: dict,
    +    cluster: "codeflare_sdk.cluster.Cluster",
     ):
    -    head = cluster_yaml.get("spec").get("headGroupSpec")
    -    head["rayStartParams"]["num-gpus"] = str(int(head_gpus))
    +    head = ray_cluster_dict.get("spec").get("headGroupSpec")
    +    worker = ray_cluster_dict.get("spec").get("workerGroupSpecs")[0]
    +    head_gpus, worker_gpus = head_worker_gpu_count_from_cluster(cluster)
    +    head_resources, worker_resources = head_worker_resources_from_cluster(cluster)
    +    head_resources = json.dumps(head_resources).replace('"', '\\"')
    +    head_resources = f'"{head_resources}"'
    +    worker_resources = json.dumps(worker_resources).replace('"', '\\"')
    +    worker_resources = f'"{worker_resources}"'
    +    head["rayStartParams"]["num-gpus"] = str(head_gpus)
    +    head["rayStartParams"]["resources"] = head_resources
     
    -    worker = cluster_yaml.get("spec").get("workerGroupSpecs")[0]
         # Head counts as first worker
    -    worker["replicas"] = workers
    -    worker["minReplicas"] = workers
    -    worker["maxReplicas"] = workers
    -    worker["groupName"] = "small-group-" + appwrapper_name
    -    worker["rayStartParams"]["num-gpus"] = str(int(gpu))
    +    worker["replicas"] = cluster.config.num_workers
    +    worker["minReplicas"] = cluster.config.num_workers
    +    worker["maxReplicas"] = cluster.config.num_workers
    +    worker["groupName"] = "small-group-" + cluster.config.name
    +    worker["rayStartParams"]["num-gpus"] = str(worker_gpus)
    +    worker["rayStartParams"]["resources"] = worker_resources
     
         for comp in [head, worker]:
             spec = comp.get("template").get("spec")
    -        update_image_pull_secrets(spec, image_pull_secrets)
    -        update_image(spec, image)
    -        update_env(spec, env)
    +        update_image_pull_secrets(spec, cluster.config.image_pull_secrets)
    +        update_image(spec, cluster.config.image)
    +        update_env(spec, cluster.config.envs)
             if comp == head:
                 # TODO: Eventually add head node configuration outside of template
                 update_resources(
    -                spec, head_cpus, head_cpus, head_memory, head_memory, head_gpus
    +                spec,
    +                cluster.config.head_cpus,
    +                cluster.config.head_cpus,
    +                cluster.config.head_memory,
    +                cluster.config.head_memory,
    +                cluster.config.head_extended_resource_requests,
                 )
             else:
    -            update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu)
    + update_resources( + spec, + cluster.config.worker_cpu_requests, + cluster.config.worker_cpu_limits, + cluster.config.worker_memory_requests, + cluster.config.worker_memory_limits, + cluster.config.worker_extended_resource_requests, + )
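Note how the Ray custom resources end up in rayStartParams: the dict is JSON-encoded and its quotes escaped so the value survives YAML and shell quoting. A tiny sketch of that transformation (dict contents are illustrative):

    import json

    worker_resources = {"neuron_cores": 2}
    encoded = json.dumps(worker_resources).replace('"', '\\"')
    ray_start_param = f'"{encoded}"'
    print(ray_start_param)  # "{\"neuron_cores\": 2}"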
    -def update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu) +def update_resources(spec, worker_cpu_requests, worker_cpu_limits, worker_memory_requests, worker_memory_limits, custom_resources)
    @@ -766,19 +838,27 @@

    Functions

    Expand source code -
    def update_resources(spec, min_cpu, max_cpu, min_memory, max_memory, gpu):
    +
    def update_resources(
    +    spec,
    +    worker_cpu_requests,
    +    worker_cpu_limits,
    +    worker_memory_requests,
    +    worker_memory_limits,
    +    custom_resources,
    +):
         container = spec.get("containers")
         for resource in container:
             requests = resource.get("resources").get("requests")
             if requests is not None:
    -            requests["cpu"] = min_cpu
    -            requests["memory"] = min_memory
    -            requests["nvidia.com/gpu"] = gpu
    +            requests["cpu"] = worker_cpu_requests
    +            requests["memory"] = worker_memory_requests
             limits = resource.get("resources").get("limits")
             if limits is not None:
    -            limits["cpu"] = max_cpu
    -            limits["memory"] = max_memory
    -            limits["nvidia.com/gpu"] = gpu
    + limits["cpu"] = worker_cpu_limits + limits["memory"] = worker_memory_limits + for k in custom_resources.keys(): + limits[k] = custom_resources[k] + requests[k] = custom_resources[k]
    @@ -844,6 +924,8 @@

    Index

  • gen_names
  • generate_appwrapper
  • get_default_kueue_name
  • +
  • head_worker_gpu_count_from_cluster
  • +
  • head_worker_resources_from_cluster
  • is_kind_cluster
  • is_openshift_cluster
  • local_queue_exists
diff --git a/docs/detailed-documentation/utils/index.html b/docs/detailed-documentation/utils/index.html
index 1eb081d2..4a65cc39 100644
--- a/docs/detailed-documentation/utils/index.html
+++ b/docs/detailed-documentation/utils/index.html
@@ -26,6 +26,10 @@

    Module codeflare_sdk.utils

    Sub-modules

    +
    codeflare_sdk.utils.demos
    +
    +
    +
    codeflare_sdk.utils.generate_cert
    @@ -67,6 +71,7 @@

    Index

  • Sub-modules

      +
    • codeflare_sdk.utils.demos
    • codeflare_sdk.utils.generate_cert
    • codeflare_sdk.utils.generate_yaml
    • codeflare_sdk.utils.kube_api_helpers
diff --git a/docs/detailed-documentation/utils/pretty_print.html b/docs/detailed-documentation/utils/pretty_print.html
index 2e7a69b8..cbffd122 100644
--- a/docs/detailed-documentation/utils/pretty_print.html
+++ b/docs/detailed-documentation/utils/pretty_print.html
@@ -169,7 +169,7 @@

      Module codeflare_sdk.utils.pretty_print

     workers = str(cluster.workers)
     memory = f"{cluster.worker_mem_min}~{cluster.worker_mem_max}"
     cpu = str(cluster.worker_cpu)
-    gpu = str(cluster.worker_gpu)
+    gpu = str(cluster.worker_extended_resources.get("nvidia.com/gpu", 0))
     #'table0' to display the cluster name, status, url, and dashboard link
     table0 = Table(box=None, show_header=False)
@@ -347,7 +347,7 @@
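With this change the GPU column in the pretty-printed status is derived from the cluster's extended-resources mapping rather than a dedicated `worker_gpu` field, so clusters that request no GPUs simply fall back to 0. A tiny illustration of the lookup (the mapping below is an assumed example):

```python
# Assumed example of a RayCluster's worker extended resources.
worker_extended_resources = {"nvidia.com/gpu": 2}

gpu = str(worker_extended_resources.get("nvidia.com/gpu", 0))  # "2"
no_gpu = str({}.get("nvidia.com/gpu", 0))                      # "0" when the key is absent
```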

      Functions

      workers = str(cluster.workers) memory = f"{cluster.worker_mem_min}~{cluster.worker_mem_max}" cpu = str(cluster.worker_cpu) - gpu = str(cluster.worker_gpu) + gpu = str(cluster.worker_extended_resources.get("nvidia.com/gpu", 0)) #'table0' to display the cluster name, status, url, and dashboard link table0 = Table(box=None, show_header=False) From 9a8603d28d3fdec32cd5af1882913ce96af10721 Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Fri, 26 Jul 2024 15:47:26 +0200 Subject: [PATCH 307/496] Fix 2_basic_interactive.ipynb to run properly in guided demos PR check --- README.md | 2 +- demo-notebooks/guided-demos/2_basic_interactive.ipynb | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 3b3c8b14..a98f542c 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ An intuitive, easy-to-use python interface for batch resource requesting, access For guided demos and basics walkthroughs, check out the following links: -- Guided demo notebooks available [here](https://github.com/project-codeflare/codeflare-sdk/tree/main/src/demo-notebooks/guided-demos), and copies of the notebooks with [expected output](https://github.com/project-codeflare/codeflare-sdk/tree/main/src/demo-notebooks/guided-demos/notebook-ex-outputs) also available +- Guided demo notebooks available [here](https://github.com/project-codeflare/codeflare-sdk/tree/main/demo-notebooks/guided-demos), and copies of the notebooks with [expected output](https://github.com/project-codeflare/codeflare-sdk/tree/main/demo-notebooks/guided-demos/notebook-ex-outputs) also available - these demos can be copied into your current working directory when using the `codeflare-sdk` by using the `codeflare_sdk.copy_demo_nbs()` function - Additionally, we have a [video walkthrough](https://www.youtube.com/watch?v=U76iIfd9EmE) of these basic demos from June, 2023 diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 5ce94a21..e344ff35 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -65,10 +65,10 @@ " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", " num_gpus=1,\n", " num_workers=2,\n", - " min_cpus=2,\n", - " max_cpus=2,\n", - " min_memory=8,\n", - " max_memory=8,\n", + " min_cpus='250m',\n", + " max_cpus=1,\n", + " min_memory=4,\n", + " max_memory=6,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", From f7518fab70d69a03464db49283d4cd23ddbc99a1 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 31 Jul 2024 13:52:06 +0100 Subject: [PATCH 308/496] Add optimised dependabot --- .github/dependabot.yml | 29 +++++++++++++++++++++++++++++ tests/unit_test.py | 9 +++++++-- tests/unit_test_support.py | 8 ++++++++ 3 files changed, 44 insertions(+), 2 deletions(-) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..fb121a71 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,29 @@ +# Please see the documentation for all configuration options: +# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file + +version: 2 +updates: + # 
This is to update requirements.txt files in the guided-demos, and e2e directories. + # The group configuration option is used to group updates for consistency across related directories. + - package-ecosystem: "pip" + directories: + - "**/demo-notebooks/guided-demos*" + - "/tests/e2e" + schedule: + interval: "weekly" + groups: + requirements.txt: + patterns: + - "*" + open-pull-requests-limit: 10 + labels: + - "test-guided-notebooks" + + # pip means poetry in this case, this keeps poetry.lock up to date with constraints in pyproject.toml. + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" + open-pull-requests-limit: 10 + labels: + - "test-guided-notebooks" diff --git a/tests/unit_test.py b/tests/unit_test.py index 2709894e..ba937d87 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -68,6 +68,7 @@ createClusterWithConfig, createClusterConfig, createClusterWrongType, + get_package_and_version, ) import codeflare_sdk.utils.kube_api_helpers @@ -2832,9 +2833,13 @@ def test_rjc_tail_job_logs(ray_job_client, mocker): def test_rjc_list_jobs(ray_job_client, mocker): + requirements_path = "tests/e2e/mnist_pip_requirements.txt" + pytorch_lightning = get_package_and_version("pytorch_lightning", requirements_path) + torchmetrics = get_package_and_version("torchmetrics", requirements_path) + torchvision = get_package_and_version("torchvision", requirements_path) jobs_list = [ - "JobDetails(type=, job_id=None, submission_id='raysubmit_4k2NYS1YbRXYPZCM', driver_info=None, status=, entrypoint='python mnist.py', message='Job finished successfully.', error_type=None, start_time=1701352132585, end_time=1701352192002, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {'packages': ['pytorch_lightning==1.9.5', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", - "JobDetails(type=, job_id=None, submission_id='raysubmit_iRuwU8vdkbUZZGvT', driver_info=None, status=, entrypoint='python mnist.py', message='Job was intentionally stopped.', error_type=None, start_time=1701353096163, end_time=1701353097733, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {'packages': ['pytorch_lightning==1.9.5', 'ray_lightning', 'torchmetrics==0.9.1', 'torchvision==0.12.0'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", + f"JobDetails(type=, job_id=None, submission_id='raysubmit_4k2NYS1YbRXYPZCM', driver_info=None, status=, entrypoint='python mnist.py', message='Job finished successfully.', error_type=None, start_time=1701352132585, end_time=1701352192002, metadata={{}}, runtime_env={{'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {{'packages': ['{pytorch_lightning}', 'ray_lightning', '{torchmetrics}', '{torchvision}'], 'pip_check': False}}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", + f"JobDetails(type=, job_id=None, submission_id='raysubmit_iRuwU8vdkbUZZGvT', driver_info=None, status=, entrypoint='python mnist.py', message='Job was intentionally stopped.', error_type=None, 
start_time=1701353096163, end_time=1701353097733, metadata={{}}, runtime_env={{'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {{'packages': ['{pytorch_lightning}', 'ray_lightning', '{torchmetrics}', '{torchvision}'], 'pip_check': False}}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", ] mocked_rjc_list_jobs = mocker.patch.object( JobSubmissionClient, "list_jobs", return_value=jobs_list diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 9fcdd5a5..dd8c2fce 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -50,3 +50,11 @@ def createClusterWrongType(): labels={1: 1}, ) return config + + +def get_package_and_version(package_name, requirements_file_path): + with open(requirements_file_path, "r") as file: + for line in file: + if line.strip().startswith(f"{package_name}=="): + return line.strip() + return None From 3f39521e70d1bbf64a257cbe580a4207d1057afc Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Thu, 1 Aug 2024 11:37:15 +0100 Subject: [PATCH 309/496] Change custom-nb-image/requirements.txt path to exclude from dependency bumps --- custom-nb-image/Dockerfile | 2 +- custom-nb-image/{ => requirements}/requirements.txt | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename custom-nb-image/{ => requirements}/requirements.txt (100%) diff --git a/custom-nb-image/Dockerfile b/custom-nb-image/Dockerfile index a656618e..fca83e25 100644 --- a/custom-nb-image/Dockerfile +++ b/custom-nb-image/Dockerfile @@ -14,7 +14,7 @@ FROM quay.io/opendatahub/notebooks:jupyter-minimal-ubi9-python-3.9-2023a_20230302 -COPY requirements.txt requirements.txt +COPY requirements/requirements.txt requirements.txt RUN pip install -r requirements.txt diff --git a/custom-nb-image/requirements.txt b/custom-nb-image/requirements/requirements.txt similarity index 100% rename from custom-nb-image/requirements.txt rename to custom-nb-image/requirements/requirements.txt From b026faeb7cc0b846c1418ff90de3ee5cbc50a607 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 2 Aug 2024 11:18:27 +0000 Subject: [PATCH 310/496] Bump pytest-timeout from 2.2.0 to 2.3.1 Bumps [pytest-timeout](https://github.com/pytest-dev/pytest-timeout) from 2.2.0 to 2.3.1. - [Commits](https://github.com/pytest-dev/pytest-timeout/compare/2.2.0...2.3.1) --- updated-dependencies: - dependency-name: pytest-timeout dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 02398113..bd9102d3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2061,17 +2061,17 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-timeout" -version = "2.2.0" +version = "2.3.1" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"}, - {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"}, + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, ] [package.dependencies] -pytest = ">=5.0.0" +pytest = ">=7.0.0" [[package]] name = "python-dateutil" @@ -2807,4 +2807,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "70c25813b589204a08c4143380cb8fe06de901dd6a83162b8f93a51945072629" +content-hash = "7bb89ee82f06208a09aecaa8f4f31aa2e06194e79eae305546d255d13e895368" diff --git a/pyproject.toml b/pyproject.toml index be225e90..44f68d46 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ optional = true pytest = "7.4.0" coverage = "7.2.7" pytest-mock = "3.11.1" -pytest-timeout = "2.2.0" +pytest-timeout = "2.3.1" [tool.pytest.ini_options] filterwarnings = [ From 4017d8b1a9336f5a0636ffb21e356fe08f0b92e5 Mon Sep 17 00:00:00 2001 From: Srihari Venkataramaiah Date: Fri, 9 Aug 2024 10:26:05 +0530 Subject: [PATCH 311/496] Add Ray Cluster Upgrade test for Ray Job Long Running scenarios (#614) --- tests/e2e/mnist_sleep.py | 72 +++++++ .../raycluster_sdk_upgrade_sleep_test.py | 175 ++++++++++++++++++ 2 files changed, 247 insertions(+) create mode 100644 tests/e2e/mnist_sleep.py create mode 100644 tests/upgrade/raycluster_sdk_upgrade_sleep_test.py diff --git a/tests/e2e/mnist_sleep.py b/tests/e2e/mnist_sleep.py new file mode 100644 index 00000000..628dc74c --- /dev/null +++ b/tests/e2e/mnist_sleep.py @@ -0,0 +1,72 @@ +# Copyright 2022 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import time +import torch +import torch.nn as nn +from torch.utils.data import DataLoader +from torchvision import datasets, transforms + + +# Define a simple neural network +class NeuralNetwork(nn.Module): + def __init__(self): + super(NeuralNetwork, self).__init__() + self.flatten = nn.Flatten() + self.linear_relu_stack = nn.Sequential( + nn.Linear(28 * 28, 512), + nn.ReLU(), + nn.Linear(512, 512), + nn.ReLU(), + nn.Linear(512, 10), + ) + + def forward(self, x): + x = self.flatten(x) + logits = self.linear_relu_stack(x) + return logits + + +# Define the training function +def train(): + # Sleeping for 24 hours for upgrade test scenario + print("Sleeping for 24 hours before starting the training for upgrade testing...") + time.sleep(24 * 60 * 60) + + # Load dataset + transform = transforms.Compose([transforms.ToTensor()]) + train_dataset = datasets.FashionMNIST( + root="./data", train=True, download=True, transform=transform + ) + train_loader = DataLoader(train_dataset, batch_size=64, shuffle=True) + + # Initialize the neural network, loss function, and optimizer + model = NeuralNetwork() + criterion = nn.CrossEntropyLoss() + optimizer = torch.optim.SGD(model.parameters(), lr=0.01) + + # Train the model + num_epochs = 3 + for epoch in range(num_epochs): + for inputs, labels in train_loader: + optimizer.zero_grad() + outputs = model(inputs) + loss = criterion(outputs, labels) + loss.backward() + optimizer.step() + print(f"Epoch [{epoch+1}/{num_epochs}], Loss: {loss.item():.4f}") + + +if __name__ == "__main__": + train() diff --git a/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py b/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py new file mode 100644 index 00000000..c415b2fc --- /dev/null +++ b/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py @@ -0,0 +1,175 @@ +import requests +from time import sleep + +from codeflare_sdk import ( + Cluster, + ClusterConfiguration, + TokenAuthentication, + get_cluster, +) +from codeflare_sdk.job import RayJobClient + +from tests.e2e.support import * + + +from codeflare_sdk.utils.kube_api_helpers import _kube_api_error_handling + +namespace = "test-ns-rayupgrade-sleep" +# Global variables for kueue resources +cluster_queue = "cluster-queue-mnist" +flavor = "default-flavor-mnist" +local_queue = "local-queue-mnist" + + +# Creates a Ray cluster , submit RayJob mnist script long running +class TestSetupSleepRayJob: + def setup_method(self): + initialize_kubernetes_client(self) + create_namespace_with_name(self, namespace) + try: + create_cluster_queue(self, cluster_queue, flavor) + create_resource_flavor(self, flavor) + create_local_queue(self, cluster_queue, local_queue) + except Exception as e: + delete_namespace(self) + delete_kueue_resources(self) + return _kube_api_error_handling(e) + + def test_mnist_ray_cluster_sdk_auth(self): + self.run_mnist_raycluster_sdk_oauth() + + def run_mnist_raycluster_sdk_oauth(self): + ray_image = get_ray_image() + + auth = TokenAuthentication( + token=run_oc_command(["whoami", "--show-token=true"]), + server=run_oc_command(["whoami", "--show-server=true"]), + skip_tls=True, + ) + auth.login() + + cluster = Cluster( + ClusterConfiguration( + name="mnist", + namespace=self.namespace, + num_workers=1, + head_cpus=1, + head_memory=4, + worker_cpu_requests=1, + worker_cpu_limits=1, + worker_memory_requests=4, + worker_memory_limits=4, + image=ray_image, + write_to_file=True, + verify_tls=False, + ) + ) + + try: + cluster.up() + cluster.status() + # wait for raycluster to be Ready + cluster.wait_ready() + 
cluster.status() + # Check cluster details + cluster.details() + # Assert the cluster status is READY + _, ready = cluster.status() + assert ready + submission_id = self.assert_jobsubmit() + print(f"Job submitted successfully, job submission id: ", submission_id) + + except Exception as e: + print(f"An unexpected error occurred. Error: ", e) + delete_namespace(self) + delete_kueue_resources(self) + assert False, "Cluster is not ready!" + + def assert_jobsubmit(self): + auth_token = run_oc_command(["whoami", "--show-token=true"]) + cluster = get_cluster("mnist", namespace) + cluster.details() + ray_dashboard = cluster.cluster_dashboard_uri() + header = {"Authorization": f"Bearer {auth_token}"} + client = RayJobClient(address=ray_dashboard, headers=header, verify=False) + + # Submit the job + submission_id = client.submit_job( + entrypoint="python mnist_sleep.py", + runtime_env={ + "working_dir": "./tests/e2e/", + "pip": {"packages": ["torchvision==0.12.0"], "pip_check": False}, + "env_vars": get_setup_env_variables(), + }, + ) + print(f"Submitted job with ID: {submission_id}") + done = False + time = 0 + timeout = 180 + while not done: + status = client.get_job_status(submission_id) + if status.is_terminal(): + break + if status == "RUNNING": + print(f"Job is Running: '{status}'") + assert True + break + if not done: + print(status) + if timeout and time >= timeout: + raise TimeoutError(f"job has timed out after waiting {timeout}s") + sleep(5) + time += 5 + + logs = client.get_job_logs(submission_id) + print(logs) + return submission_id + + +class TestVerifySleepRayJobRunning: + def setup_method(self): + initialize_kubernetes_client(self) + auth = TokenAuthentication( + token=run_oc_command(["whoami", "--show-token=true"]), + server=run_oc_command(["whoami", "--show-server=true"]), + skip_tls=True, + ) + auth.login() + self.namespace = namespace + self.cluster = get_cluster("mnist", self.namespace) + self.cluster_queue = cluster_queue + self.resource_flavor = flavor + if not self.cluster: + raise RuntimeError("TestSetupSleepRayJob needs to be run before this test") + + def teardown_method(self): + delete_namespace(self) + delete_kueue_resources(self) + + def test_mnist_job_running(self): + client = self.get_ray_job_client(self.cluster) + self.assertJobExists(client, 1) + self.assertJobRunning(client) + self.cluster.down() + + def get_ray_job_client(self, cluster): + auth_token = run_oc_command(["whoami", "--show-token=true"]) + ray_dashboard = cluster.cluster_dashboard_uri() + header = {"Authorization": f"Bearer {auth_token}"} + return RayJobClient(address=ray_dashboard, headers=header, verify=False) + + # Assertions + def assertJobExists(self, client, expectedJobsSize): + job_list = client.list_jobs() + assert len(job_list) == expectedJobsSize + + def assertJobRunning(self, client): + job_list = client.list_jobs() + submission_id = job_list[0].submission_id + status = client.get_job_status(submission_id) + if status == "RUNNING": + print(f"Job is Running: '{status}'") + assert True + else: + print(f"Job is not in Running state: '{status}'") + assert False From 8566c33beb98b332f60cdba7fba618ec2eab7f16 Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Thu, 15 Aug 2024 15:00:00 +0200 Subject: [PATCH 312/496] Upgrade tests: Make sure head node has enough memory to execute test --- tests/upgrade/raycluster_sdk_upgrade_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index 
87f61f15..640b1b5d 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -49,7 +49,7 @@ def run_mnist_raycluster_sdk_oauth(self): namespace=self.namespace, num_workers=1, head_cpus=1, - head_memory=2, + head_memory=4, worker_cpu_requests=1, worker_cpu_limits=1, worker_memory_requests=4, From 404e57bf8d56315f2e25a022caf2aa0a2c3c2a42 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Thu, 15 Aug 2024 17:13:45 +0100 Subject: [PATCH 313/496] Explicitly define setuptools dependency to fix odh-notebooks-sync workflow --- poetry.lock | 10 +++++----- pyproject.toml | 4 ++++ 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index bd9102d3..c75f3868 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -2416,13 +2416,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "70.1.1" +version = "70.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.1.1-py3-none-any.whl", hash = "sha256:a58a8fde0541dab0419750bcc521fbdf8585f6e5cb41909df3a472ef7b81ca95"}, - {file = "setuptools-70.1.1.tar.gz", hash = "sha256:937a48c7cdb7a21eb53cd7f9b59e525503aa8abaf3584c730dc5f7a5bec3a650"}, + {file = "setuptools-70.1.0-py3-none-any.whl", hash = "sha256:d9b8b771455a97c8a9f3ab3448ebe0b29b5e105f1228bba41028be116985a267"}, + {file = "setuptools-70.1.0.tar.gz", hash = "sha256:01a1e793faa5bd89abc851fa15d0a0db26f160890c7102cd8dce643e886b47f5"}, ] [package.extras] @@ -2807,4 +2807,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "7bb89ee82f06208a09aecaa8f4f31aa2e06194e79eae305546d255d13e895368" +content-hash = "1cccf4aa9ff3d9bc53049aa49aa2182ee4f41e773d8a5ccba5028724a2879966" diff --git a/pyproject.toml b/pyproject.toml index 44f68d46..62643fb6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,10 @@ cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" ipywidgets = "8.1.2" +# If updating setuptools version, please verify the odh-notebooks-sync workflow continues to work. 
+# https://github.com/project-codeflare/codeflare-sdk/actions/workflows/odh-notebooks-sync.yml +# Reference: https://github.com/target/strelka/pull/469 +setuptools = "70.1.0" [tool.poetry.group.docs] optional = true From 6f58a8bb44671e9380886f232702fd1a967bd139 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 16 Aug 2024 10:16:48 +0100 Subject: [PATCH 314/496] Allow setuptools to use previous versions for compatibility --- poetry.lock | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index c75f3868..1df39ebe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2807,4 +2807,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "1cccf4aa9ff3d9bc53049aa49aa2182ee4f41e773d8a5ccba5028724a2879966" +content-hash = "f55c3a6780ecfed4a2fff84dec14688bab18c2815db93510c027774d26b44f80" diff --git a/pyproject.toml b/pyproject.toml index 62643fb6..f35c360d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ ipywidgets = "8.1.2" # If updating setuptools version, please verify the odh-notebooks-sync workflow continues to work. # https://github.com/project-codeflare/codeflare-sdk/actions/workflows/odh-notebooks-sync.yml # Reference: https://github.com/target/strelka/pull/469 -setuptools = "70.1.0" +setuptools = "<= 70.1.0" [tool.poetry.group.docs] optional = true From de2bd739a9d01b9ec1f8861cfde11c5cd7d1270d Mon Sep 17 00:00:00 2001 From: Abhijeet Dhumal <84722973+abhijeet-dhumal@users.noreply.github.com> Date: Mon, 19 Aug 2024 13:17:43 +0530 Subject: [PATCH 315/496] Update s3 bucket endpoint url to remove https prefix (#643) * Update s3 bucket endpoint url to remove https prefix * Update e2e-readme wrt secure parameter used for minio client --- docs/e2e.md | 1 + tests/e2e/mnist.py | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/docs/e2e.md b/docs/e2e.md index f3d0c940..83d8ae4e 100644 --- a/docs/e2e.md +++ b/docs/e2e.md @@ -130,3 +130,4 @@ Currently the SDK doesn't support tolerations, so e2e tests can't be executed on AWS_STORAGE_BUCKET= AWS_STORAGE_BUCKET_MNIST_DIR= ``` + Note : When using the Python Minio client to connect to a minio storage bucket, the `AWS_DEFAULT_ENDPOINT` environment variable by default expects secure endpoint where user can use endpoint url with https/http prefix for autodetection of secure/insecure endpoint. 
diff --git a/tests/e2e/mnist.py b/tests/e2e/mnist.py index 4c382f67..143a6b6c 100644 --- a/tests/e2e/mnist.py +++ b/tests/e2e/mnist.py @@ -154,11 +154,20 @@ def prepare_data(self): secret_key = os.environ.get("AWS_SECRET_ACCESS_KEY") bucket_name = os.environ.get("AWS_STORAGE_BUCKET") + # remove prefix if specified in storage bucket endpoint url + secure = True + if endpoint.startswith("https://"): + endpoint = endpoint[len("https://") :] + elif endpoint.startswith("http://"): + endpoint = endpoint[len("http://") :] + secure = False + client = Minio( endpoint, access_key=access_key, secret_key=secret_key, cert_check=False, + secure=secure, ) if not os.path.exists(dataset_dir): From a146547840a8e130efba1c367942f271dfe629c1 Mon Sep 17 00:00:00 2001 From: Shilpa Chugh Date: Wed, 21 Aug 2024 14:31:54 +0530 Subject: [PATCH 316/496] Increase memory for ray head pod --- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index bc052bea..3fe6177c 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -43,7 +43,7 @@ def run_mnist_raycluster_sdk_oauth(self): namespace=self.namespace, num_workers=1, head_cpus="500m", - head_memory=2, + head_memory=4, worker_cpu_requests=1, worker_cpu_limits=1, worker_memory_requests=1, From ee307a9e3ea5911c8b4ccc5f201711c9add3a4b5 Mon Sep 17 00:00:00 2001 From: abhijeet-dhumal Date: Tue, 20 Aug 2024 14:45:00 +0530 Subject: [PATCH 317/496] Add provision in odh-sync workflow to adjust Pipfile.cpu and Pipfile.gpu with codeflare-sdk release version --- .github/workflows/odh-notebooks-sync.yml | 65 +++++++++++++++++------- 1 file changed, 47 insertions(+), 18 deletions(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index 9ce1858f..00a9c2de 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -29,7 +29,7 @@ env: jobs: build: - runs-on: ubuntu-20.04-4core + runs-on: ubuntu-22.04-8core steps: - name: Clone repository and Sync run: | @@ -53,6 +53,23 @@ jobs: - name: Update Pipfiles in accordance with Codeflare-SDK latest release run: | package_name=codeflare-sdk + available_python_versions=("3.9") # add space separated python versions according to 'python-versions' specified in 'Setup Python Environment' step + install_package_using_pipenv(){ + if ! pipenv install ${package_name}~="${CODEFLARE_RELEASE_VERSION}"; then + echo "Failed to install ${package_name} with version ${CODEFLARE_RELEASE_VERSION} in $dir" + exit 1 + fi + # Lock dependencies, ensuring pre-release are included and clear previous state + if ! pipenv lock --pre --clear ; then + echo "Failed to lock dependencies" + exit 1 + fi + # remove virtual env and clear cache + if ! pipenv --rm --clear ; then + echo "Failed to remove virtual environment" + exit 1 + fi + } # Get the list of available versions for the package if ! versions=$(pipenv run pip-versions list $package_name);then echo "Failed to retrieve versions for $package_name" @@ -74,7 +91,7 @@ jobs: #Check if current_dir is not in exclude_directories list if [[ ! 
"${exclude_directories[@]}" =~ "$current_dir" ]]; then #Check if Pipfile exists in current_dir - if [ -f "$current_dir/Pipfile" ];then + if ls "$current_dir"/Pipfile* 1> /dev/null 2>&1;then directories+=("$current_dir") fi fi @@ -95,24 +112,36 @@ jobs: cd "$dir" minimum_supported_python_version_major=$(echo "${MINIMUM_SUPPORTED_PYTHON_VERSION}" | awk -F '.' '{print $1}') #integer of MINIMUM_SUPPORTED_PYTHON_VERSION env variable minimum_supported_python_version_minor=$(echo "${MINIMUM_SUPPORTED_PYTHON_VERSION}" | awk -F '.' '{print $2}') #decimal of MINIMUM_SUPPORTED_PYTHON_VERSION env variable - pipfile_python_version=$(grep -E '^python_version' ./Pipfile | cut -d '"' -f 2) # extracted from pipfile + if ! [ -f "Pipfile" ]; then + if [ -f "Pipfile.cpu" ]; then + pipfile_python_version=$(grep -E '^python_version' ./Pipfile.cpu | cut -d '"' -f 2) # extracted from pipfile.cpu + fi + else + pipfile_python_version=$(grep -E '^python_version' ./Pipfile | cut -d '"' -f 2) # extracted from pipfile + fi pipfile_python_version_major=$(echo "$pipfile_python_version" | awk -F '.' '{print $1}') pipfile_python_version_minor=$(echo "$pipfile_python_version" | awk -F '.' '{print $2}') - if [[ "pipfile_python_version_major" -ge "$minimum_supported_python_version_major" && "pipfile_python_version_minor" -ge "$minimum_supported_python_version_minor" ]]; then - #install specified package - if ! pipenv install ${package_name}~="${CODEFLARE_RELEASE_VERSION}"; then - echo "Failed to install ${package_name} with version ${CODEFLARE_RELEASE_VERSION} in $dir" - exit 1 - fi - # Lock dependencies, ensuring pre-release are included and clear previous state - if ! pipenv lock --pre --clear ; then - echo "Failed to lock dependencies" - exit 1 - fi - # remove virtual env and clear cache - if ! pipenv --rm --clear ; then - echo "Failed to remove virtual environment" - exit 1 + if [[ " ${available_python_versions[@]} " =~ " ${pipfile_python_version} " && "$pipfile_python_version_major" -ge "$minimum_supported_python_version_major" && "$pipfile_python_version_minor" -ge "$minimum_supported_python_version_minor" ]]; then + if ! 
[ -f "Pipfile" ]; then + if [ -f "Pipfile.cpu" ]; then + mv Pipfile.cpu Pipfile + mv Pipfile.lock.cpu Pipfile.lock + #install specified package + install_package_using_pipenv + mv Pipfile.lock Pipfile.lock.cpu + mv Pipfile Pipfile.cpu + fi + if [ -f "Pipfile.gpu" ]; then + mv Pipfile.gpu Pipfile + mv Pipfile.lock.gpu Pipfile.lock + #install specified package + install_package_using_pipenv + mv Pipfile.lock Pipfile.lock.gpu + mv Pipfile Pipfile.gpu + fi + else + #install specified package + install_package_using_pipenv fi else echo "Skipped installation of ${package_name} with version ${CODEFLARE_RELEASE_VERSION} in $dir" From 95b2165bc23659b0c7fd5ed6fc02dfc0a9acfb50 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 20 Aug 2024 11:11:05 +0100 Subject: [PATCH 318/496] Update notebooks and docs with updated Cluster Configuration args --- .../additional-demos/hf_interactive.ipynb | 12 ++++++------ .../additional-demos/local_interactive.ipynb | 12 ++++++------ .../additional-demos/ray_job_client.ipynb | 12 ++++++------ demo-notebooks/guided-demos/0_basic_ray.ipynb | 12 ++++++------ .../guided-demos/1_cluster_job_client.ipynb | 12 ++++++------ .../guided-demos/2_basic_interactive.ipynb | 12 ++++++------ .../notebook-ex-outputs/0_basic_ray.ipynb | 12 ++++++------ .../1_cluster_job_client.ipynb | 12 ++++++------ .../notebook-ex-outputs/2_basic_interactive.ipynb | 12 ++++++------ .../guided-demos/preview_nbs/0_basic_ray.ipynb | 12 ++++++------ .../preview_nbs/1_cluster_job_client.ipynb | 12 ++++++------ .../preview_nbs/2_basic_interactive.ipynb | 12 ++++++------ docs/cluster-configuration.md | 15 +++++++-------- 13 files changed, 79 insertions(+), 80 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index a8ab8946..3fc228f0 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -91,13 +91,13 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster_name= \"hfgputest\"\n", "cluster = Cluster(ClusterConfiguration(name=cluster_name, \n", - " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=1,\n", + " head_extended_resource_requests={'nvidia.com/gpu':1}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':1},\n", " num_workers=1,\n", - " min_cpus=8, \n", - " max_cpus=8, \n", - " min_memory=16, \n", - " max_memory=16, \n", + " worker_cpu_requests=8, \n", + " worker_cpu_limits=8, \n", + " worker_memory_requests=16, \n", + " worker_memory_limits=16, \n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index d7e4ac8c..1b62b988 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -54,13 +54,13 @@ "\n", "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", - " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=0,\n", + " 
head_extended_resource_requests={'nvidia.com/gpu':0}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':0},\n", " num_workers=1,\n", - " min_cpus=1,\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", + " worker_cpu_requests=1,\n", + " worker_cpu_limits=1,\n", + " worker_memory_requests=4,\n", + " worker_memory_limits=4,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 28efde9a..828b3469 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -55,13 +55,13 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=0,\n", + " head_extended_resource_requests={'nvidia.com/gpu':0}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':0},\n", " num_workers=2,\n", - " min_cpus=1,\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", + " worker_cpu_requests=1,\n", + " worker_cpu_limits=1,\n", + " worker_memory_requests=4,\n", + " worker_memory_limits=4,\n", " # image=\"\", # Optional Field \n", " write_to_file=False # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index bbade0cc..55fd81d9 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -64,13 +64,13 @@ " name='raytest', \n", " head_cpus='500m',\n", " head_memory=2,\n", - " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=0,\n", + " head_extended_resource_requests={'nvidia.com/gpu':0}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':0},\n", " num_workers=2,\n", - " min_cpus='250m',\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", + " worker_cpu_requests='250m',\n", + " worker_cpu_limits=1,\n", + " worker_memory_requests=4,\n", + " worker_memory_limits=4,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 61e360ef..eb37f70b 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -57,13 +57,13 @@ " name='jobtest',\n", " head_cpus=1,\n", " head_memory=4,\n", - " head_gpus=1, # For GPU enabled workloads set the 
head_gpus and num_gpus\n", - " num_gpus=1,\n", + " head_extended_resource_requests={'nvidia.com/gpu':1}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':1},\n", " num_workers=2,\n", - " min_cpus='250m',\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", + " worker_cpu_requests='250m',\n", + " worker_cpu_limits=1,\n", + " worker_memory_requests=4,\n", + " worker_memory_limits=4,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index e344ff35..249f4150 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -62,13 +62,13 @@ " name=cluster_name,\n", " head_cpus=1,\n", " head_memory=6,\n", - " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=1,\n", + " head_extended_resource_requests={'nvidia.com/gpu':1}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':1},\n", " num_workers=2,\n", - " min_cpus='250m',\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=6,\n", + " worker_cpu_requests='250m',\n", + " worker_cpu_limits=1,\n", + " worker_memory_requests=4,\n", + " worker_memory_limits=6,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 31ffb730..dc207376 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -70,13 +70,13 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", - " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=0,\n", + " head_extended_resource_requests={'nvidia.com/gpu':0}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':0},\n", " num_workers=2,\n", - " min_cpus=1,\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", + " worker_cpu_requests=1,\n", + " worker_cpu_limits=1,\n", + " worker_memory_requests=4,\n", + " worker_memory_limits=4,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index c117c239..903be9f0 100644 --- 
a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -55,13 +55,13 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=1,\n", + " head_extended_resource_requests={'nvidia.com/gpu':1}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':1},\n", " num_workers=2,\n", - " min_cpus=1,\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", + " worker_cpu_requests=1,\n", + " worker_cpu_limits=1,\n", + " worker_memory_requests=4,\n", + " worker_memory_limits=4,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 56640a5b..eb6c8977 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -68,13 +68,13 @@ "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", - " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=1,\n", + " head_extended_resource_requests={'nvidia.com/gpu':1}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':1},\n", " num_workers=2,\n", - " min_cpus=2,\n", - " max_cpus=2,\n", - " min_memory=8,\n", - " max_memory=8,\n", + " worker_cpu_requests=2,\n", + " worker_cpu_limits=2,\n", + " worker_memory_requests=8,\n", + " worker_memory_limits=8,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index edf4fd5f..03270e8b 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -62,13 +62,13 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest',\n", - " head_gpus=0, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=0,\n", + " head_extended_resource_requests={'nvidia.com/gpu':0}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':0},\n", " num_workers=2,\n", - " min_cpus=1,\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", + " worker_cpu_requests=1,\n", + " worker_cpu_limits=1,\n", + " 
worker_memory_requests=4,\n", + " worker_memory_limits=4,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index 692c4d37..bcf6e86c 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -55,13 +55,13 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=1,\n", + " head_extended_resource_requests={'nvidia.com/gpu':1}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':1},\n", " num_workers=2,\n", - " min_cpus=1,\n", - " max_cpus=1,\n", - " min_memory=4,\n", - " max_memory=4,\n", + " worker_cpu_requests=1,\n", + " worker_cpu_limits=1,\n", + " worker_memory_requests=4,\n", + " worker_memory_limits=4,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources\n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 91605499..1f48d329 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -60,13 +60,13 @@ "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", - " head_gpus=1, # For GPU enabled workloads set the head_gpus and num_gpus\n", - " num_gpus=1,\n", + " head_extended_resource_requests={'nvidia.com/gpu':1}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':1},\n", " num_workers=2,\n", - " min_cpus=2,\n", - " max_cpus=2,\n", - " min_memory=8,\n", - " max_memory=8,\n", + " worker_cpu_requests=2,\n", + " worker_cpu_limits=2,\n", + " worker_memory_requests=8,\n", + " worker_memory_limits=8,\n", " # image=\"\", # Optional Field \n", " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index 40132894..8ca4a2c2 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -11,14 +11,13 @@ cluster = Cluster(ClusterConfiguration( namespace='default', # Default None head_cpus=1, # Default 2 head_memory=1, # Default 8 - head_gpus=0, # Default 0 - num_gpus=0, # Default 0 + head_extended_resource_requests={'nvidia.com/gpu':0}, # Default 0 + worker_extended_resource_requests={'nvidia.com/gpu':0}, # Default 0 num_workers=1, # Default 1 - min_cpus=1, # Default 1 - max_cpus=1, # Default 1 - min_memory=2, # Default 2 - max_memory=2, # 
Default 2 - num_gpus=0, # Default 0 + worker_cpu_requests=1, # Default 1 + worker_cpu_limits=1, # Default 1 + worker_memory_requests=2, # Default 2 + worker_memory_limits=2, # Default 2 # image="", # Optional Field machine_types=["m5.xlarge", "g4dn.xlarge"], labels={"exampleLabel": "example", "secondLabel": "example"}, @@ -28,4 +27,4 @@ Note: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used The `labels={"exampleLabel": "example"}` parameter can be used to apply additional labels to the RayCluster resource. -After creating their`cluster`, a user can call `cluster.up()` and `cluster.down()` to respectively create or remove the Ray Cluster. +After creating their `cluster`, a user can call `cluster.up()` and `cluster.down()` to respectively create or remove the Ray Cluster. From 261da3f422e518c96634bc818f1987373a76c4c0 Mon Sep 17 00:00:00 2001 From: Jiri Petrlik Date: Mon, 26 Aug 2024 14:58:55 +0200 Subject: [PATCH 319/496] RHOAIENG-10371 - Clean up content of cells in SDK demo notebooks --- .../additional-demos/hf_interactive.ipynb | 1104 +-------------- .../additional-demos/local_interactive.ipynb | 147 +- .../additional-demos/ray_job_client.ipynb | 18 +- .../guided-demos/1_cluster_job_client.ipynb | 20 +- .../notebook-ex-outputs/0_basic_ray.ipynb | 187 +-- .../1_cluster_job_client.ipynb | 20 +- .../2_basic_interactive.ipynb | 1223 +---------------- .../preview_nbs/1_cluster_job_client.ipynb | 20 +- 8 files changed, 94 insertions(+), 2645 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 3fc228f0..7e2a7180 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -34,7 +34,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "id": "c737a768-6e31-4767-a301-60ae932b4ed9", "metadata": {}, "outputs": [], @@ -74,18 +74,10 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "id": "220b9d85-3a3c-4c0c-aaf2-0d866823dcd8", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Written to: hfgputest.yaml\n" - ] - } - ], + "outputs": [], "source": [ "# Create our cluster and submit\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", @@ -132,52 +124,10 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "id": "4d0db5f5-22f1-4806-ae7e-a0ee865625c1", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
      ╭─────────────────────────╮\n",
      -       "│   🚀 List of CodeFlare  │\n",
      -       "│   clusters in queue🚀   │\n",
      -       "│ +-----------+---------+ │\n",
      -       "│ | Name      | Status  | │\n",
      -       "│ +===========+=========+ │\n",
      -       "│ | hfgputest | pending | │\n",
      -       "│ |           |         | │\n",
      -       "│ +-----------+---------+ │\n",
      -       "╰─────────────────────────╯\n",
      -       "
      \n" - ], - "text/plain": [ - "╭─────────────────────────╮\n", - "│ \u001b[3m \u001b[0m\u001b[1;3m 🚀 List of CodeFlare\u001b[0m\u001b[3m \u001b[0m │\n", - "│ \u001b[3m \u001b[0m\u001b[1;3mclusters in queue🚀\u001b[0m\u001b[3m \u001b[0m │\n", - "│ +-----------+---------+ │\n", - "│ |\u001b[1m \u001b[0m\u001b[1mName \u001b[0m\u001b[1m \u001b[0m|\u001b[1m \u001b[0m\u001b[1mStatus \u001b[0m\u001b[1m \u001b[0m| │\n", - "│ +===========+=========+ │\n", - "│ |\u001b[36m \u001b[0m\u001b[36mhfgputest\u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35mpending\u001b[0m\u001b[35m \u001b[0m| │\n", - "│ |\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m| │\n", - "│ +-----------+---------+ │\n", - "╰─────────────────────────╯\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "(False, )" - ] - }, - "execution_count": 17, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "cluster.status()" ] @@ -212,75 +162,17 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "id": "06a54428-f186-4c27-948e-4eaf9c0e34b5", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
                        🚀 List of CodeFlare clusters 🚀                  \n",
      -       "                                                                    \n",
      -       " ╭────────────────────────────────────────────────────────────────╮ \n",
      -       " │   Owner                                                        │ \n",
      -       " │   hfgputest                                        Active ✅   │ \n",
      -       " │                                                                │ \n",
      -       " │   URI: ray://hfgputest-head-svc.default.svc:10001              │ \n",
      -       " │                                                                │ \n",
      -       " │   Dashboard🔗                                                  │ \n",
      -       " │                                                                │ \n",
      -       " │                      Cluster Resources                         │ \n",
      -       " │   ╭─ Workers ──╮  ╭───────── Worker specs(each) ─────────╮     │ \n",
      -       " │   │  Min  Max  │  │  Memory      CPU         GPU         │     │ \n",
      -       " │   │            │  │                                      │     │ \n",
      -       " │   │  1    1    │  │  16G~16G     8           4           │     │ \n",
      -       " │   │            │  │                                      │     │ \n",
      -       " │   ╰────────────╯  ╰──────────────────────────────────────╯     │ \n",
      -       " ╰────────────────────────────────────────────────────────────────╯ \n",
      -       "
      \n" - ], - "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 List of CodeFlare clusters 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", - " ╭────────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mOwner\u001b[0m │ \n", - " │ \u001b[1;4mhfgputest\u001b[0m Active ✅ │ \n", - " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://hfgputest-head-svc.default.svc:10001 │ \n", - " │ │ \n", - " │ \u001b]8;id=552692;ray-dashboard-hfgputest-default.apps.prepfullinstall.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", - " │ │ \n", - " │ \u001b[3m Cluster Resources \u001b[0m │ \n", - " │ ╭─ Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001b[1m \u001b[0m\u001b[1mMin\u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mMax\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", - " │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[36m \u001b[0m\u001b[36m1 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m16G~16G \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m8 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m4 \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ ╰────────────╯ ╰──────────────────────────────────────╯ │ \n", - " ╰────────────────────────────────────────────────────────────────╯ \n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 18, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "cluster.details()" ] }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "id": "8ac46c87-70f1-4c70-9648-881151665355", "metadata": {}, "outputs": [], @@ -319,18 +211,10 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "id": "4c458589-5a17-47c6-a8db-625427ae4fe7", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Ray cluster is up and running: True\n" - ] - } - ], + "outputs": [], "source": [ "#before proceeding make sure the cluster exists and the uri is not empty\n", "assert ray_cluster_uri, \"Ray cluster needs to be started and set before proceeding\"\n", @@ -377,7 +261,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "id": "e69994b4-1a13-43fe-b698-2a5374cb941b", "metadata": {}, "outputs": [], @@ -473,972 +357,10 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": null, "id": "7f0985e9-5e88-4d36-ab38-c3001c13f97c", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Downloading builder 
script: 100%|██████████| 4.31k/4.31k [00:00<00:00, 5.60MB/s]\n", - "Downloading metadata: 100%|██████████| 2.17k/2.17k [00:00<00:00, 3.13MB/s]\n", - "Downloading readme: 100%|██████████| 7.59k/7.59k [00:00<00:00, 9.75MB/s]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[2m\u001b[36m(train_fn pid=250)\u001b[0m Downloading and preparing dataset imdb/plain_text to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/2fdd8b9bcadd6e7055e742a706876ba43f19faee861df134affd7a3f60fc38a1...\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Downloading data: 0%| | 0.00/84.1M [00:00\n", - "
      \n", - "

      Ray

      \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - " \n", - "\n", - "\n", - "
      Python version:3.8.13
      Ray version: 2.1.0
      Dashboard:http://10.254.20.41:8265
      \n", - "
      \n", - "\n" - ], - "text/plain": [ - "ClientContext(dashboard_url='10.254.20.41:8265', python_version='3.8.13', ray_version='2.1.0', ray_commit='23f34d948dae8de9b168667ab27e6cf940b3ae85', protocol_version='2022-10-05', _num_clients=1, _context_to_restore=)" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "import ray\n", "\n", @@ -208,7 +129,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "3436eb4a-217c-4109-a3c3-309fda7e2442", "metadata": {}, "outputs": [], @@ -232,72 +153,32 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "5cca1874-2be3-4631-ae48-9adfa45e3af3", "metadata": { - "scrolled": true, "tags": [] }, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2023-06-27 19:14:28,222\tDEBUG worker.py:640 -- Retaining 00ffffffffffffffffffffffffffffffffffffff0100000002000000\n", - "2023-06-27 19:14:28,222\tDEBUG worker.py:564 -- Scheduling task heavy_calculation 0 b'\\x00\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x01\\x00\\x00\\x00\\x02\\x00\\x00\\x00'\n" - ] - } - ], + "outputs": [], "source": [ "ref = heavy_calculation.remote(3000)" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "id": "01172c29-e8bf-41ef-8db5-eccb07906111", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2023-06-27 19:14:29,202\tDEBUG worker.py:640 -- Retaining 16310a0f0a45af5cffffffffffffffffffffffff0100000001000000\n", - "2023-06-27 19:14:31,224\tDEBUG worker.py:439 -- Internal retry for get [ClientObjectRef(16310a0f0a45af5cffffffffffffffffffffffff0100000001000000)]\n" - ] - }, - { - "data": { - "text/plain": [ - "1789.4644387076714" - ] - }, - "execution_count": 9, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "ray.get(ref)" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "id": "9e79b547-a457-4232-b77d-19147067b972", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2023-06-27 19:14:33,161\tDEBUG dataclient.py:287 -- Got unawaited response connection_cleanup {\n", - "}\n", - "\n", - "2023-06-27 19:14:34,460\tDEBUG dataclient.py:278 -- Shutting down data channel.\n" - ] - } - ], + "outputs": [], "source": [ "ray.cancel(ref)\n", "ray.shutdown()" @@ -305,7 +186,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "id": "2c198f1f-68bf-43ff-a148-02b5cb000ff2", "metadata": {}, "outputs": [], diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 828b3469..2f43306e 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -36,15 +36,15 @@ ] }, { - "cell_type": "markdown", - "id": "18de2d65", - "metadata": {}, - "source": [ - "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." - ] - }, + "cell_type": "markdown", + "id": "18de2d65", + "metadata": {}, + "source": [ + "\n", + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. 
\n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index eb37f70b..56585e37 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -35,16 +35,16 @@ ] }, { - "cell_type": "markdown", - "id": "bc27f84c", - "metadata": {}, - "source": [ - "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", - "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." - ] - }, + "cell_type": "markdown", + "id": "bc27f84c", + "metadata": {}, + "source": [ + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", + "\n", + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index dc207376..03270e8b 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -13,7 +13,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "b55bc3ea-4ce3-49bf-bb1f-e209de8ca47a", "metadata": {}, "outputs": [], @@ -53,18 +53,10 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "0f4bc870-091f-4e11-9642-cba145710159", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Written to: raytest.yaml\n" - ] - } - ], + "outputs": [], "source": [ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", @@ -93,7 +85,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "f0884bbc-c224-4ca0-98a0-02dfa09c2200", "metadata": {}, "outputs": [], @@ -112,125 +104,30 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "3c1b4311-2e61-44c9-8225-87c2db11363d", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
      ╭───────────────────────╮\n",
      -       "│    🚀 Cluster Queue   │\n",
      -       "│       Status 🚀       │\n",
      -       "│ +---------+---------+ │\n",
      -       "│ | Name    | Status  | │\n",
      -       "│ +=========+=========+ │\n",
      -       "│ | raytest | pending | │\n",
      -       "│ |         |         | │\n",
      -       "│ +---------+---------+ │\n",
      -       "╰───────────────────────╯\n",
      -       "
      \n" - ], - "text/plain": [ - "╭───────────────────────╮\n", - "│ \u001b[3m \u001b[0m\u001b[1;3m 🚀 Cluster Queue\u001b[0m\u001b[3m \u001b[0m │\n", - "│ \u001b[3m \u001b[0m\u001b[1;3mStatus 🚀\u001b[0m\u001b[3m \u001b[0m │\n", - "│ +---------+---------+ │\n", - "│ |\u001b[1m \u001b[0m\u001b[1mName \u001b[0m\u001b[1m \u001b[0m|\u001b[1m \u001b[0m\u001b[1mStatus \u001b[0m\u001b[1m \u001b[0m| │\n", - "│ +=========+=========+ │\n", - "│ |\u001b[36m \u001b[0m\u001b[36mraytest\u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35mpending\u001b[0m\u001b[35m \u001b[0m| │\n", - "│ |\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m|\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m| │\n", - "│ +---------+---------+ │\n", - "╰───────────────────────╯\n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "(, False)" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "cluster.status()" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "a99d5aff", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Waiting for requested resources to be set up...\n", - "Requested cluster up and running!\n" - ] - } - ], + "outputs": [], "source": [ "cluster.wait_ready()" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "id": "df71c1ed", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
                        🚀 CodeFlare Cluster Status 🚀                  \n",
      -       "                                                                  \n",
      -       " ╭──────────────────────────────────────────────────────────────╮ \n",
      -       " │   Name                                                       │ \n",
      -       " │   raytest                                        Active ✅   │ \n",
      -       " │                                                              │ \n",
      -       " │   URI: ray://raytest-head-svc.default.svc:10001              │ \n",
      -       " │                                                              │ \n",
      -       " │   Dashboard🔗                                                │ \n",
      -       " │                                                              │ \n",
      -       " ╰──────────────────────────────────────────────────────────────╯ \n",
      -       "
      \n" - ], - "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Status 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", - " ╭──────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mName\u001b[0m │ \n", - " │ \u001b[1;4mraytest\u001b[0m Active ✅ │ \n", - " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://raytest-head-svc.default.svc:10001 │ \n", - " │ │ \n", - " │ \u001b]8;id=630217;ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", - " │ │ \n", - " ╰──────────────────────────────────────────────────────────────╯ \n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "(, True)" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "cluster.status()" ] @@ -245,68 +142,10 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "7fd45bc5-03c0-4ae5-9ec5-dd1c30f1a084", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
                        🚀 CodeFlare Cluster Details 🚀                  \n",
      -       "                                                                   \n",
      -       " ╭───────────────────────────────────────────────────────────────╮ \n",
      -       " │   Name                                                        │ \n",
      -       " │   raytest                                        Active ✅    │ \n",
      -       " │                                                               │ \n",
      -       " │   URI: ray://raytest-head-svc.default.svc:10001               │ \n",
      -       " │                                                               │ \n",
      -       " │   Dashboard🔗                                                 │ \n",
      -       " │                                                               │ \n",
      -       " │                       Cluster Resources                       │ \n",
      -       " │   ╭── Workers ──╮  ╭───────── Worker specs(each) ─────────╮   │ \n",
      -       " │   │  # Workers  │  │  Memory      CPU         GPU         │   │ \n",
      -       " │   │             │  │                                      │   │ \n",
      -       " │   │  2          │  │  4~4         1           0           │   │ \n",
      -       " │   │             │  │                                      │   │ \n",
      -       " │   ╰─────────────╯  ╰──────────────────────────────────────╯   │ \n",
      -       " ╰───────────────────────────────────────────────────────────────╯ \n",
      -       "
      \n" - ], - "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", - " ╭───────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mName\u001b[0m │ \n", - " │ \u001b[1;4mraytest\u001b[0m Active ✅ │ \n", - " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://raytest-head-svc.default.svc:10001 │ \n", - " │ │ \n", - " │ \u001b]8;id=623965;http://ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", - " │ │ \n", - " │ \u001b[3m Cluster Resources \u001b[0m │ \n", - " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m4~4 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m0 \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", - " ╰───────────────────────────────────────────────────────────────╯ \n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "RayCluster(name='raytest', status=, workers=2, worker_mem_min=4, worker_mem_max=4, worker_cpu=1, worker_gpu=0, namespace='default', dashboard='http://ray-dashboard-raytest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org')" - ] - }, - "execution_count": 7, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "cluster.details()" ] @@ -321,7 +160,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "5f36db0f-31f6-4373-9503-dc3c1c4c3f57", "metadata": {}, "outputs": [], diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index 903be9f0..1a5e77f4 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -35,16 +35,16 @@ ] }, { - "cell_type": "markdown", - "id": "bc27f84c", - "metadata": {}, - "source": [ - "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", - "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. 
\n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." - ] - }, + "cell_type": "markdown", + "id": "bc27f84c", + "metadata": {}, + "source": [ + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", + "\n", + "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index eb6c8977..08eaf0b8 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -10,7 +10,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "id": "b55bc3ea-4ce3-49bf-bb1f-e209de8ca47a", "metadata": {}, "outputs": [], @@ -50,18 +50,10 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "0f4bc870-091f-4e11-9642-cba145710159", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Written to: interactivetest.yaml\n" - ] - } - ], + "outputs": [], "source": [ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", @@ -83,19 +75,10 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "f0884bbc-c224-4ca0-98a0-02dfa09c2200", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Waiting for requested resources to be set up...\n", - "Requested cluster up and running!\n" - ] - } - ], + "outputs": [], "source": [ "# Bring up the cluster\n", "cluster.up()\n", @@ -104,68 +87,10 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "id": "df71c1ed", "metadata": {}, - "outputs": [ - { - "data": { - "text/html": [ - "
                            🚀 CodeFlare Cluster Details 🚀                     \n",
      -       "                                                                          \n",
      -       " ╭──────────────────────────────────────────────────────────────────────╮ \n",
      -       " │   Name                                                               │ \n",
      -       " │   interactivetest                                        Active ✅   │ \n",
      -       " │                                                                      │ \n",
      -       " │   URI: ray://interactivetest-head-svc.default.svc:10001              │ \n",
      -       " │                                                                      │ \n",
      -       " │   Dashboard🔗                                                        │ \n",
      -       " │                                                                      │ \n",
      -       " │                       Cluster Resources                              │ \n",
      -       " │   ╭── Workers ──╮  ╭───────── Worker specs(each) ─────────╮          │ \n",
      -       " │   │  # Workers  │  │  Memory      CPU         GPU         │          │ \n",
      -       " │   │             │  │                                      │          │ \n",
      -       " │   │  2          │  │  8~8         2           1           │          │ \n",
      -       " │   │             │  │                                      │          │ \n",
      -       " │   ╰─────────────╯  ╰──────────────────────────────────────╯          │ \n",
      -       " ╰──────────────────────────────────────────────────────────────────────╯ \n",
      -       "
      \n" - ], - "text/plain": [ - "\u001b[3m \u001b[0m\u001b[1;3m 🚀 CodeFlare Cluster Details 🚀\u001b[0m\u001b[3m \u001b[0m\n", - "\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\n", - " ╭──────────────────────────────────────────────────────────────────────╮ \n", - " │ \u001b[1;37;42mName\u001b[0m │ \n", - " │ \u001b[1;4minteractivetest\u001b[0m Active ✅ │ \n", - " │ │ \n", - " │ \u001b[1mURI:\u001b[0m ray://interactivetest-head-svc.default.svc:10001 │ \n", - " │ │ \n", - " │ \u001b]8;id=970589;http://ray-dashboard-interactivetest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\u001b\\\u001b[4;34mDashboard🔗\u001b[0m\u001b]8;;\u001b\\ │ \n", - " │ │ \n", - " │ \u001b[3m Cluster Resources \u001b[0m │ \n", - " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n", - " │ │ \u001b[1m \u001b[0m\u001b[1m# Workers\u001b[0m\u001b[1m \u001b[0m │ │ \u001b[1m \u001b[0m\u001b[1mMemory \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mCPU \u001b[0m\u001b[1m \u001b[0m\u001b[1m \u001b[0m\u001b[1mGPU \u001b[0m\u001b[1m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m8~8 \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m1 \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ │ \u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[36m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m\u001b[35m \u001b[0m │ │ \n", - " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n", - " ╰──────────────────────────────────────────────────────────────────────╯ \n" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/plain": [ - "RayCluster(name='interactivetest', status=, workers=2, worker_mem_min=8, worker_mem_max=8, worker_cpu=2, worker_gpu=1, namespace='default', dashboard='http://ray-dashboard-interactivetest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org')" - ] - }, - "execution_count": 4, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "cluster.details()" ] @@ -182,19 +107,10 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "id": "c1719bca", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "http://ray-dashboard-interactivetest-default.apps.meyceoz-07122023.psap.aws.rhperfscale.org\n", - "ray://interactivetest-head-svc.default.svc:10001\n" - ] - } - ], + "outputs": [], "source": [ "ray_dashboard_uri = cluster.cluster_dashboard_uri()\n", "ray_cluster_uri = cluster.cluster_uri()\n", @@ -225,18 +141,10 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "id": "300146dc", "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Ray cluster is up and running: True\n" - ] - } - ], + "outputs": [], "source": [ "#before proceeding make sure the cluster exists and the uri is not empty\n", "assert ray_cluster_uri, \"Ray cluster needs to be started and set before proceeding\"\n", @@ -266,7 +174,7 @@ }, { "cell_type": "code", - "execution_count": 7, 
+ "execution_count": null, "id": "1b36e0d9", "metadata": {}, "outputs": [], @@ -362,1111 +270,10 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "id": "5901d958", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Downloading builder script: 100%|██████████| 4.31k/4.31k [00:00<00:00, 20.9MB/s]\n", - "Downloading metadata: 100%|██████████| 2.17k/2.17k [00:00<00:00, 14.1MB/s]\n", - "Downloading readme: 100%|██████████| 7.59k/7.59k [00:00<00:00, 22.9MB/s]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Downloading and preparing dataset imdb/plain_text to /home/ray/.cache/huggingface/datasets/imdb/plain_text/1.0.0/d613c88cf8fa3bab83b4ded3713f1f74830d1100e171db75bbddb80b3345c9c0...\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Downloading data: 0%| | 0.00/84.1M [00:00 AllToAllOperator[RandomizeBlockOrder]\n", - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,957\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,958\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", - "\u001b[2m\u001b[36m(HuggingFaceTrainer pid=196, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,969\tINFO streaming_executor.py:149 -- Shutting down .\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:51:58,912\tINFO config.py:86 -- Setting up process group for: env:// [rank=0, world_size=2]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m == Status ==\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Current time: 2023-08-09 14:52:01 (running for 00:00:10.18)\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Using FIFO scheduling algorithm.\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Logical resource usage: 1.0/6 CPUs, 2.0/2 GPUs\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Result logdir: /home/ray/ray_results/HuggingFaceTrainer_2023-08-09_14-51-51\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m Number of trials: 1/1 (1 RUNNING)\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-----------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | Trial name | status | loc |\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m |--------------------------------+----------+-----------------|\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m | HuggingFaceTrainer_f2621_00000 | RUNNING | 10.130.4.19:196 |\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m +--------------------------------+----------+-----------------+\n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n", - "\u001b[2m\u001b[36m(train_fn pid=425)\u001b[0m \n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> 
AllToAllOperator[RandomizeBlockOrder]\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,262\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=235, ip=10.130.4.19)\u001b[0m 2023-08-09 14:52:01,274\tINFO streaming_executor.py:149 -- Shutting down .\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:91 -- Executing DAG InputDataBuffer[Input] -> AllToAllOperator[RandomizeBlockOrder]\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:92 -- Execution config: ExecutionOptions(resource_limits=ExecutionResources(cpu=None, gpu=None, object_store_memory=None), locality_with_output=False, preserve_order=False, actor_locality_enabled=True, verbose_progress=False)\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,252\tINFO streaming_executor.py:94 -- Tip: For detailed progress reporting, run `ray.data.DataContext.get_current().execution_options.verbose_progress = True`\n", - "\u001b[2m\u001b[36m(RayTrainWorker pid=232, ip=10.129.4.19)\u001b[0m 2023-08-09 14:52:01,263\tINFO streaming_executor.py:149 -- Shutting down .\n", - "Downloading (…)lve/main/config.json: 100%|██████████| 483/483 [00:00<00:00, 151kB/s]\n", - "Downloading (…)lve/main/config.json: 100%|██████████| 483/483 [00:00<00:00, 146kB/s]\n", - "Downloading model.safetensors: 0%| | 0.00/268M [00:00 Date: Fri, 6 Sep 2024 13:19:33 +0100 Subject: [PATCH 320/496] fix(generate_cert.py): add get_secret_name function to solve issues with autogenerated secret names Signed-off-by: Bobbins228 --- src/codeflare_sdk/utils/generate_cert.py | 25 +++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/src/codeflare_sdk/utils/generate_cert.py b/src/codeflare_sdk/utils/generate_cert.py index 04b04d3e..5de56882 100644 --- a/src/codeflare_sdk/utils/generate_cert.py +++ b/src/codeflare_sdk/utils/generate_cert.py @@ -21,6 +21,7 @@ import datetime from ..cluster.auth import config_check, api_config_handler from kubernetes import client, config +from .kube_api_helpers import _kube_api_error_handling def generate_ca_cert(days: int = 30): @@ -74,6 +75,24 @@ def generate_ca_cert(days: int = 30): return key, certificate +def get_secret_name(cluster_name, namespace, api_instance): + label_selector = f"ray.openshift.ai/cluster-name={cluster_name}" + try: + secrets = api_instance.list_namespaced_secret( + namespace, label_selector=label_selector + ) + for secret in secrets.items: + if ( + f"{cluster_name}-ca-secret-" in secret.metadata.name + ): # Oauth secret share the same label this conditional is to make things more specific + return secret.metadata.name + else: + continue + raise KeyError(f"Unable to gather secret name for {cluster_name}") + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + def generate_tls_cert(cluster_name, namespace, days=30): # Create a folder tls-- 
and store three files: ca.crt, tls.crt, and tls.key tls_dir = os.path.join(os.getcwd(), f"tls-{cluster_name}-{namespace}") @@ -85,7 +104,11 @@ def generate_tls_cert(cluster_name, namespace, days=30): # oc get secret ca-secret- -o template='{{index .data "ca.crt"}}'|base64 -d > ${TLSDIR}/ca.crt config_check() v1 = client.CoreV1Api(api_config_handler()) - secret = v1.read_namespaced_secret(f"ca-secret-{cluster_name}", namespace).data + + # Secrets have a suffix appended to the end so we must list them and gather the secret that includes cluster_name-ca-secret- + secret_name = get_secret_name(cluster_name, namespace, v1) + secret = v1.read_namespaced_secret(secret_name, namespace).data + ca_cert = secret.get("ca.crt") ca_key = secret.get("ca.key") From be64fb5e6f775d3e4cdc3e65874e8ef9b9a00194 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 6 Sep 2024 13:38:33 +0100 Subject: [PATCH 321/496] test(unit_test.py): update unit test for test_generate_tls_cert Signed-off-by: Bobbins228 --- tests/unit_test.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/unit_test.py b/tests/unit_test.py index ba937d87..fd0be3df 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -2619,6 +2619,10 @@ def test_generate_tls_cert(mocker): test the function codeflare_sdk.utils.generate_ca_cert generates the correct outputs """ mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "codeflare_sdk.utils.generate_cert.get_secret_name", + return_value="ca-secret-cluster", + ) mocker.patch( "kubernetes.client.CoreV1Api.read_namespaced_secret", side_effect=secret_ca_retreival, From d47419c1e8854046220ce485899f994cdbf9e84e Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Fri, 6 Sep 2024 09:59:55 +0100 Subject: [PATCH 322/496] Remove Notebook Image Build and Push steps from release workflow --- .github/workflows/release.yaml | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 15063f23..aae3e324 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -86,23 +86,6 @@ jobs: - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@release/v1 - - name: Notebook Image Build and Push - run: | - gh workflow run image-build-and-push.yaml --repo ${{ github.event.inputs.codeflare-repository-organization }}/codeflare-sdk --ref ${{ github.ref }} --field is-stable=${{ github.event.inputs.is-stable }} --field release-version=${{ github.event.inputs.release-version }} --field quay-organization=${{ github.event.inputs.quay-organization }} - env: - GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} - shell: bash - - - name: Wait for Notebook image build and push to finish - run: | - # wait for a while for Run to be started - sleep 5 - run_id=$(gh run list --workflow image-build-and-push.yaml --repo ${{ github.event.inputs.codeflare-repository-organization }}/codeflare-sdk --limit 1 --json databaseId --jq .[].databaseId) - gh run watch ${run_id} --repo ${{ github.event.inputs.codeflare-repository-organization }}/codeflare-sdk --interval 10 --exit-status - env: - GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} - shell: bash - - name: Sync ODH Notebooks run: | gh workflow run odh-notebooks-sync.yml \ From a22b5aef6c1fb9541b3b298e5336fb5e84a18b15 Mon Sep 17 00:00:00 2001 From: codeflare-machine-account Date: Fri, 6 Sep 2024 13:49:41 +0000 Subject: [PATCH 323/496] Changes in docs for release: v0.20.2 --- .../utils/generate_cert.html 
| 59 ++++++++++++++++++- 1 file changed, 57 insertions(+), 2 deletions(-) diff --git a/docs/detailed-documentation/utils/generate_cert.html b/docs/detailed-documentation/utils/generate_cert.html index b41846f9..176584c4 100644 --- a/docs/detailed-documentation/utils/generate_cert.html +++ b/docs/detailed-documentation/utils/generate_cert.html @@ -49,6 +49,7 @@

      Module codeflare_sdk.utils.generate_cert

      import datetime from ..cluster.auth import config_check, api_config_handler from kubernetes import client, config +from .kube_api_helpers import _kube_api_error_handling def generate_ca_cert(days: int = 30): @@ -102,6 +103,24 @@

      Module codeflare_sdk.utils.generate_cert

      return key, certificate +def get_secret_name(cluster_name, namespace, api_instance): + label_selector = f"ray.openshift.ai/cluster-name={cluster_name}" + try: + secrets = api_instance.list_namespaced_secret( + namespace, label_selector=label_selector + ) + for secret in secrets.items: + if ( + f"{cluster_name}-ca-secret-" in secret.metadata.name + ): # Oauth secret share the same label this conditional is to make things more specific + return secret.metadata.name + else: + continue + raise KeyError(f"Unable to gather secret name for {cluster_name}") + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + def generate_tls_cert(cluster_name, namespace, days=30): # Create a folder tls-<cluster>-<namespace> and store three files: ca.crt, tls.crt, and tls.key tls_dir = os.path.join(os.getcwd(), f"tls-{cluster_name}-{namespace}") @@ -113,7 +132,11 @@

      Module codeflare_sdk.utils.generate_cert

      # oc get secret ca-secret-<cluster-name> -o template='{{index .data "ca.crt"}}'|base64 -d > ${TLSDIR}/ca.crt config_check() v1 = client.CoreV1Api(api_config_handler()) - secret = v1.read_namespaced_secret(f"ca-secret-{cluster_name}", namespace).data + + # Secrets have a suffix appended to the end so we must list them and gather the secret that includes cluster_name-ca-secret- + secret_name = get_secret_name(cluster_name, namespace, v1) + secret = v1.read_namespaced_secret(secret_name, namespace).data + ca_cert = secret.get("ca.crt") ca_key = secret.get("ca.key") @@ -294,7 +317,11 @@

      Functions

      # oc get secret ca-secret-<cluster-name> -o template='{{index .data "ca.crt"}}'|base64 -d > ${TLSDIR}/ca.crt config_check() v1 = client.CoreV1Api(api_config_handler()) - secret = v1.read_namespaced_secret(f"ca-secret-{cluster_name}", namespace).data + + # Secrets have a suffix appended to the end so we must list them and gather the secret that includes cluster_name-ca-secret- + secret_name = get_secret_name(cluster_name, namespace, v1) + secret = v1.read_namespaced_secret(secret_name, namespace).data + ca_cert = secret.get("ca.crt") ca_key = secret.get("ca.key") @@ -363,6 +390,33 @@

      Functions

      f.write(tls_cert.public_bytes(serialization.Encoding.PEM).decode("utf-8"))
  • +
    +def get_secret_name(cluster_name, namespace, api_instance) +
    +
    +
    +
    + +Expand source code + +
    def get_secret_name(cluster_name, namespace, api_instance):
    +    label_selector = f"ray.openshift.ai/cluster-name={cluster_name}"
    +    try:
    +        secrets = api_instance.list_namespaced_secret(
    +            namespace, label_selector=label_selector
    +        )
    +        for secret in secrets.items:
    +            if (
    +                f"{cluster_name}-ca-secret-" in secret.metadata.name
    +            ):  # Oauth secret share the same label this conditional is to make things more specific
    +                return secret.metadata.name
    +            else:
    +                continue
    +        raise KeyError(f"Unable to gather secret name for {cluster_name}")
    +    except Exception as e:  # pragma: no cover
    +        return _kube_api_error_handling(e)
    +
    +
    @@ -384,6 +438,7 @@

    Index

  • export_env
  • generate_ca_cert
  • generate_tls_cert
  • +
  • get_secret_name
  • From 9047a4c650396f78a49dbee96e5627b3754178b1 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 6 Sep 2024 14:03:29 +0000 Subject: [PATCH 324/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index 607d3de4..c1490035 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 91% - 91% + 90% + 90% From 0813065d3622493da35f1ba4755e0835a8377aed Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 10 Sep 2024 10:42:34 +0100 Subject: [PATCH 325/496] build: upgrade setuptools version to 73.0.0 Signed-off-by: Bobbins228 --- poetry.lock | 4 ++-- pyproject.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1df39ebe..1adcd397 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aiohttp" @@ -2807,4 +2807,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "f55c3a6780ecfed4a2fff84dec14688bab18c2815db93510c027774d26b44f80" +content-hash = "f02d9217a65ea7004e4c8cb726fa4117f7bf23dfd3f986810fd3a2e4659d66eb" diff --git a/pyproject.toml b/pyproject.toml index f35c360d..93e485d9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ ipywidgets = "8.1.2" # If updating setuptools version, please verify the odh-notebooks-sync workflow continues to work. # https://github.com/project-codeflare/codeflare-sdk/actions/workflows/odh-notebooks-sync.yml # Reference: https://github.com/target/strelka/pull/469 -setuptools = "<= 70.1.0" +setuptools = "<= 73.0.0" [tool.poetry.group.docs] optional = true From 61493d47c3c4e8fc29d10b38bcede4204120b95d Mon Sep 17 00:00:00 2001 From: Kevin Date: Wed, 4 Sep 2024 15:38:43 -0400 Subject: [PATCH 326/496] install py-3.11 for sync Signed-off-by: Kevin --- .github/workflows/odh-notebooks-sync.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index 00a9c2de..88270487 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -46,6 +46,8 @@ jobs: with: python-version: | 3.9 + 3.11 + cache: 'pipenv' - name: Install pipenv and pip-versions run: pip install pipenv pip-versions @@ -53,7 +55,7 @@ jobs: - name: Update Pipfiles in accordance with Codeflare-SDK latest release run: | package_name=codeflare-sdk - available_python_versions=("3.9") # add space separated python versions according to 'python-versions' specified in 'Setup Python Environment' step + available_python_versions=("3.9", "3.11") # add space separated python versions according to 'python-versions' specified in 'Setup Python Environment' step install_package_using_pipenv(){ if ! 
pipenv install ${package_name}~="${CODEFLARE_RELEASE_VERSION}"; then echo "Failed to install ${package_name} with version ${CODEFLARE_RELEASE_VERSION} in $dir" From 29efd3326bf9860d117e33e6782f0303308e16fd Mon Sep 17 00:00:00 2001 From: Kevin Date: Mon, 9 Sep 2024 15:12:30 -0400 Subject: [PATCH 327/496] small refactor of odh-notebooks-sync.yml Signed-off-by: Kevin --- .github/workflows/odh-notebooks-sync.yml | 50 ++++++------------------ 1 file changed, 13 insertions(+), 37 deletions(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index 88270487..80337fe0 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -57,6 +57,11 @@ jobs: package_name=codeflare-sdk available_python_versions=("3.9", "3.11") # add space separated python versions according to 'python-versions' specified in 'Setup Python Environment' step install_package_using_pipenv(){ + # args allow custom names for Pipfile and Pipfile.lock + if $# -eq 2; then + mv $1 Pipfile + mv $2 Pipfile.lock + fi if ! pipenv install ${package_name}~="${CODEFLARE_RELEASE_VERSION}"; then echo "Failed to install ${package_name} with version ${CODEFLARE_RELEASE_VERSION} in $dir" exit 1 @@ -71,6 +76,10 @@ jobs: echo "Failed to remove virtual environment" exit 1 fi + if $# -eq 2; then + mv Pipfile $1 + mv Pipfile.lock $2 + fi } # Get the list of available versions for the package if ! versions=$(pipenv run pip-versions list $package_name);then @@ -81,31 +90,8 @@ jobs: if echo "$versions" | grep -q "${CODEFLARE_RELEASE_VERSION}"; then echo "Version ${CODEFLARE_RELEASE_VERSION} is available for $package_name" # list all Pipfile paths having Codeflare-SDK listed - paths+=($(grep -rl "${package_name} = \"~=.*\"")) - # Extracting only directories from file paths, excluding a `.gitworkflow` directory - directories=() - exclude_directories=( - ".git/objects/pack" - ".github/workflows/", - ) - for path in "${paths[@]}"; do - current_dir=$(dirname "$path") - #Check if current_dir is not in exclude_directories list - if [[ ! "${exclude_directories[@]}" =~ "$current_dir" ]]; then - #Check if Pipfile exists in current_dir - if ls "$current_dir"/Pipfile* 1> /dev/null 2>&1;then - directories+=("$current_dir") - fi - fi - done - # Remove duplicates - directories=($(echo "${directories[@]}" | tr ' ' '\n' | sort -u | tr '\n' ' ')) - # Print the directories for verification - echo "Directories (Start updating Pipfile in these below directories in accordance with Codeflare-SDK latest release):" - for dir in "${directories[@]}"; do - echo "- $dir" - done - # iterate over the directories and update Pipfile + # Extracting only directories from file paths, excluding a `.gitworkflow` and `.git` directory + directories+=($(grep --exclude-dir=.git --exclude-dir=.github --include="Pipfile*" -rl "${package_name} = \"~=.*\"" | xargs dirname | sort | uniq)) counter=0 total=${#directories[@]} for dir in "${directories[@]}"; do @@ -126,20 +112,10 @@ jobs: if [[ " ${available_python_versions[@]} " =~ " ${pipfile_python_version} " && "$pipfile_python_version_major" -ge "$minimum_supported_python_version_major" && "$pipfile_python_version_minor" -ge "$minimum_supported_python_version_minor" ]]; then if ! 
[ -f "Pipfile" ]; then if [ -f "Pipfile.cpu" ]; then - mv Pipfile.cpu Pipfile - mv Pipfile.lock.cpu Pipfile.lock - #install specified package - install_package_using_pipenv - mv Pipfile.lock Pipfile.lock.cpu - mv Pipfile Pipfile.cpu + install_package_using_pipenv Pipfile.cpu Pipfile.lock.cpu fi if [ -f "Pipfile.gpu" ]; then - mv Pipfile.gpu Pipfile - mv Pipfile.lock.gpu Pipfile.lock - #install specified package - install_package_using_pipenv - mv Pipfile.lock Pipfile.lock.gpu - mv Pipfile Pipfile.gpu + install_package_using_pipenv Pipfile.gpu Pipfile.lock.gpu fi else #install specified package From 6fdeb3e6916526329b9f50126c09023c25729386 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 10 Sep 2024 09:30:06 +0100 Subject: [PATCH 328/496] Add entrypoint_memory parameter in Ray Job Submission Client --- src/codeflare_sdk/job/ray_jobs.py | 4 ++++ tests/unit_test.py | 1 + 2 files changed, 5 insertions(+) diff --git a/src/codeflare_sdk/job/ray_jobs.py b/src/codeflare_sdk/job/ray_jobs.py index ff1ebdfe..9f002295 100644 --- a/src/codeflare_sdk/job/ray_jobs.py +++ b/src/codeflare_sdk/job/ray_jobs.py @@ -16,6 +16,7 @@ The ray_jobs sub-module contains methods needed to submit jobs and connect to Ray Clusters that were not created by CodeFlare. The SDK acts as a wrapper for the Ray Job Submission Client. """ + from ray.job_submission import JobSubmissionClient from ray.dashboard.modules.job.pydantic_models import JobDetails from typing import Iterator, Optional, Dict, Any, Union, List @@ -62,6 +63,7 @@ def submit_job( submission_id: Optional[str] = None, entrypoint_num_cpus: Optional[Union[int, float]] = None, entrypoint_num_gpus: Optional[Union[int, float]] = None, + entrypoint_memory: Optional[int] = None, entrypoint_resources: Optional[Dict[str, float]] = None, ) -> str: """ @@ -75,6 +77,7 @@ def submit_job( job_id -- DEPRECATED. This has been renamed to submission_id entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0. entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0. + entrypoint_memory –- The quantity of memory to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0. entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. 
""" return self.rayJobClient.submit_job( @@ -85,6 +88,7 @@ def submit_job( submission_id=submission_id, entrypoint_num_cpus=entrypoint_num_cpus, entrypoint_num_gpus=entrypoint_num_gpus, + entrypoint_memory=entrypoint_memory, entrypoint_resources=entrypoint_resources, ) diff --git a/tests/unit_test.py b/tests/unit_test.py index fd0be3df..a5f9cae9 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -2724,6 +2724,7 @@ def test_rjc_submit_job(ray_job_client, mocker): submission_id=None, entrypoint_num_cpus=None, entrypoint_num_gpus=None, + entrypoint_memory=None, entrypoint_resources=None, ) From fb59ba651b17eeb70ccc64630eb648382ad0bc42 Mon Sep 17 00:00:00 2001 From: Jiri Petrlik Date: Thu, 12 Sep 2024 12:19:20 +0200 Subject: [PATCH 329/496] Remove image-build-and-push.yaml Follow up clean up after https://github.com/project-codeflare/codeflare-sdk/pull/657 --- .github/workflows/image-build-and-push.yaml | 67 --------------------- 1 file changed, 67 deletions(-) delete mode 100644 .github/workflows/image-build-and-push.yaml diff --git a/.github/workflows/image-build-and-push.yaml b/.github/workflows/image-build-and-push.yaml deleted file mode 100644 index 451a9bdf..00000000 --- a/.github/workflows/image-build-and-push.yaml +++ /dev/null @@ -1,67 +0,0 @@ -name: Notebook Image Build and Push - -on: - workflow_dispatch: - inputs: - release-version: - type: string - required: true - description: 'Version number (for example: 0.1.0)' - is-stable: - description: 'Select if the built image should be tagged as stable' - required: true - type: boolean - quay-organization: - description: 'Quay organization used to push the built images to' - required: true - default: 'project-codeflare' - python_version: - type: string - default: "3.8" - required: true - poetry_version: - type: string - default: "1.5.1" - required: true - -jobs: - release: - runs-on: ubuntu-latest - steps: - - name: Cleanup - run: | - ls -lart - echo "Initial status:" - df -h - echo "Cleaning up resources:" - sudo swapoff -a - sudo rm -f /swapfile - sudo apt clean - sudo rm -rf /usr/share/dotnet - sudo rm -rf /opt/ghc - sudo rm -rf "/usr/local/share/boost" - sudo rm -rf "$AGENT_TOOLSDIRECTORY" - docker rmi $(docker image ls -aq) - echo "Final status:" - df -h - - name: Checkout the repository - uses: actions/checkout@v3 - - name: Install Python - uses: actions/setup-python@v4 - with: - python-version: ${{ github.event.inputs.python_version }} - - name: Image Build - run: | - cd custom-nb-image - podman build --build-arg SDK_VERSION="${{ github.event.inputs.release-version }}" -t quay.io/${{ github.event.inputs.quay-organization }}/notebook:v${{ github.event.inputs.release-version }} . 
- - name: Login to Quay.io - uses: redhat-actions/podman-login@v1 - with: - registry: quay.io - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - - name: Image Push - run: podman push quay.io/${{ github.event.inputs.quay-organization }}/notebook:v${{ github.event.inputs.release-version }} - - name: Image Push Stable - if: ${{ inputs.is-stable }} - run: podman push quay.io/${{ github.event.inputs.quay-organization }}/notebook:v${{ github.event.inputs.release-version }} quay.io/${{ github.event.inputs.quay-organization }}/notebook:stable From bc38ed53949a08302fd84c542ef1540384d7f646 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Tue, 10 Sep 2024 10:42:58 +0100 Subject: [PATCH 330/496] Upgrade Ray to version 2.35 --- .../additional-demos/hf_interactive.ipynb | 2 +- .../additional-demos/local_interactive.ipynb | 2 +- .../additional-demos/ray_job_client.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- .../guided-demos/1_cluster_job_client.ipynb | 2 +- .../guided-demos/2_basic_interactive.ipynb | 2 +- .../notebook-ex-outputs/0_basic_ray.ipynb | 2 +- .../1_cluster_job_client.ipynb | 2 +- .../2_basic_interactive.ipynb | 2 +- .../notebook-ex-outputs/interactivetest.yaml | 4 +- .../notebook-ex-outputs/jobtest.yaml | 4 +- .../notebook-ex-outputs/raytest.yaml | 4 +- .../preview_nbs/0_basic_ray.ipynb | 2 +- .../preview_nbs/1_cluster_job_client.ipynb | 2 +- .../preview_nbs/2_basic_interactive.ipynb | 2 +- docs/cluster-configuration.md | 2 +- poetry.lock | 41 ++++++++++++------- pyproject.toml | 2 +- .../templates/base-template.yaml | 6 +-- tests/e2e/support.py | 2 +- tests/test-case-bad.yaml | 6 +-- tests/test-case-custom-image.yaml | 2 +- tests/test-case-no-kueue-no-aw.yaml | 6 +-- tests/test-case-no-mcad.yamls | 6 +-- tests/test-case.yaml | 6 +-- tests/test-default-appwrapper.yaml | 6 +-- tests/unit_test.py | 6 +-- tests/unit_test_support.py | 2 +- 28 files changed, 71 insertions(+), 58 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 7e2a7180..39835929 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -68,7 +68,7 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding Ray Cluster).\n", "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 6c3aa2ac..a491b97e 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -35,7 +35,7 @@ "metadata": {}, "source": [ "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. 
\n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 2f43306e..a16ae6e6 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -41,7 +41,7 @@ "metadata": {}, "source": [ "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 55fd81d9..2a9ae48e 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -47,7 +47,7 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 56585e37..0857042a 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -41,7 +41,7 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 249f4150..98fcafa2 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -44,7 +44,7 @@ "source": [ "Once again, let's start by running through the same cluster setup as before:\n", "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." 
] }, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 03270e8b..3d581e6f 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -47,7 +47,7 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index 1a5e77f4..1ccad58c 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -41,7 +41,7 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 08eaf0b8..80207c37 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -44,7 +44,7 @@ "source": [ "Once again, let's start by running through the same cluster setup as before:\n", "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." 
] }, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index 3ef8ff6f..fd6500a7 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -81,7 +81,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + image: quay.io/modh/ray:2.35.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -147,7 +147,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + image: quay.io/modh/ray:2.35.0-py39-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index b439295f..a33a9cf5 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + image: quay.io/modh/ray:2.35.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + image: quay.io/modh/ray:2.35.0-py39-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index b953b750..151d2e28 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + image: quay.io/modh/ray:2.35.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + image: quay.io/modh/ray:2.35.0-py39-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 03270e8b..3d581e6f 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -47,7 +47,7 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." 
] }, diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index d0a527e3..0a2b9343 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -41,7 +41,7 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 1f48d329..a58c249e 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -44,7 +44,7 @@ "source": [ "Once again, let's start by running through the same cluster setup as before:\n", "\n", - "NOTE: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. \n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." ] }, diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index 8ca4a2c2..38f516b0 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -23,7 +23,7 @@ cluster = Cluster(ClusterConfiguration( labels={"exampleLabel": "example", "secondLabel": "example"}, )) ``` -Note: 'quay.io/rhoai/ray:2.23.0-py39-cu121' is the default community image used by the CodeFlare SDK for creating a RayCluster resource. If you have your own Ray image which suits your purposes, specify it in image field to override the default image. +Note: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. If you have your own Ray image which suits your purposes, specify it in image field to override the default image. The `labels={"exampleLabel": "example"}` parameter can be used to apply additional labels to the RayCluster resource. diff --git a/poetry.lock b/poetry.lock index 1adcd397..21568298 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "aiohttp" @@ -2160,20 +2160,31 @@ files = [ [[package]] name = "ray" -version = "2.23.0" +version = "2.35.0" description = "Ray provides a simple, universal API for building distributed applications." 
optional = false python-versions = ">=3.8" files = [ - {file = "ray-2.23.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:4f5ea8dc8fc014704ea12ef8a569abf0deca2ba2a6f157dc5fdd1789db4e0a65"}, - {file = "ray-2.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc950898871c3ecf3b921295c5fcf47b4a30b57b54be8f369014fb1eb9b4cfa5"}, - {file = "ray-2.23.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:55610f8eae65ce5686bde75a5782ce63e2a0112ccd2262b8acd707264da6dbea"}, - {file = "ray-2.23.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1a43d94ce3f14490e6f1e3e868fd6a5f3be4878cbf83c4bcdc741861d6a4dbf6"}, - {file = "ray-2.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f2d2c1d59d7c8bd8b97288f7ae9a6bf762bd4e703b57787282400d3176dd159d"}, - {file = "ray-2.23.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:15c109fd9969326323c8bdb0701cd9af21c85f465002f74950622f9a580ec4e5"}, - {file = "ray-2.23.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:e7d059f094dedae36dddeaf792ebb74d4eed1a8ab1fb540dbffce4ac22694800"}, - {file = "ray-2.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7c305f31674fb8319c147d66e27dd210c7ad6d375626307ddfc62137a26d4155"}, - {file = "ray-2.23.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:b40f85c67ee3d58732b4021460c4297eb418f466313d70b577e5bf9fbb4c2d16"}, + {file = "ray-2.35.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1e7e2d2e987be728a81821b6fd2bccb23e4d8a6cca8417db08b24f06a08d8476"}, + {file = "ray-2.35.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bd48be4c362004d31e5df072fd58b929efc67adfefc0adece41483b15f84539"}, + {file = "ray-2.35.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ef41e9254f3e18a90a8cf13fac9e35ac086eb778079ab6c76a37d3a6059186c5"}, + {file = "ray-2.35.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:1994aaf9996ffc45019856545e817d527ad572762f1af76ad669ae4e786fcfd6"}, + {file = "ray-2.35.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3b7a7d73f818e249064460ffa95402ebd852bf97d9ec6167b8b0d95be03da9f"}, + {file = "ray-2.35.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:e29754fac4b69a9cb0d089841af59ec6fb10b5d4a248b7c579d319ca2ed1c96f"}, + {file = "ray-2.35.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7a606c8ca53c64fc496703e9fd15d1a1ffb50e6b457a33d3622be2f13fc30a5"}, + {file = "ray-2.35.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ac561e20a62ce941b74d02a0b92b7765c6ba87cc22e24f34f64ded2c454ba64e"}, + {file = "ray-2.35.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:587af570cbe5f6cedca854f15107740e63c67207bee900713cb2ee38f6ebf20f"}, + {file = "ray-2.35.0-cp311-cp311-win_amd64.whl", hash = "sha256:8e406cce41679790146d4d2b1b0cb0b413ca35276e43b68ee796366169c1dbde"}, + {file = "ray-2.35.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:eb86355a3a0e794e2f1dbd5a84805dddfca64921ad0999b7fa5276e40d243692"}, + {file = "ray-2.35.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b746913268d5ea5e19bff0eb6bdc7e0538036892a8b57c08411787481195df2"}, + {file = "ray-2.35.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:e2ccfd144180f03d38b02a81afdac2b437f27e46736bf2653a1f0e8d67ea56cd"}, + {file = "ray-2.35.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:2ca1a0de41d4462fd764598a5981cf55fc955599f38f9a1ae10868e94c6dd80d"}, + {file = "ray-2.35.0-cp312-cp312-win_amd64.whl", hash = "sha256:c5600f745bb0e4df840a5cd51e82b1acf517f73505df9869fe3e369966956129"}, + {file = "ray-2.35.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:5e98d2bac394b806109782f316740c5b3c3f10a50117c8e28200a528df734928"}, + {file = "ray-2.35.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c395b46efd0dd871424b1b8d6baf99f91983946fbe351ff66ea34e8919daff29"}, + {file = "ray-2.35.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:4e6314bfdb8c73abcac13f41cc3d935dd1a8ad94c65005a4bfdc4861dc8b070d"}, + {file = "ray-2.35.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:70a154e3071cbb4d7a9b68f2dcf491b96b760be0ec6e2ef11a766071ac6acfef"}, + {file = "ray-2.35.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd8bdf9d16989684486db9ebcd23679140e2d6769fcdaadc05e8cac6b373023e"}, ] [package.dependencies] @@ -2207,10 +2218,12 @@ smart-open = {version = "*", optional = true, markers = "extra == \"default\""} virtualenv = {version = ">=20.0.24,<20.21.1 || >20.21.1", optional = true, markers = "extra == \"default\""} [package.extras] +adag = ["cupy-cuda12x"] air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "memray", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.23.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "memray", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "memray", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.35.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] client = ["grpcio (!=1.56.0)"] -cpp = ["ray-cpp (==2.23.0)"] +cpp = ["ray-cpp (==2.35.0)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0)", "grpcio 
(>=1.42.0)", "memray", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] @@ -2807,4 +2820,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "f02d9217a65ea7004e4c8cb726fa4117f7bf23dfd3f986810fd3a2e4659d66eb" +content-hash = "cbff1a5a7da645b81c7de962eadeb2a8378eb9f783ebed526a3634f1d97e2bf5" diff --git a/pyproject.toml b/pyproject.toml index 93e485d9..21729ee8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] python = "^3.9" openshift-client = "1.0.18" rich = "^12.5" -ray = {version = "2.23.0", extras = ["data", "default"]} +ray = {version = "2.35.0", extras = ["data", "default"]} kubernetes = ">= 25.3.0, < 27" cryptography = "40.0.2" executing = "1.2.0" diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 076bd262..d6b2ce55 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -12,7 +12,7 @@ metadata: namespace: default spec: # The version of Ray you are using. Make sure all Ray containers are running this version of Ray. - rayVersion: '2.23.0' + rayVersion: '2.35.0' # If enableInTreeAutoscaling is true, the autoscaler sidecar will be added to the Ray head pod. # Ray autoscaler integration is supported only for Ray versions >= 1.11.0 # Ray autoscaler integration is Beta with KubeRay >= 0.3.0 and Ray >= 2.0.0. @@ -69,7 +69,7 @@ spec: containers: # The Ray head pod - name: ray-head - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + image: quay.io/modh/ray:2.35.0-py39-cu121 imagePullPolicy: Always ports: - containerPort: 6379 @@ -150,7 +150,7 @@ spec: spec: containers: - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + image: quay.io/modh/ray:2.35.0-py39-cu121 # environment variables to set in the container.Optional. 
# Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ lifecycle: diff --git a/tests/e2e/support.py b/tests/e2e/support.py index c8346909..4a8b6ca0 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -8,7 +8,7 @@ def get_ray_image(): - default_ray_image = "quay.io/rhoai/ray:2.23.0-py39-cu121" + default_ray_image = "quay.io/modh/ray:2.35.0-py39-cu121" return os.getenv("RAY_IMAGE", default_ray_image) diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index cb2f4a0a..3c9da72c 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -43,7 +43,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + image: quay.io/modh/ray:2.35.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -67,7 +67,7 @@ spec: requests: cpu: 2 memory: 8G - rayVersion: 2.23.0 + rayVersion: 2.35.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster maxReplicas: 2 @@ -90,7 +90,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + image: quay.io/modh/ray:2.35.0-py39-cu121 lifecycle: preStop: exec: diff --git a/tests/test-case-custom-image.yaml b/tests/test-case-custom-image.yaml index 8a417a58..d7e52507 100644 --- a/tests/test-case-custom-image.yaml +++ b/tests/test-case-custom-image.yaml @@ -88,7 +88,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.23.0 + rayVersion: 2.35.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster-custom-image maxReplicas: 2 diff --git a/tests/test-case-no-kueue-no-aw.yaml b/tests/test-case-no-kueue-no-aw.yaml index 3ea7a22d..87a9242e 100644 --- a/tests/test-case-no-kueue-no-aw.yaml +++ b/tests/test-case-no-kueue-no-aw.yaml @@ -31,7 +31,7 @@ spec: template: spec: containers: - - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + - image: quay.io/modh/ray:2.35.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -85,7 +85,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.23.0 + rayVersion: 2.35.0 workerGroupSpecs: - groupName: small-group-unit-test-no-kueue maxReplicas: 2 @@ -103,7 +103,7 @@ spec: key: value spec: containers: - - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + - image: quay.io/modh/ray:2.35.0-py39-cu121 lifecycle: preStop: exec: diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 45a3dfb9..a686dd47 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -34,7 +34,7 @@ spec: template: spec: containers: - - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + - image: quay.io/modh/ray:2.35.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -88,7 +88,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.23.0 + rayVersion: 2.35.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster-ray maxReplicas: 2 @@ -106,7 +106,7 @@ spec: key: value spec: containers: - - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + - image: quay.io/modh/ray:2.35.0-py39-cu121 lifecycle: preStop: exec: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 461ed7df..9d85c5a9 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -39,7 +39,7 @@ spec: template: spec: containers: - - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + - image: quay.io/modh/ray:2.35.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -93,7 +93,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.23.0 + 
rayVersion: 2.35.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster maxReplicas: 2 @@ -111,7 +111,7 @@ spec: key: value spec: containers: - - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + - image: quay.io/modh/ray:2.35.0-py39-cu121 lifecycle: preStop: exec: diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index cc27e37a..7abc17ed 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -40,7 +40,7 @@ spec: spec: imagePullSecrets: [] containers: - - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + - image: quay.io/modh/ray:2.35.0-py39-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -92,7 +92,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.23.0 + rayVersion: 2.35.0 workerGroupSpecs: - groupName: small-group-unit-test-default-cluster maxReplicas: 1 @@ -111,7 +111,7 @@ spec: spec: imagePullSecrets: [] containers: - - image: quay.io/rhoai/ray:2.23.0-py39-cu121 + - image: quay.io/modh/ray:2.35.0-py39-cu121 lifecycle: preStop: exec: diff --git a/tests/unit_test.py b/tests/unit_test.py index a5f9cae9..8a51c6eb 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1245,7 +1245,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): }, }, }, - "rayVersion": "2.23.0", + "rayVersion": "2.35.0", "workerGroupSpecs": [ { "groupName": "small-group-quicktest", @@ -1478,7 +1478,7 @@ def get_ray_obj(group, version, namespace, plural, cls=None): } }, }, - "rayVersion": "2.23.0", + "rayVersion": "2.35.0", "workerGroupSpecs": [ { "groupName": "small-group-quicktest2", @@ -1846,7 +1846,7 @@ def get_aw_obj(group, version, namespace, plural): } }, }, - "rayVersion": "2.23.0", + "rayVersion": "2.35.0", "workerGroupSpecs": [ { "groupName": "small-group-quicktest", diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index dd8c2fce..22713d2d 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -45,7 +45,7 @@ def createClusterWrongType(): appwrapper=True, machine_types=[True, False], image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/rhoai/ray:2.23.0-py39-cu121", + image="quay.io/modh/ray:2.35.0-py39-cu121", write_to_file=True, labels={1: 1}, ) From 6e895a88a4cf2af907a2ea03ebab4b28b9eb851f Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Fri, 13 Sep 2024 11:31:13 +0100 Subject: [PATCH 331/496] adding reference to rocm compatible image --- docs/cluster-configuration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index 38f516b0..47110e4b 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -23,7 +23,7 @@ cluster = Cluster(ClusterConfiguration( labels={"exampleLabel": "example", "secondLabel": "example"}, )) ``` -Note: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. If you have your own Ray image which suits your purposes, specify it in image field to override the default image. +Note: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. If you have your own Ray image which suits your purposes, specify it in image field to override the default image. If you are using ROCm compatible GPUs you can use 'quay.io/modh/ray:2.35.0-py39-rocm61'. 
You can also find documentation on building a custom image [here](https://github.com/opendatahub-io/distributed-workloads/tree/main/images/runtime/examples). The `labels={"exampleLabel": "example"}` parameter can be used to apply additional labels to the RayCluster resource. From 63344c57691226962df5dbe1cf58496aacd425f4 Mon Sep 17 00:00:00 2001 From: oksanabaza Date: Tue, 10 Sep 2024 13:26:29 +0100 Subject: [PATCH 332/496] Adjust CodeFlare SDK tests to run on FIPS --- tests/e2e/mnist.py | 6 ++++++ tests/e2e/mnist_pip_requirements.txt | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/e2e/mnist.py b/tests/e2e/mnist.py index 143a6b6c..ba5ec96f 100644 --- a/tests/e2e/mnist.py +++ b/tests/e2e/mnist.py @@ -15,6 +15,7 @@ import os import torch +import torchvision.datasets.utils as utils import requests from pytorch_lightning import LightningModule, Trainer from pytorch_lightning.callbacks.progress import TQDMProgressBar @@ -138,6 +139,11 @@ def configure_optimizers(self): #################### def prepare_data(self): + def check_md5(fpath, md5=None): + return True + + utils.check_md5 = check_md5 + # download print("Downloading MNIST dataset...") diff --git a/tests/e2e/mnist_pip_requirements.txt b/tests/e2e/mnist_pip_requirements.txt index 907e9a52..05ed5816 100644 --- a/tests/e2e/mnist_pip_requirements.txt +++ b/tests/e2e/mnist_pip_requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==1.9.5 torchmetrics==0.9.1 -torchvision==0.12.0 +torchvision==0.19.0 minio From e1c1b97d9832587f9bd7c748aafe8ff657199fd9 Mon Sep 17 00:00:00 2001 From: Kevin Date: Mon, 16 Sep 2024 17:28:52 -0400 Subject: [PATCH 333/496] remove repeated conditional branch logic Signed-off-by: Kevin --- src/codeflare_sdk/cluster/cluster.py | 52 ++++++++++------------------ 1 file changed, 18 insertions(+), 34 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index f0f50eb3..20aa94c9 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -546,41 +546,25 @@ def list_all_queued( def get_current_namespace(): # pragma: no cover - if api_config_handler() != None: - if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"): - try: - file = open( - "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" - ) - active_context = file.readline().strip("\n") - return active_context - except Exception as e: - print("Unable to find current namespace") - return None - else: + if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"): + try: + file = open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r") + active_context = file.readline().strip("\n") + return active_context + except Exception as e: print("Unable to find current namespace") - return None - else: - if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"): - try: - file = open( - "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" - ) - active_context = file.readline().strip("\n") - return active_context - except Exception as e: - print( - "unable to gather namespace from /var/run/secrets/kubernetes.io/serviceaccount/namespace trying to gather from current context" - ) - else: - try: - _, active_context = config.list_kube_config_contexts(config_check()) - except Exception as e: - return _kube_api_error_handling(e) - try: - return active_context["context"]["namespace"] - except KeyError: - return None + + if api_config_handler() != None: + return None + print("trying to 
gather from current context") + try: + _, active_context = config.list_kube_config_contexts(config_check()) + except Exception as e: + return _kube_api_error_handling(e) + try: + return active_context["context"]["namespace"] + except KeyError: + return None def get_cluster( From 1c0e8386670569981194feee103e1f1983c04d7c Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 16 Sep 2024 15:00:23 +0100 Subject: [PATCH 334/496] feat: add ui cluster creation and deletion buttons Signed-off-by: Bobbins228 --- src/codeflare_sdk/cluster/cluster.py | 14 +++++ src/codeflare_sdk/cluster/widgets.py | 91 ++++++++++++++++++++++++++++ tests/unit_test.py | 1 + 3 files changed, 106 insertions(+) create mode 100644 src/codeflare_sdk/cluster/widgets.py diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 20aa94c9..f9bcc84f 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -42,6 +42,10 @@ RayCluster, RayClusterStatus, ) +from .widgets import ( + cluster_up_down_buttons, + is_notebook, +) from kubernetes import client, config from kubernetes.utils import parse_quantity import yaml @@ -71,6 +75,8 @@ def __init__(self, config: ClusterConfiguration): self.app_wrapper_yaml = self.create_app_wrapper() self._job_submission_client = None self.app_wrapper_name = self.config.name + if is_notebook(): + cluster_up_down_buttons(self) @property def _client_headers(self): @@ -156,8 +162,12 @@ def up(self): plural="appwrappers", body=aw, ) + print(f"AppWrapper: '{self.config.name}' has successfully been created") else: self._component_resources_up(namespace, api_instance) + print( + f"Ray Cluster: '{self.config.name}' has successfully been created" + ) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -198,8 +208,12 @@ def down(self): plural="appwrappers", name=self.app_wrapper_name, ) + print(f"AppWrapper: '{self.config.name}' has successfully been deleted") else: self._component_resources_down(namespace, api_instance) + print( + f"Ray Cluster: '{self.config.name}' has successfully been deleted" + ) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) diff --git a/src/codeflare_sdk/cluster/widgets.py b/src/codeflare_sdk/cluster/widgets.py new file mode 100644 index 00000000..6bd02c55 --- /dev/null +++ b/src/codeflare_sdk/cluster/widgets.py @@ -0,0 +1,91 @@ +# Copyright 2022 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The widgets sub-module contains the ui widgets created using the ipywidgets package. +""" +import ipywidgets as widgets +from IPython.display import display +import os +import codeflare_sdk + + +def cluster_up_down_buttons(cluster: "codeflare_sdk.cluster.Cluster") -> widgets.Button: + """ + The cluster_up_down_buttons function returns two button widgets for a create and delete button. + The function uses the appwrapper bool to distinguish between resource type for the tool tip. 
+ """ + resource = "Ray Cluster" + if cluster.config.appwrapper: + resource = "AppWrapper" + + up_button = widgets.Button( + description="Cluster Up", + tooltip=f"Create the {resource}", + icon="play", + ) + + delete_button = widgets.Button( + description="Cluster Down", + tooltip=f"Delete the {resource}", + icon="trash", + ) + + wait_ready_check = wait_ready_check_box() + output = widgets.Output() + + # Display the buttons in an HBox wrapped in a VBox which includes the wait_ready Checkbox + button_display = widgets.HBox([up_button, delete_button]) + display(widgets.VBox([button_display, wait_ready_check]), output) + + def on_up_button_clicked(b): # Handle the up button click event + with output: + output.clear_output() + cluster.up() + + # If the wait_ready Checkbox is clicked(value == True) trigger the wait_ready function + if wait_ready_check.value: + cluster.wait_ready() + + def on_down_button_clicked(b): # Handle the down button click event + with output: + output.clear_output() + cluster.down() + + up_button.on_click(on_up_button_clicked) + delete_button.on_click(on_down_button_clicked) + + +def wait_ready_check_box(): + """ + The wait_ready_check_box function will return a checkbox widget used for waiting for the resource to be in the state READY. + """ + wait_ready_check_box = widgets.Checkbox( + False, + description="Wait for Cluster?", + ) + return wait_ready_check_box + + +def is_notebook() -> bool: + """ + The is_notebook function checks if Jupyter Notebook environment variables exist in the given environment and return True/False based on that. + """ + if ( + "PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING" in os.environ + or "JPY_SESSION_NAME" in os.environ + ): # If running Jupyter NBs in VsCode or RHOAI/ODH display UI buttons + return True + else: + return False diff --git a/tests/unit_test.py b/tests/unit_test.py index 8a51c6eb..2a1714b1 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -76,6 +76,7 @@ gen_names, is_openshift_cluster, ) +from codeflare_sdk.cluster.widgets import cluster_up_down_buttons import openshift from openshift.selector import Selector From 9c8edb6b61d87b63d57144644ea810a594d5e4be Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 17 Sep 2024 09:14:47 +0100 Subject: [PATCH 335/496] test: test ui up and down buttons Signed-off-by: Bobbins228 --- tests/unit_test.py | 79 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 78 insertions(+), 1 deletion(-) diff --git a/tests/unit_test.py b/tests/unit_test.py index 2a1714b1..111f737c 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -83,11 +83,14 @@ import ray import pytest import yaml -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch from pytest_mock import MockerFixture from ray.job_submission import JobSubmissionClient from codeflare_sdk.job.ray_jobs import RayJobClient +import ipywidgets as widgets +from IPython.display import display + # For mocking openshift client results fake_res = openshift.Result("fake") @@ -2874,6 +2877,80 @@ def test_cluster_config_deprecation_conversion(mocker): assert config.worker_cpu_limits == 2 +""" + Ipywidgets tests +""" + + +@patch.dict( + "os.environ", {"JPY_SESSION_NAME": "example-test"} +) # Mock Jupyter environment variable +def test_cluster_up_down_buttons(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) + mocker.patch( + 
"kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + cluster = Cluster(createClusterConfig()) + + with patch("ipywidgets.Button") as MockButton, patch( + "ipywidgets.Checkbox" + ) as MockCheckbox, patch("ipywidgets.Output"), patch("ipywidgets.HBox"), patch( + "ipywidgets.VBox" + ), patch.object( + cluster, "up" + ) as mock_up, patch.object( + cluster, "down" + ) as mock_down, patch.object( + cluster, "wait_ready" + ) as mock_wait_ready: + # Create mock button & CheckBox instances + mock_up_button = MagicMock() + mock_down_button = MagicMock() + mock_wait_ready_check_box = MagicMock() + + # Ensure the mock Button class returns the mock button instances in sequence + MockCheckbox.side_effect = [mock_wait_ready_check_box] + MockButton.side_effect = [mock_up_button, mock_down_button] + + # Call the method under test + cluster_up_down_buttons(cluster) + + # Simulate checkbox being checked or unchecked + mock_wait_ready_check_box.value = True # Simulate checkbox being checked + + # Simulate the button clicks by calling the mock on_click handlers + mock_up_button.on_click.call_args[0][0](None) # Simulate clicking "Cluster Up" + mock_down_button.on_click.call_args[0][0]( + None + ) # Simulate clicking "Cluster Down" + + # Check if the `up` and `down` methods were called + mock_wait_ready.assert_called_once() + mock_up.assert_called_once() + mock_down.assert_called_once() + + +@patch.dict("os.environ", {}, clear=True) # Mock environment with no variables +def test_is_notebook_false(): + from codeflare_sdk.cluster.widgets import is_notebook + + assert is_notebook() is False + + +@patch.dict( + "os.environ", {"JPY_SESSION_NAME": "example-test"} +) # Mock Jupyter environment variable +def test_is_notebook_true(): + from codeflare_sdk.cluster.widgets import is_notebook + + assert is_notebook() is True + + # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}unit-test-no-kueue.yaml") From 9794a0f3ca59b7ed1d692abd556dabdd5179cc3b Mon Sep 17 00:00:00 2001 From: Mark Campbell Date: Wed, 18 Sep 2024 16:37:04 +0100 Subject: [PATCH 336/496] Update src/codeflare_sdk/cluster/widgets.py Co-authored-by: Kevin Postlethwait --- src/codeflare_sdk/cluster/widgets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/codeflare_sdk/cluster/widgets.py b/src/codeflare_sdk/cluster/widgets.py index 6bd02c55..351640e0 100644 --- a/src/codeflare_sdk/cluster/widgets.py +++ b/src/codeflare_sdk/cluster/widgets.py @@ -1,4 +1,4 @@ -# Copyright 2022 IBM, Red Hat +# Copyright 2024 IBM, Red Hat # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From ec47475f7ef902ad42a47f0dd8bb6fa7c5c2eab5 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 9 Sep 2024 15:03:08 +0100 Subject: [PATCH 337/496] feat: split head resources for limits and requests Signed-off-by: Bobbins228 --- src/codeflare_sdk/cluster/cluster.py | 44 +++++++++++++++++------- src/codeflare_sdk/cluster/config.py | 32 +++++++++++++---- src/codeflare_sdk/cluster/model.py | 10 +++--- src/codeflare_sdk/utils/generate_yaml.py | 24 ++++++------- src/codeflare_sdk/utils/pretty_print.py | 2 +- 5 files changed, 77 insertions(+), 35 deletions(-) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index f9bcc84f..7c652a18 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -462,6 +462,18 @@ def from_k8_cluster_object( name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], machine_types=machine_types, + head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["cpu"], + head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["cpu"], + head_memory_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["memory"], + head_memory_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["memory"], num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" @@ -851,10 +863,10 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: status=status, # for now we are not using autoscaling so same replicas is fine workers=rc["spec"]["workerGroupSpecs"][0]["replicas"], - worker_mem_max=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_mem_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["memory"], - worker_mem_min=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_mem_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["requests"]["memory"], worker_cpu=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ @@ -862,12 +874,18 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: ]["resources"]["limits"]["cpu"], worker_extended_resources=worker_extended_resources, namespace=rc["metadata"]["namespace"], - head_cpus=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["cpu"], - head_mem=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["memory"], + head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["requests"]["cpu"], + head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["limits"]["cpu"], + head_mem_requests=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["requests"]["memory"], + head_mem_limits=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["limits"]["memory"], head_extended_resources=head_extended_resources, dashboard=dashboard_url, ) @@ -890,14 +908,16 @@ def _copy_to_ray(cluster: Cluster) -> RayCluster: name=cluster.config.name, status=cluster.status(print_to_console=False)[0], workers=cluster.config.num_workers, - worker_mem_min=cluster.config.worker_memory_requests, - worker_mem_max=cluster.config.worker_memory_limits, + 
worker_mem_requests=cluster.config.worker_memory_requests, + worker_mem_limits=cluster.config.worker_memory_limits, worker_cpu=cluster.config.worker_cpu_requests, worker_extended_resources=cluster.config.worker_extended_resource_requests, namespace=cluster.config.namespace, dashboard=cluster.cluster_dashboard_uri(), - head_cpus=cluster.config.head_cpus, - head_mem=cluster.config.head_memory, + head_mem_requests=cluster.config.head_memory_requests, + head_mem_limits=cluster.config.head_memory_limits, + head_cpu_requests=cluster.config.head_cpu_requests, + head_cpu_limits=cluster.config.head_cpu_limits, head_extended_resources=cluster.config.head_extended_resource_requests, ) if ray.status == CodeFlareClusterStatus.READY: diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/cluster/config.py index 610d53c4..3c59d593 100644 --- a/src/codeflare_sdk/cluster/config.py +++ b/src/codeflare_sdk/cluster/config.py @@ -75,10 +75,16 @@ class ClusterConfiguration: name: str namespace: Optional[str] = None head_info: List[str] = field(default_factory=list) - head_cpus: Union[int, str] = 2 - head_memory: Union[int, str] = 8 + head_cpu_requests: Union[int, str] = 2 + head_cpu_limits: Union[int, str] = 2 + head_cpus: Optional[Union[int, str]] = None # Deprecating + head_memory_requests: Union[int, str] = 8 + head_memory_limits: Union[int, str] = 8 + head_memory: Optional[Union[int, str]] = None # Deprecating head_gpus: Optional[int] = None # Deprecating - head_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + head_extended_resource_requests: Dict[str, Union[str, int]] = field( + default_factory=dict + ) machine_types: List[str] = field( default_factory=list ) # ["m4.xlarge", "g4dn.xlarge"] @@ -100,7 +106,9 @@ class ClusterConfiguration: write_to_file: bool = False verify_tls: bool = True labels: Dict[str, str] = field(default_factory=dict) - worker_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + worker_extended_resource_requests: Dict[str, Union[str, int]] = field( + default_factory=dict + ) extended_resource_mapping: Dict[str, str] = field(default_factory=dict) overwrite_default_resource_mapping: bool = False local_queue: Optional[str] = None @@ -183,14 +191,21 @@ def _str_mem_no_unit_add_GB(self): self.worker_memory_limits = f"{self.worker_memory_limits}G" def _memory_to_string(self): - if isinstance(self.head_memory, int): - self.head_memory = f"{self.head_memory}G" + if isinstance(self.head_memory_requests, int): + self.head_memory_requests = f"{self.head_memory_requests}G" + if isinstance(self.head_memory_limits, int): + self.head_memory_limits = f"{self.head_memory_limits}G" if isinstance(self.worker_memory_requests, int): self.worker_memory_requests = f"{self.worker_memory_requests}G" if isinstance(self.worker_memory_limits, int): self.worker_memory_limits = f"{self.worker_memory_limits}G" def _cpu_to_resource(self): + if self.head_cpus: + warnings.warn( + "head_cpus is being deprecated, use head_cpu_requests and head_cpu_limits" + ) + self.head_cpu_requests = self.head_cpu_limits = self.head_cpus if self.min_cpus: warnings.warn("min_cpus is being deprecated, use worker_cpu_requests") self.worker_cpu_requests = self.min_cpus @@ -199,6 +214,11 @@ def _cpu_to_resource(self): self.worker_cpu_limits = self.max_cpus def _memory_to_resource(self): + if self.head_memory: + warnings.warn( + "head_memory is being deprecated, use head_memory_requests and head_memory_limits" + ) + self.head_memory_requests = self.head_memory_limits = 
self.head_memory if self.min_memory: warnings.warn("min_memory is being deprecated, use worker_memory_requests") self.worker_memory_requests = f"{self.min_memory}G" diff --git a/src/codeflare_sdk/cluster/model.py b/src/codeflare_sdk/cluster/model.py index 5d6e2ed2..ab7b30ed 100644 --- a/src/codeflare_sdk/cluster/model.py +++ b/src/codeflare_sdk/cluster/model.py @@ -73,11 +73,13 @@ class RayCluster: name: str status: RayClusterStatus - head_cpus: int - head_mem: str + head_cpu_requests: int + head_cpu_limits: int + head_mem_requests: str + head_mem_limits: str workers: int - worker_mem_min: str - worker_mem_max: str + worker_mem_requests: str + worker_mem_limits: str worker_cpu: int namespace: str dashboard: str diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 1644dc15..c4e1755d 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -115,22 +115,22 @@ def update_env(spec, env): def update_resources( spec, - worker_cpu_requests, - worker_cpu_limits, - worker_memory_requests, - worker_memory_limits, + cpu_requests, + cpu_limits, + memory_requests, + memory_limits, custom_resources, ): container = spec.get("containers") for resource in container: requests = resource.get("resources").get("requests") if requests is not None: - requests["cpu"] = worker_cpu_requests - requests["memory"] = worker_memory_requests + requests["cpu"] = cpu_requests + requests["memory"] = memory_requests limits = resource.get("resources").get("limits") if limits is not None: - limits["cpu"] = worker_cpu_limits - limits["memory"] = worker_memory_limits + limits["cpu"] = cpu_limits + limits["memory"] = memory_limits for k in custom_resources.keys(): limits[k] = custom_resources[k] requests[k] = custom_resources[k] @@ -210,10 +210,10 @@ def update_nodes( # TODO: Eventually add head node configuration outside of template update_resources( spec, - cluster.config.head_cpus, - cluster.config.head_cpus, - cluster.config.head_memory, - cluster.config.head_memory, + cluster.config.head_cpu_requests, + cluster.config.head_cpu_limits, + cluster.config.head_memory_requests, + cluster.config.head_memory_limits, cluster.config.head_extended_resource_requests, ) else: diff --git a/src/codeflare_sdk/utils/pretty_print.py b/src/codeflare_sdk/utils/pretty_print.py index 9431ffd7..4842c9cd 100644 --- a/src/codeflare_sdk/utils/pretty_print.py +++ b/src/codeflare_sdk/utils/pretty_print.py @@ -136,7 +136,7 @@ def print_clusters(clusters: List[RayCluster]): name = cluster.name dashboard = cluster.dashboard workers = str(cluster.workers) - memory = f"{cluster.worker_mem_min}~{cluster.worker_mem_max}" + memory = f"{cluster.worker_mem_requests}~{cluster.worker_mem_limits}" cpu = str(cluster.worker_cpu) gpu = str(cluster.worker_extended_resources.get("nvidia.com/gpu", 0)) From 1fec8705b04efc0e11007146a99602a1864c8909 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 9 Sep 2024 15:05:59 +0100 Subject: [PATCH 338/496] test: update unit and e2e tests with split head resources Signed-off-by: Bobbins228 --- .github/workflows/guided_notebook_tests.yaml | 6 ++--- tests/e2e/local_interactive_sdk_kind_test.py | 6 +++-- .../e2e/mnist_raycluster_sdk_aw_kind_test.py | 6 +++-- tests/e2e/mnist_raycluster_sdk_kind_test.py | 6 +++-- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 6 +++-- tests/e2e/start_ray_cluster.py | 6 +++-- tests/unit_test.py | 24 +++++++++++-------- .../raycluster_sdk_upgrade_sleep_test.py | 6 +++-- 
tests/upgrade/raycluster_sdk_upgrade_test.py | 6 +++-- 9 files changed, 45 insertions(+), 27 deletions(-) diff --git a/.github/workflows/guided_notebook_tests.yaml b/.github/workflows/guided_notebook_tests.yaml index 299c67ef..eb6c5cd2 100644 --- a/.github/workflows/guided_notebook_tests.yaml +++ b/.github/workflows/guided_notebook_tests.yaml @@ -84,7 +84,7 @@ jobs: jq -r 'del(.cells[] | select(.source[] | contains("Create authentication object for user permissions")))' 0_basic_ray.ipynb > 0_basic_ray.ipynb.tmp && mv 0_basic_ray.ipynb.tmp 0_basic_ray.ipynb jq -r 'del(.cells[] | select(.source[] | contains("auth.logout()")))' 0_basic_ray.ipynb > 0_basic_ray.ipynb.tmp && mv 0_basic_ray.ipynb.tmp 0_basic_ray.ipynb # Set explicit namespace as SDK need it (currently) to resolve local queues - sed -i "s/head_memory=2,/head_memory=2, namespace='default',/" 0_basic_ray.ipynb + sed -i "s/head_memory_limits=2,/head_memory_limits=2, namespace='default',/" 0_basic_ray.ipynb # Run notebook poetry run papermill 0_basic_ray.ipynb 0_basic_ray_out.ipynb --log-output --execution-timeout 600 working-directory: demo-notebooks/guided-demos @@ -206,7 +206,7 @@ jobs: JOB_WAIT=$(jq -r '.' ${GITHUB_WORKSPACE}/.github/resources/wait_for_job_cell.json) jq --argjson job_wait "$JOB_WAIT" -r '(.cells[] | select(.source[] | contains("async for lines in client.tail_job_logs"))) |= $job_wait' 1_cluster_job_client.ipynb > 1_cluster_job_client.ipynb.tmp && mv 1_cluster_job_client.ipynb.tmp 1_cluster_job_client.ipynb # Set explicit namespace as SDK need it (currently) to resolve local queues - sed -i "s/head_cpus=1,/head_cpus=1, namespace='default',/" 1_cluster_job_client.ipynb + sed -i "s/head_cpu_limits=1,/head_cpu_limits=1, namespace='default',/" 1_cluster_job_client.ipynb # Run notebook poetry run papermill 1_cluster_job_client.ipynb 1_cluster_job_client_out.ipynb --log-output --execution-timeout 1200 working-directory: demo-notebooks/guided-demos @@ -332,7 +332,7 @@ jobs: # Rewrite cluster_uri() to local_client_url() to retrieve client URL available out of cluster, as the test is executed outside of cluster sed -i "s/cluster_uri()/local_client_url()/" 2_basic_interactive.ipynb # Set explicit namespace as SDK need it (currently) to resolve local queues - sed -i "s/head_cpus=1,/head_cpus=1, namespace='default',/" 2_basic_interactive.ipynb + sed -i "s/head_cpu_limits=1,/head_cpu_limits=1, namespace='default',/" 2_basic_interactive.ipynb # Add MINIO related modules to runtime environment sed -i "s/\\\\\"transformers/\\\\\"s3fs\\\\\", \\\\\"pyarrow\\\\\", \\\\\"transformers/" 2_basic_interactive.ipynb # Replace markdown cell with remote configuration for MINIO diff --git a/tests/e2e/local_interactive_sdk_kind_test.py b/tests/e2e/local_interactive_sdk_kind_test.py index 6693117b..c20fd879 100644 --- a/tests/e2e/local_interactive_sdk_kind_test.py +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -44,8 +44,10 @@ def run_local_interactives( name=cluster_name, namespace=self.namespace, num_workers=1, - head_cpus="500m", - head_memory=2, + head_cpu_requests="500m", + head_cpu_limits="500m", + head_memory_requests=2, + head_memory_limits=2, worker_cpu_requests="500m", worker_cpu_limits=1, worker_memory_requests=1, diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py index 012098a4..d7949b8c 100644 --- a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -42,8 +42,10 @@ def run_mnist_raycluster_sdk_kind( name="mnist", 
namespace=self.namespace, num_workers=1, - head_cpus="500m", - head_memory=2, + head_cpu_requests="500m", + head_cpu_limits="500m", + head_memory_requests=2, + head_memory_limits=2, worker_cpu_requests="500m", worker_cpu_limits=1, worker_memory_requests=1, diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index 2623b36c..d81e9149 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -42,8 +42,10 @@ def run_mnist_raycluster_sdk_kind( name="mnist", namespace=self.namespace, num_workers=1, - head_cpus="500m", - head_memory=2, + head_cpu_requests="500m", + head_cpu_limits="500m", + head_memory_requests=2, + head_memory_limits=2, worker_cpu_requests="500m", worker_cpu_limits=1, worker_memory_requests=1, diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 3fe6177c..7e4002ad 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -42,8 +42,10 @@ def run_mnist_raycluster_sdk_oauth(self): name="mnist", namespace=self.namespace, num_workers=1, - head_cpus="500m", - head_memory=4, + head_cpu_requests="500m", + head_cpu_limits="500m", + head_memory_requests=4, + head_memory_limits=4, worker_cpu_requests=1, worker_cpu_limits=1, worker_memory_requests=1, diff --git a/tests/e2e/start_ray_cluster.py b/tests/e2e/start_ray_cluster.py index b34f0331..48ab604f 100644 --- a/tests/e2e/start_ray_cluster.py +++ b/tests/e2e/start_ray_cluster.py @@ -13,8 +13,10 @@ name="mnist", namespace=namespace, num_workers=1, - head_cpus="500m", - head_memory=2, + head_cpu_requests="500m", + head_cpu_limits="500m", + head_memory_requests=2, + head_memory_limits=2, worker_cpu_requests="500m", worker_cpu_limits=1, worker_memory_requests=1, diff --git a/tests/unit_test.py b/tests/unit_test.py index 111f737c..388723c5 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -942,13 +942,15 @@ def test_ray_details(mocker, capsys): name="raytest1", status=RayClusterStatus.READY, workers=1, - worker_mem_min="2G", - worker_mem_max="2G", + worker_mem_requests="2G", + worker_mem_limits="2G", worker_cpu=1, namespace="ns", dashboard="fake-uri", - head_cpus=2, - head_mem=8, + head_cpu_requests=2, + head_cpu_limits=2, + head_mem_requests=8, + head_mem_limits=8, ) mocker.patch( "codeflare_sdk.cluster.cluster.Cluster.status", @@ -978,8 +980,8 @@ def test_ray_details(mocker, capsys): assert ray2.name == "raytest2" assert ray1.namespace == ray2.namespace assert ray1.workers == ray2.workers - assert ray1.worker_mem_min == ray2.worker_mem_min - assert ray1.worker_mem_max == ray2.worker_mem_max + assert ray1.worker_mem_requests == ray2.worker_mem_requests + assert ray1.worker_mem_limits == ray2.worker_mem_limits assert ray1.worker_cpu == ray2.worker_cpu assert ray1.worker_extended_resources == ray2.worker_extended_resources try: @@ -2356,13 +2358,15 @@ def test_cluster_status(mocker): name="test", status=RayClusterStatus.UNKNOWN, workers=1, - worker_mem_min=2, - worker_mem_max=2, + worker_mem_requests=2, + worker_mem_limits=2, worker_cpu=1, namespace="ns", dashboard="fake-uri", - head_cpus=2, - head_mem=8, + head_cpu_requests=2, + head_cpu_limits=2, + head_mem_requests=8, + head_mem_limits=8, ) cf = Cluster( ClusterConfiguration( diff --git a/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py b/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py index c415b2fc..cf21fee0 100644 --- 
a/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py @@ -53,8 +53,10 @@ def run_mnist_raycluster_sdk_oauth(self): name="mnist", namespace=self.namespace, num_workers=1, - head_cpus=1, - head_memory=4, + head_cpu_requests=1, + head_cpu_limits=1, + head_memory_requests=4, + head_memory_limits=4, worker_cpu_requests=1, worker_cpu_limits=1, worker_memory_requests=4, diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index 640b1b5d..05862d07 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -48,8 +48,10 @@ def run_mnist_raycluster_sdk_oauth(self): name="mnist", namespace=self.namespace, num_workers=1, - head_cpus=1, - head_memory=4, + head_cpu_requests=1, + head_cpu_limits=1, + head_memory_requests=4, + head_memory_limits=4, worker_cpu_requests=1, worker_cpu_limits=1, worker_memory_requests=4, From 2133af9489d83edc4c1ae74049abcb5eab385dab Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 9 Sep 2024 15:06:25 +0100 Subject: [PATCH 339/496] docs: update notebooks with split head resources Signed-off-by: Bobbins228 --- demo-notebooks/guided-demos/0_basic_ray.ipynb | 6 ++++-- demo-notebooks/guided-demos/1_cluster_job_client.ipynb | 6 ++++-- demo-notebooks/guided-demos/2_basic_interactive.ipynb | 6 ++++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 2a9ae48e..58a52727 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -62,8 +62,10 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='raytest', \n", - " head_cpus='500m',\n", - " head_memory=2,\n", + " head_cpu_requests='500m',\n", + " head_cpu_limits='500m',\n", + " head_memory_requests=2,\n", + " head_memory_limits=2,\n", " head_extended_resource_requests={'nvidia.com/gpu':0}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", " worker_extended_resource_requests={'nvidia.com/gpu':0},\n", " num_workers=2,\n", diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 0857042a..05682d82 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -55,8 +55,10 @@ "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", " name='jobtest',\n", - " head_cpus=1,\n", - " head_memory=4,\n", + " head_cpu_requests=1,\n", + " head_cpu_limits=1,\n", + " head_memory_requests=4,\n", + " head_memory_limits=4,\n", " head_extended_resource_requests={'nvidia.com/gpu':1}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", " worker_extended_resource_requests={'nvidia.com/gpu':1},\n", " num_workers=2,\n", diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 98fcafa2..1612af3f 100644 --- 
a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -60,8 +60,10 @@ "cluster_name = \"interactivetest\"\n", "cluster = Cluster(ClusterConfiguration(\n", " name=cluster_name,\n", - " head_cpus=1,\n", - " head_memory=6,\n", + " head_cpu_requests=1,\n", + " head_cpu_limits=1,\n", + " head_memory_requests=6,\n", + " head_memory_limits=6,\n", " head_extended_resource_requests={'nvidia.com/gpu':1}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", " worker_extended_resource_requests={'nvidia.com/gpu':1},\n", " num_workers=2,\n", From 1235fc82394465b1040a55ae0b8900cca3e3f51b Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 10 Sep 2024 14:47:53 +0100 Subject: [PATCH 340/496] docs: update documentation to include deprecating variables Signed-off-by: Bobbins228 --- docs/cluster-configuration.md | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md index 47110e4b..97068b49 100644 --- a/docs/cluster-configuration.md +++ b/docs/cluster-configuration.md @@ -9,8 +9,10 @@ from codeflare_sdk import Cluster, ClusterConfiguration cluster = Cluster(ClusterConfiguration( name='ray-example', # Mandatory Field namespace='default', # Default None - head_cpus=1, # Default 2 - head_memory=1, # Default 8 + head_cpu_requests=1, # Default 2 + head_cpu_limits=1, # Default 2 + head_memory_requests=1, # Default 8 + head_memory_limits=1, # Default 8 head_extended_resource_requests={'nvidia.com/gpu':0}, # Default 0 worker_extended_resource_requests={'nvidia.com/gpu':0}, # Default 0 num_workers=1, # Default 1 @@ -28,3 +30,17 @@ Note: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the Code The `labels={"exampleLabel": "example"}` parameter can be used to apply additional labels to the RayCluster resource. After creating their `cluster`, a user can call `cluster.up()` and `cluster.down()` to respectively create or remove the Ray Cluster. + + +## Deprecating Parameters +The following parameters of the `ClusterConfiguration` are being deprecated in release `v0.22.0`.
+| Deprecated Parameter | Replaced By | +| :--------- | :-------- | +| `head_cpus` | `head_cpu_requests`, `head_cpu_limits` | +| `head_memory` | `head_memory_requests`, `head_memory_limits` | +| `min_cpus` | `worker_cpu_requests` | +| `max_cpus` | `worker_cpu_limits` | +| `min_memory` | `worker_memory_requests` | +| `max_memory` | `worker_memory_limits` | +| `head_gpus` | `head_extended_resource_requests` | +| `num_gpus` | `worker_extended_resource_requests` | From 9a59be80b2b2b4367f75ff83c24007c81b5551e0 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Mon, 23 Sep 2024 11:52:33 +0100 Subject: [PATCH 341/496] Update default ray image to use static SHA tag --- src/codeflare_sdk/templates/base-template.yaml | 4 ++-- tests/e2e/support.py | 2 +- tests/test-case-bad.yaml | 4 ++-- tests/test-case-no-kueue-no-aw.yaml | 4 ++-- tests/test-case-no-mcad.yamls | 4 ++-- tests/test-case.yaml | 4 ++-- tests/test-default-appwrapper.yaml | 4 ++-- tests/unit_test_support.py | 2 +- 8 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index d6b2ce55..6240e986 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -69,7 +69,7 @@ spec: containers: # The Ray head pod - name: ray-head - image: quay.io/modh/ray:2.35.0-py39-cu121 + image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 imagePullPolicy: Always ports: - containerPort: 6379 @@ -150,7 +150,7 @@ spec: spec: containers: - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - image: quay.io/modh/ray:2.35.0-py39-cu121 + image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 # environment variables to set in the container.Optional. 
# Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ lifecycle: diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 4a8b6ca0..80f994cd 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -8,7 +8,7 @@ def get_ray_image(): - default_ray_image = "quay.io/modh/ray:2.35.0-py39-cu121" + default_ray_image = "quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283" return os.getenv("RAY_IMAGE", default_ray_image) diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index 3c9da72c..91b553aa 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -43,7 +43,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray:2.35.0-py39-cu121 + image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 imagePullPolicy: Always lifecycle: preStop: @@ -90,7 +90,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray:2.35.0-py39-cu121 + image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 lifecycle: preStop: exec: diff --git a/tests/test-case-no-kueue-no-aw.yaml b/tests/test-case-no-kueue-no-aw.yaml index 87a9242e..62c8cb0c 100644 --- a/tests/test-case-no-kueue-no-aw.yaml +++ b/tests/test-case-no-kueue-no-aw.yaml @@ -31,7 +31,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray:2.35.0-py39-cu121 + - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 imagePullPolicy: Always lifecycle: preStop: @@ -103,7 +103,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray:2.35.0-py39-cu121 + - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 lifecycle: preStop: exec: diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index a686dd47..babc4fce 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -34,7 +34,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray:2.35.0-py39-cu121 + - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 imagePullPolicy: Always lifecycle: preStop: @@ -106,7 +106,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray:2.35.0-py39-cu121 + - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 lifecycle: preStop: exec: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 9d85c5a9..e8b7b89d 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -39,7 +39,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray:2.35.0-py39-cu121 + - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 imagePullPolicy: Always lifecycle: preStop: @@ -111,7 +111,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray:2.35.0-py39-cu121 + - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 lifecycle: preStop: exec: diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index 7abc17ed..e8bd7430 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -40,7 +40,7 @@ spec: spec: imagePullSecrets: [] containers: - - image: quay.io/modh/ray:2.35.0-py39-cu121 + - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 imagePullPolicy: Always lifecycle: preStop: 
@@ -111,7 +111,7 @@ spec: spec: imagePullSecrets: [] containers: - - image: quay.io/modh/ray:2.35.0-py39-cu121 + - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 lifecycle: preStop: exec: diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 22713d2d..018857a3 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -45,7 +45,7 @@ def createClusterWrongType(): appwrapper=True, machine_types=[True, False], image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/modh/ray:2.35.0-py39-cu121", + image="quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283", write_to_file=True, labels={1: 1}, ) From 868386ec038a8b62926f76351068f6b860183951 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Mon, 23 Sep 2024 13:40:03 +0100 Subject: [PATCH 342/496] Updating ray image with correct SHA --- src/codeflare_sdk/templates/base-template.yaml | 4 ++-- tests/e2e/support.py | 2 +- tests/test-case-bad.yaml | 4 ++-- tests/test-case-no-kueue-no-aw.yaml | 4 ++-- tests/test-case-no-mcad.yamls | 4 ++-- tests/test-case.yaml | 4 ++-- tests/test-default-appwrapper.yaml | 4 ++-- tests/unit_test_support.py | 2 +- 8 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 6240e986..494177f9 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -69,7 +69,7 @@ spec: containers: # The Ray head pod - name: ray-head - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe imagePullPolicy: Always ports: - containerPort: 6379 @@ -150,7 +150,7 @@ spec: spec: containers: - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe # environment variables to set in the container.Optional. 
# Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ lifecycle: diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 80f994cd..8290b8cd 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -8,7 +8,7 @@ def get_ray_image(): - default_ray_image = "quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283" + default_ray_image = "quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe" return os.getenv("RAY_IMAGE", default_ray_image) diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index 91b553aa..dedd2145 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -43,7 +43,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe imagePullPolicy: Always lifecycle: preStop: @@ -90,7 +90,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe lifecycle: preStop: exec: diff --git a/tests/test-case-no-kueue-no-aw.yaml b/tests/test-case-no-kueue-no-aw.yaml index 62c8cb0c..23d73606 100644 --- a/tests/test-case-no-kueue-no-aw.yaml +++ b/tests/test-case-no-kueue-no-aw.yaml @@ -31,7 +31,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe imagePullPolicy: Always lifecycle: preStop: @@ -103,7 +103,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe lifecycle: preStop: exec: diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index babc4fce..12a66dc4 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -34,7 +34,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe imagePullPolicy: Always lifecycle: preStop: @@ -106,7 +106,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe lifecycle: preStop: exec: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index e8b7b89d..9c9a9d5a 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -39,7 +39,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe imagePullPolicy: Always lifecycle: preStop: @@ -111,7 +111,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + - image: 
quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe lifecycle: preStop: exec: diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index e8bd7430..6bf3acf5 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -40,7 +40,7 @@ spec: spec: imagePullSecrets: [] containers: - - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe imagePullPolicy: Always lifecycle: preStop: @@ -111,7 +111,7 @@ spec: spec: imagePullSecrets: [] containers: - - image: quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283 + - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe lifecycle: preStop: exec: diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 018857a3..70ea8016 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -45,7 +45,7 @@ def createClusterWrongType(): appwrapper=True, machine_types=[True, False], image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/modh/ray@sha256:83084b89885232e5733027b2ee45b83d3642ce32ae0b8e18b79ed45b6e734283", + image="quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe", write_to_file=True, labels={1: 1}, ) From c6ed1a1df93632c6f3763a31a328118f704f0b8b Mon Sep 17 00:00:00 2001 From: oksanabaza Date: Mon, 23 Sep 2024 13:20:31 +0100 Subject: [PATCH 343/496] Remove unused mock code for MD5 checks --- tests/e2e/mnist.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/e2e/mnist.py b/tests/e2e/mnist.py index ba5ec96f..143a6b6c 100644 --- a/tests/e2e/mnist.py +++ b/tests/e2e/mnist.py @@ -15,7 +15,6 @@ import os import torch -import torchvision.datasets.utils as utils import requests from pytorch_lightning import LightningModule, Trainer from pytorch_lightning.callbacks.progress import TQDMProgressBar @@ -139,11 +138,6 @@ def configure_optimizers(self): #################### def prepare_data(self): - def check_md5(fpath, md5=None): - return True - - utils.check_md5 = check_md5 - # download print("Downloading MNIST dataset...") From 2d791b4e22eec647bdf1fb5147a249f2c3d57958 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Mon, 16 Sep 2024 12:00:13 +0100 Subject: [PATCH 344/496] UI visual regression testing to cover UI widgets visibility --- .github/workflows/ui_notebooks_test.yaml | 113 + .gitignore | 4 + .../guided-demos/3_widget_example.ipynb | 119 + poetry.lock | 1105 ++++++- pyproject.toml | 1 + ui-tests/.yarnrc | 4 + ui-tests/jupyter_server_config.py | 6 + ui-tests/package.json | 22 + ui-tests/playwright.config.js | 13 + .../tests/widget_notebook_example.test.ts | 64 + .../widgets-cell-0-linux.png | Bin 0 -> 16694 bytes .../widgets-cell-2-linux.png | Bin 0 -> 33158 bytes .../widgets-cell-3-linux.png | Bin 0 -> 7513 bytes .../widgets-cell-4-linux.png | Bin 0 -> 9895 bytes ui-tests/yarn.lock | 2812 +++++++++++++++++ 15 files changed, 4261 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/ui_notebooks_test.yaml create mode 100644 demo-notebooks/guided-demos/3_widget_example.ipynb create mode 100644 ui-tests/.yarnrc create mode 100644 ui-tests/jupyter_server_config.py create mode 100644 ui-tests/package.json create mode 100644 ui-tests/playwright.config.js create mode 100644 ui-tests/tests/widget_notebook_example.test.ts create mode 100644 
ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-0-linux.png create mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-2-linux.png create mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-3-linux.png create mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-4-linux.png create mode 100644 ui-tests/yarn.lock diff --git a/.github/workflows/ui_notebooks_test.yaml b/.github/workflows/ui_notebooks_test.yaml new file mode 100644 index 00000000..864330b9 --- /dev/null +++ b/.github/workflows/ui_notebooks_test.yaml @@ -0,0 +1,113 @@ +name: UI notebooks tests + +on: + pull_request: + types: [labeled] + +concurrency: + group: ${{ github.head_ref }}-${{ github.workflow }} + cancel-in-progress: true + +env: + CODEFLARE_OPERATOR_IMG: "quay.io/project-codeflare/codeflare-operator:dev" + +jobs: + verify-3_widget_example: + if: ${{ github.event.label.name == 'test-guided-notebooks' || github.event.label.name == 'test-ui-notebooks'}} + runs-on: ubuntu-20.04-4core + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Checkout common repo code + uses: actions/checkout@v4 + with: + repository: "project-codeflare/codeflare-common" + ref: "main" + path: "common" + + - name: Checkout CodeFlare operator repository + uses: actions/checkout@v4 + with: + repository: project-codeflare/codeflare-operator + path: codeflare-operator + + - name: Set Go + uses: actions/setup-go@v5 + with: + go-version-file: "./codeflare-operator/go.mod" + cache-dependency-path: "./codeflare-operator/go.sum" + + - name: Set up gotestfmt + uses: gotesttools/gotestfmt-action@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up specific Python version + uses: actions/setup-python@v5 + with: + python-version: "3.9" + cache: "pip" # caching pip dependencies + + - name: Setup and start KinD cluster + uses: ./common/github-actions/kind + + - name: Deploy CodeFlare stack + id: deploy + run: | + cd codeflare-operator + echo Setting up CodeFlare stack + make setup-e2e + echo Deploying CodeFlare operator + make deploy -e IMG="${CODEFLARE_OPERATOR_IMG}" -e ENV="e2e" + kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators codeflare-operator-manager + cd .. + + - name: Setup Guided notebooks execution + run: | + echo "Installing papermill and dependencies..." + pip install poetry ipython ipykernel + poetry config virtualenvs.create false + echo "Installing SDK..." 
+ poetry install --with test,docs + + - name: Install Yarn dependencies + run: | + poetry run yarn install + poetry run yarn playwright install chromium + working-directory: ui-tests + + - name: Fix 3_widget_example.ipynb notebook for test + run: | + # Remove login/logout cells, as KinD doesn't support authentication using token + jq -r 'del(.cells[] | select(.source[] | contains("Create authentication object for user permissions")))' 3_widget_example.ipynb > 3_widget_example.ipynb.tmp && mv 3_widget_example.ipynb.tmp 3_widget_example.ipynb + jq -r 'del(.cells[] | select(.source[] | contains("auth.logout()")))' 3_widget_example.ipynb > 3_widget_example.ipynb.tmp && mv 3_widget_example.ipynb.tmp 3_widget_example.ipynb + # Set explicit namespace as SDK need it (currently) to resolve local queues + sed -i "s/head_memory_limits=2,/head_memory_limits=2, namespace='default',/" 3_widget_example.ipynb + working-directory: demo-notebooks/guided-demos + + - name: Run UI notebook tests + run: | + set -euo pipefail + + poetry run yarn test + working-directory: ui-tests + + - name: Upload Playwright Test assets + if: always() + uses: actions/upload-artifact@v4 + with: + name: ipywidgets-test-assets + path: | + ui-tests/test-results + + - name: Upload Playwright Test report + if: always() + uses: actions/upload-artifact@v4 + with: + name: ipywidgets-test-report + path: | + ui-tests/playwright-report diff --git a/.gitignore b/.gitignore index 9b0018c2..c089dfc6 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,7 @@ Pipfile.lock build/ tls-cluster-namespace quicktest.yaml +node_modules +.DS_Store +ui-tests/playwright-report +ui-tests/test-results diff --git a/demo-notebooks/guided-demos/3_widget_example.ipynb b/demo-notebooks/guided-demos/3_widget_example.ipynb new file mode 100644 index 00000000..4d3d6ea7 --- /dev/null +++ b/demo-notebooks/guided-demos/3_widget_example.ipynb @@ -0,0 +1,119 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "8d4a42f6", + "metadata": {}, + "source": [ + "In this notebook, we will go through the basics of using the SDK to:\n", + " - Spin up a Ray cluster with our desired resources\n", + " - View the status and specs of our Ray cluster\n", + " - Take down the Ray cluster when finished" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b55bc3ea-4ce3-49bf-bb1f-e209de8ca47a", + "metadata": {}, + "outputs": [], + "source": [ + "# Import pieces from codeflare-sdk\n", + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "614daa0c", + "metadata": {}, + "outputs": [], + "source": [ + "# Create authentication object for user permissions\n", + "# IF unused, SDK will automatically check for default kubeconfig, then in-cluster config\n", + "# KubeConfigFileAuthentication can also be used to specify kubeconfig path manually\n", + "auth = TokenAuthentication(\n", + " token = \"XXXXX\",\n", + " server = \"XXXXX\",\n", + " skip_tls=False\n", + ")\n", + "auth.login()" + ] + }, + { + "cell_type": "markdown", + "id": "bc27f84c", + "metadata": {}, + "source": [ + "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", + "\n", + "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. 
\n", + "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0f4bc870-091f-4e11-9642-cba145710159", + "metadata": {}, + "outputs": [], + "source": [ + "# Create and configure our cluster object\n", + "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", + "cluster = Cluster(ClusterConfiguration(\n", + " name='raytest', \n", + " head_cpu_requests='500m',\n", + " head_cpu_limits='500m',\n", + " head_memory_requests=2,\n", + " head_memory_limits=2,\n", + " head_extended_resource_requests={'nvidia.com/gpu':0}, # For GPU enabled workloads set the head_extended_resource_requests and worker_extended_resource_requests\n", + " worker_extended_resource_requests={'nvidia.com/gpu':0},\n", + " num_workers=2,\n", + " worker_cpu_requests='250m',\n", + " worker_cpu_limits=1,\n", + " worker_memory_requests=2,\n", + " worker_memory_limits=2,\n", + " # image=\"\", # Optional Field \n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", + " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", + "))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2d8e6ce3", + "metadata": {}, + "outputs": [], + "source": [ + "cluster.status()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.19" + }, + "vscode": { + "interpreter": { + "hash": "f9f85f796d01129d0dd105a088854619f454435301f6ffec2fea96ecbd9be4ac" + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/poetry.lock b/poetry.lock index 21568298..a56bb3f0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "aiohttp" @@ -124,6 +124,115 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "anyio" +version = "4.5.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.5.0-py3-none-any.whl", hash = "sha256:fdeb095b7cc5a5563175eedd926ec4ae55413bb4be5770c424af0ba46ccb4a78"}, + {file = "anyio-4.5.0.tar.gz", hash = "sha256:c5a275fe5ca0afd788001f58fca1e69e29ce706d746e317d660e21f70c530ef9"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + 
{file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + +[[package]] +name = "arrow" +version = "1.3.0" +description = "Better dates & times for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, +] + +[package.dependencies] +python-dateutil = ">=2.7.0" +types-python-dateutil = ">=2.8.10" + +[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] + [[package]] name = "asttokens" version = "2.4.1" @@ -142,6 +251,20 @@ six = ">=1.12.0" astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] +[[package]] +name = 
"async-lru" +version = "2.0.4" +description = "Simple LRU cache for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, + {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + [[package]] name = "async-timeout" version = "4.0.3" @@ -172,6 +295,20 @@ tests = ["attrs[tests-no-zope]", "zope-interface"] tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +[[package]] +name = "babel" +version = "2.16.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "bcrypt" version = "4.1.3" @@ -212,6 +349,45 @@ files = [ tests = ["pytest (>=3.2.1,!=3.3.0)"] typecheck = ["mypy"] +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "bleach" +version = "6.1.0" +description = "An easy safelist-based HTML-sanitizing tool." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.3)"] + [[package]] name = "cachetools" version = "5.3.3" @@ -580,6 +756,37 @@ test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-co test-randomorder = ["pytest-randomly"] tox = ["tox"] +[[package]] +name = "debugpy" +version = "1.8.5" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, + {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, + {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, + {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, + {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, + {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, + {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, + {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, + {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, + {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, + {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, + {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = "sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, + {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, + {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, + {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, + {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, + {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, + {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, + {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = 
"sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, + {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, + {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, + {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, +] + [[package]] name = "decorator" version = "5.1.1" @@ -591,6 +798,17 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + [[package]] name = "distlib" version = "0.3.8" @@ -630,6 +848,20 @@ files = [ [package.extras] tests = ["asttokens", "littleutils", "pytest", "rich"] +[[package]] +name = "fastjsonschema" +version = "2.20.0" +description = "Fastest Python implementation of JSON schema" +optional = false +python-versions = "*" +files = [ + {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, + {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, +] + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + [[package]] name = "filelock" version = "3.15.4" @@ -646,6 +878,17 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +files = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] + [[package]] name = "frozenlist" version = "1.4.1" @@ -892,6 +1135,63 @@ files = [ [package.extras] protobuf = ["grpcio-tools (>=1.64.1)"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "idna" version = "3.7" @@ -933,6 +1233,39 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "ipykernel" +version = "6.29.5" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, + {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=24" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] + [[package]] name = "ipython" version = "8.18.1" @@ -991,6 +1324,20 @@ widgetsnbextension = ">=4.0.10,<4.1.0" [package.extras] test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +optional = false +python-versions = ">=3.7" +files = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] + +[package.dependencies] +arrow = ">=0.15.0" + [[package]] name = "jedi" version = "0.19.1" @@ -1027,6 +1374,28 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "json5" +version = "0.9.25" +description = "A Python implementation of the JSON5 data format." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, + {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, +] + +[[package]] +name = "jsonpointer" +version = "3.0.0" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + [[package]] name = "jsonschema" version = "4.22.0" @@ -1040,9 +1409,17 @@ files = [ [package.dependencies] attrs = ">=22.2.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} jsonschema-specifications = ">=2023.03.6" referencing = ">=0.28.4" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} rpds-py = ">=0.7.1" +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] @@ -1062,6 +1439,216 @@ files = [ [package.dependencies] referencing = ">=0.31.0" +[[package]] +name = "jupyter-client" +version = "8.6.3" +description = "Jupyter protocol implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, + {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +description = "Jupyter core package. A base package on which Jupyter projects rely." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyter-events" +version = "0.10.0" +description = "Jupyter Event System library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, + {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, +] + +[package.dependencies] +jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} +python-json-logger = ">=2.0.4" +pyyaml = ">=5.3" +referencing = "*" +rfc3339-validator = "*" +rfc3986-validator = ">=0.1.1" +traitlets = ">=5.3" + +[package.extras] +cli = ["click", "rich"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] + +[[package]] +name = "jupyter-lsp" +version = "2.2.5" +description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, + {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jupyter-server = ">=1.1.2" + +[[package]] +name = "jupyter-server" +version = "2.14.2" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server-2.14.2-py3-none-any.whl", hash = "sha256:47ff506127c2f7851a17bf4713434208fc490955d0e8632e95014a9a9afbeefd"}, + {file = "jupyter_server-2.14.2.tar.gz", hash = "sha256:66095021aa9638ced276c248b1d81862e4c50f292d575920bbe960de1c56b12b"}, +] + +[package.dependencies] +anyio = ">=3.1.0" +argon2-cffi = ">=21.1" +jinja2 = ">=3.0.3" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-events = ">=0.9.0" +jupyter-server-terminals = ">=0.4.4" +nbconvert = ">=6.4.4" +nbformat = ">=5.3.0" +overrides = ">=5.0" +packaging = ">=22.0" +prometheus-client = ">=0.9" +pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} +pyzmq = ">=24" +send2trash = ">=1.8.2" +terminado = ">=0.8.3" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" +websocket-client = ">=1.7" + +[package.extras] +docs = ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.5.3" +description = "A Jupyter Server Extension Providing Terminals." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, + {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, +] + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] + +[[package]] +name = "jupyterlab" +version = "4.2.5" +description = "JupyterLab computational environment" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321"}, + {file = "jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75"}, +] + +[package.dependencies] +async-lru = ">=1.0.0" +httpx = ">=0.25.0" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +ipykernel = ">=6.5.0" +jinja2 = ">=3.0.3" +jupyter-core = "*" +jupyter-lsp = ">=2.0.0" +jupyter-server = ">=2.4.0,<3" +jupyterlab-server = ">=2.27.1,<3" +notebook-shim = ">=0.2" +packaging = "*" +setuptools = ">=40.1.0" +tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} +tornado = ">=6.2.0" +traitlets = "*" + +[package.extras] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.3.5)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.3.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.2)", 
"jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.1.post2)", "matplotlib (==3.8.3)", "nbconvert (>=7.0.0)", "pandas (==2.2.1)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] +test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] +upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.3.0" +description = "Pygments theme using JupyterLab CSS variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, + {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, +] + +[[package]] +name = "jupyterlab-server" +version = "2.27.3" +description = "A set of server components for JupyterLab and JupyterLab like applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"}, + {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"}, +] + +[package.dependencies] +babel = ">=2.10" +importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0.3" +json5 = ">=0.9.0" +jsonschema = ">=4.18.0" +jupyter-server = ">=1.21,<3" +packaging = ">=21.3" +requests = ">=2.31" + +[package.extras] +docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] +openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] + [[package]] name = "jupyterlab-widgets" version = "3.0.11" @@ -1274,6 +1861,17 @@ docs = ["IPython", "bump2version", "furo", "sphinx", "sphinx-argparse", "towncri lint = ["black", "check-manifest", "flake8", "isort", "mypy"] test = ["Cython", "greenlet", "ipython", "pytest", "pytest-cov", "setuptools"] +[[package]] +name = "mistune" +version = "3.0.2" +description = "A sane and fast Markdown parser with useful plugins and renderers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, +] + [[package]] name = "msgpack" version = "1.0.8" @@ -1438,6 +2036,115 @@ files = [ {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] +[[package]] +name = "nbclient" +version = "0.10.0" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, + {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, +] + +[package.dependencies] +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +nbformat = ">=5.1" +traitlets = ">=5.4" + +[package.extras] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] + +[[package]] +name = "nbconvert" +version = "7.16.4" +description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, + {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +bleach = "!=5.0.0" +defusedxml = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<4" +nbclient = ">=0.5.0" +nbformat = ">=5.7" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.1" + +[package.extras] +all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["pyqtwebengine (>=5.15)"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] +webpdf = ["playwright"] + +[[package]] +name = "nbformat" +version = "5.10.4" +description = "The Jupyter Notebook format" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, + {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, +] + +[package.dependencies] +fastjsonschema = ">=2.15" +jsonschema = ">=2.6" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +traitlets = ">=5.1" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = 
"nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + +[[package]] +name = "notebook-shim" +version = "0.2.4" +description = "A shim layer for notebook traits and config" +optional = false +python-versions = ">=3.7" +files = [ + {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, + {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, +] + +[package.dependencies] +jupyter-server = ">=1.8,<3" + +[package.extras] +test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] + [[package]] name = "numpy" version = "2.0.0" @@ -1551,6 +2258,17 @@ paramiko = "*" pyyaml = "*" six = "*" +[[package]] +name = "overrides" +version = "7.7.0" +description = "A decorator to automatically detect mismatch when overriding a method." +optional = false +python-versions = ">=3.6" +files = [ + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, +] + [[package]] name = "packaging" version = "24.1" @@ -1635,6 +2353,17 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] xml = ["lxml (>=4.9.2)"] +[[package]] +name = "pandocfilters" +version = "1.5.1" +description = "Utilities for writing pandoc filters in python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, + {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, +] + [[package]] name = "paramiko" version = "3.4.0" @@ -1796,6 +2525,35 @@ files = [ {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, ] +[[package]] +name = "psutil" +version = "6.0.0" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, + {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, + {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, + {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, + {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, + {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, + {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, + {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, + {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, + {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, + {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, + {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, + {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + [[package]] name = "ptyprocess" version = "0.7.0" @@ -2087,6 +2845,17 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] + [[package]] name = "pytz" version = "2024.1" @@ -2098,6 +2867,44 @@ files = [ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false 
+python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pywinpty" +version = "2.0.13" +description = "Pseudo terminal support for Windows from Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, + {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, + {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"}, + {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"}, + {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"}, + {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -2158,6 +2965,127 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "pyzmq" +version = "26.2.0" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, + {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, + {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, + {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, + {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, + {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"}, + {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"}, + {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = "sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"}, + {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"}, + {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"}, + {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"}, + {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"}, + {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"}, + {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, + {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = 
"sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"}, + {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"}, + {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, + {file = "pyzmq-26.2.0.tar.gz", hash = 
"sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + [[package]] name = "ray" version = "2.35.0" @@ -2287,6 +3215,31 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] + [[package]] name = "rich" version = "12.6.0" @@ -2427,6 +3380,22 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" +[[package]] +name = "send2trash" +version = "1.8.3" +description = "Send file to trash natively under Mac OS X, Windows and Linux" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, + {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, +] + +[package.extras] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] +win32 = ["pywin32"] + [[package]] name = "setuptools" version = "70.1.0" @@ -2478,6 +3447,28 @@ test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-clo webhdfs = ["requests"] zst = ["zstandard"] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "soupsieve" +version = "2.6" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, + {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, +] + [[package]] name = "stack-data" version = "0.6.3" @@ -2497,6 +3488,45 @@ pure-eval = "*" [package.extras] tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] +[[package]] +name = "terminado" +version = "0.18.1" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, + {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, +] + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} +tornado = ">=6.1.0" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] +typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] + +[[package]] +name = "tinycss2" +version = "1.3.0" +description = "A tiny CSS parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, + {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, +] + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["pytest", "ruff"] + [[package]] name = "tomli" version = "2.0.1" @@ -2508,6 +3538,26 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tornado" +version = "6.4.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, +] + [[package]] name = "traitlets" version = "5.14.3" @@ -2523,6 +3573,17 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", 
"pytest-mypy-testing"] +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20240906" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-python-dateutil-2.9.0.20240906.tar.gz", hash = "sha256:9706c3b68284c25adffc47319ecc7947e5bb86b3773f843c73906fd598bc176e"}, + {file = "types_python_dateutil-2.9.0.20240906-py3-none-any.whl", hash = "sha256:27c8cc2d058ccb14946eebcaaa503088f4f6dbc4fb6093d3d456a49aef2753f6"}, +] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -2545,6 +3606,20 @@ files = [ {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] +[[package]] +name = "uri-template" +version = "1.3.0" +description = "RFC 6570 URI Template Processor" +optional = false +python-versions = ">=3.7" +files = [ + {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, + {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, +] + +[package.extras] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] + [[package]] name = "urllib3" version = "2.2.2" @@ -2593,6 +3668,32 @@ files = [ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] +[[package]] +name = "webcolors" +version = "24.8.0" +description = "A library for working with the color formats defined by HTML and CSS." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, + {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, +] + +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] +tests = ["coverage[toml]"] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + [[package]] name = "websocket-client" version = "1.8.0" @@ -2820,4 +3921,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "cbff1a5a7da645b81c7de962eadeb2a8378eb9f783ebed526a3634f1d97e2bf5" +content-hash = "6f7b98a5dfe61a527f7a5398ebccc955ba6e7233d8c77789dbcf55f3c9f8169f" diff --git a/pyproject.toml b/pyproject.toml index 21729ee8..a1f02ccc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,6 +48,7 @@ pytest = "7.4.0" coverage = "7.2.7" pytest-mock = "3.11.1" pytest-timeout = "2.3.1" +jupyterlab = "4.2.5" [tool.pytest.ini_options] filterwarnings = [ diff --git a/ui-tests/.yarnrc b/ui-tests/.yarnrc new file mode 100644 index 00000000..0f81e58d --- /dev/null +++ b/ui-tests/.yarnrc @@ -0,0 +1,4 @@ +disable-self-update-check true +ignore-optional true +network-timeout "300000" +registry "https://registry.npmjs.org/" diff --git a/ui-tests/jupyter_server_config.py b/ui-tests/jupyter_server_config.py new file mode 100644 index 00000000..e7983fe7 --- /dev/null +++ b/ui-tests/jupyter_server_config.py @@ -0,0 +1,6 @@ +from jupyterlab.galata import configure_jupyter_server + +configure_jupyter_server(c) + +# Uncomment to set server log level to debug level +# c.ServerApp.log_level = "DEBUG" diff --git a/ui-tests/package.json b/ui-tests/package.json new file mode 100644 index 00000000..17b79185 --- /dev/null +++ b/ui-tests/package.json @@ -0,0 +1,22 @@ +{ + "name": "@jupyter-widgets/ui-tests", + "private": true, + "version": "0.1.0", + "description": "ipywidgets UI Tests", + "scripts": { + "start": "jupyter lab --config ./jupyter_server_config.py", + "start:detached": "jlpm start&", + "test": "npx playwright test", + "test:debug": "PWDEBUG=1 npx playwright test", + "test:report": "http-server ./playwright-report -a localhost -o", + "test:update": "npx playwright test --update-snapshots", + "deduplicate": "jlpm && yarn-deduplicate -s fewer --fail" + }, + "author": "Project Jupyter", + "license": "BSD-3-Clause", + "devDependencies": { + "@jupyterlab/galata": "^5.0.1", + "@playwright/test": "^1.32.0", + "yarn-deduplicate": "^6.0.1" + } +} diff --git a/ui-tests/playwright.config.js b/ui-tests/playwright.config.js new file mode 100644 index 00000000..88003f8e --- /dev/null +++ b/ui-tests/playwright.config.js @@ -0,0 +1,13 @@ +const baseConfig = require('@jupyterlab/galata/lib/playwright-config'); + +module.exports = { + ...baseConfig, + timeout: 460000, + webServer: { + command: 'yarn start', + url: 'http://localhost:8888/lab', + timeout: 120 * 1000, + reuseExistingServer: !process.env.CI, + }, + 
retries: 0, +}; diff --git a/ui-tests/tests/widget_notebook_example.test.ts b/ui-tests/tests/widget_notebook_example.test.ts new file mode 100644 index 00000000..11eff319 --- /dev/null +++ b/ui-tests/tests/widget_notebook_example.test.ts @@ -0,0 +1,64 @@ +// Copyright 2024 IBM, Red Hat +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +import { test } from "@jupyterlab/galata"; +import { expect } from "@playwright/test"; +import * as path from "path"; + +test.setTimeout(460000); + +test.describe("Visual Regression", () => { + test.beforeEach(async ({ page, tmpPath }) => { + await page.contents.uploadDirectory( + path.resolve(__dirname, "../../demo-notebooks/guided-demos"), + tmpPath + ); + await page.filebrowser.openDirectory(tmpPath); + }); + + test("Run notebook and capture cell outputs", async ({ + page, + tmpPath, + }) => { + const notebook = "3_widget_example.ipynb"; + await page.notebook.openByPath(`${tmpPath}/${notebook}`); + await page.notebook.activate(notebook); + + const captures: (Buffer | null)[] = []; // Array to store cell screenshots + const cellCount = await page.notebook.getCellCount(); + + // Run all cells and capture their screenshots + await page.notebook.runCellByCell({ + onAfterCellRun: async (cellIndex: number) => { + const cell = await page.notebook.getCellOutput(cellIndex); + if (cell && (await cell.isVisible())) { + captures[cellIndex] = await cell.screenshot(); // Save the screenshot by cell index + } + await page.addStyleTag({ content: '.jp-cell-toolbar { display: none !important; }' }); + }, + }); + + await page.notebook.save(); + + // Ensure that each cell's screenshot is captured + for (let i = 0; i < cellCount; i++) { + const image = `widgets-cell-${i}.png`; + + if (captures[i]) { + expect.soft(captures[i]).toMatchSnapshot(image); // Compare pre-existing capture + continue; + } + } + }); +}); diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-0-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-0-linux.png new file mode 100644 index 0000000000000000000000000000000000000000..efb098ef7d444d65a0d2be02ebe4f96f27849ccf GIT binary patch literal 16694 zcmd74cRZGH+Xq}yq_S6b$cpTdJ&Tf6NythXgi2giGRjH_Nj4#6hDc_TL}e2)Ln2#7 z#{1QMKll4Q&wYR1_1Am-Q9f6v>pIW#cl?gyyUqgjbdK#KVi>G9`j``eoSc07_U)<> z5|WawEiH;>?bCjw3QDgFDIUCd@gg@@xMcL#U9E}mp&=`IHYEuOiQTtmudgnQx4$^6 zn>6reX`F0y3X{OZ-vCc~{;iz5xu>@to?IXUEq9!Ey{)eYxcxV!&(d9k4*+hply`=JVf4d>y_ z#7~8~1`~rd{z_Wql*f+M>1O*F-M@cd^TY|St3ry$6n~f3jNDk7{K_RPyt$F0IA3jO zV1Un{my>(5gM=K1NlQvzw6>nHk-5Dlb63@MeN{5{Q-#OM%c!U`nwq2$_1_~}o0{^j z4ZZ1~-dERpf*FLD=L%oFGBz>sU7?p} ztGqd;L#ZSy`*V6aH!UqKHI;^uF(8F6U@ZPo;DZNJQc^tBfwHo)J+H3OoZhDGGxGhr zk&%%eU*wq=hV304R+)i;f$8b#&!0bk|Neb`em=*I{t8c*sb0Ljy|S5!nVE|Cik+R) z_3O<|O-TlsQ&Ur{tgMg21_v$K+1bhNrr-`wPb*1D-K%9+a65hWY_ukKcURZev%vDE+{I2S2=U-1q=dt={)zpSR#Y&UrIddjBIG9(YK|)5R#B1}m z-t_Enor1jl?TwYOVkQ!`%g62%^D#3sOG--i^$~Lk$;0)R%a+JvKRMsJ)!x>I+htzvcI(!yq5vO{kEFlPU%Mv67Rt`Z z*j45#&K9bxqceKw;*~4=_wNtfxwRzwb|t%rPFvh`bd4=k@C&;^JFN`s>#(6%`ea{T1uKdnP(_xkwER 
[Binary content omitted: base85-encoded GIT binary patch payload for ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-0-linux.png (PNG snapshot, 16694 bytes).]
zJ=^C!ERtnbm8436U>W`#Ga=?oh;q5Z)4+dX76=C1AHX?$HoB*5;XlzDsMGLP1Ox?L zoSp54%Gr$BSy?G5DW_*@ z6c+v`>i^u{{`~|MHy0Oq)mna0(N=B6ZbK&15{G@uPFQ9$3P)$lnlBDWMb%s8AvYq( z3~k{Zp1a(m1=LiyUAmd?@0i&95pInU_no4kB|zMq5gL3j4l^({w6L?Yo z(*)lCccSngV|@Srd`N{qc;7ugKR46eaO!NXU#F`Lpxb?+*nV`Xm*Pjm!++u?zeE8O z|26XWNbc_(o2<(ZWk*TT=rXWJ^sK*efF-V-LBk84Hff$au${3#$F;UbF21Fhm*l3e zq3akPPFCQ7sg)*}ateO9jr!fY>zHhKt|#QLbiUZ$;_h7;wJktwN+s;EZ4dJTd)6?@ zR_wX8N#8YerxD=#^y$+H3Fw``>c26EB44SK){l_@z!kaTe%D{$v(M+NfC4F{ZcNE< zl{kHitG0t0W}JVS026`;>3l>o-8MIl-i5kK<~obCBPli8?%fC#=OD9zaTf3!#G>TP zPl%e7O57Ndl}XfmssniG3HN*%B^P11zTV!aT{h4mTxZ2Z70mrfmHczt6&wzhnF_ z^MqU)$QPjfgnpu@H&IoJ`%hO5e`Qo8Dj}h#qw@nk6Y?u4y^wMimzR;rCmjIie`7yd z3ysEi8nHo)rC^Y!!c|I09T;G5!L0A-_wS;Y+mSFtb?}w+5M>|*E(!_@n>@wM#s+nI zbN@qtQjhhS3{(e*?q8rpV-4XWBUvL0n$ZY<6ZrI*Ml2yQf@=cx&v^1Ag{`)(4#th5 zGmIMIS>(1VaUI|Zp7pLmB%KuYiCiFxn&66FRMx-OxRsX(oabxME^2=`=6xfwV_r8sKYZ&TN&` zCbLS$XF~@}OZ!3fbv-(c@`r_ao*^YSxGVzbPtgJ3^(m^KKhFjgePxdAUzvnW?45pI z=%K*0;K2RL=$^3oxtbvoZydRQ8D=|7J(5w(nb>`L6AC&wJ&j~I#415CF$Tp4XnkXg zs|Z>YF#Fa3Fp`WCYTu8RgWzT0e0+RJ=@Kb2N;%zFrm=s{*V}qdj3-5r99h`q^kw?hNBO?Hi`n;FY8WBgJ%2 z{b^do?0nUhfxdnPQs>pJ%Dh0)UCqsyjB0xSei>tEBP>f>;j(NQGkfaf5@2ni&8ApZfce1o>fwq~ifDlE`M#a2G@y zz@cL`oE#kyxKv)6nw*r8mc|_e#NqH)xh$!eGa(C+m7V=pH^6`=e;+0BKCtC<87~jf z1>2q1EDA;eTl-ajjWqp&9v?IV`Kt=>_wSlHz8a<_^MoLPGbp)$iG)BLzg7r8uIeG7lai!ZD z@CqU9T~RcP$f}UH0`e|oTTg)=(hg^!Q6n*cnJTkV$4o>5Fs#Qp`rmmoyj}UNjr`P9 z3y_aE5u}e1XC&T#Vx-v(KsffWHQ*I8Ul>|NZK&==MFO`vaNqy~co{|<&tRYw@a^qe zL&y*Cs_+z$Z*oOI8`Uz9#Tn%j8l7E#?kEZklq|SljaRynRzZ9nr-)15qZ}6Vv1AZ( zNhvAS{rjLv{QR5DUitBZ9plvFNI5JPT_3~)JZ3+laQX5xo4qmd@if0Zj>^iy!an$M z5~Ue|w-1;(e)v#{2`CV*G}s0B0I2Qi=_8Vo@USrz`A z*RJCQ0H{_7AyO~`JtE38GB)-wHWm%c9mzMiExuLV_^xaiHD+e=d-v8sAS){s)lkCt zvVsCpsLIuA*Vf%4JXssU3G0J7-KIStr1C^aB91f%NlV+hTMz^}V3 z(@~Ru{D4VL7}G*5BAAr}WbBrM10GU)vV4bS9x(|?12?_AO3|7lW&9`*`yNVoUYO-; zJ6vakSSDN(D)ofgwRf@xkP&@K5MGf28nWMD?Wg3vQX>O7Sg@|en&~FWMssl22&No$wD$D#Yg~NiG-NP z9@;lNOrM5X4;0L4*HC<(wxOQ_1Rn@ndW6fEqeUPp%>W~p1;|O?g3Ax)>?0PThWH=T zWq62c_SPp^Yfz8@%r`C_G%iMB0jR={%B;$R;3si~gA&$iDj+q5M*0Wa9kTy}<_QFI zH(z5<*N!iO$M#RcE2qo~RO924wkHa`>*{*rV*!=o18f0I({!>S@4`x};^1&7?i)@Q zq~Y+U$Y!g8faR5y?wMs+1~`-Oy%!ej!9$)bBB_e0%e?tqFHgBTrpx_B?KEd|dv<|z zY_4sm2l~P50&JLNCow$2gIfyzS9<0=Mo&O*;_{1&W0ZMeI4n|zgoPnMDH-Q6R!{Yw zk{8#`*P1MEmU6E@LqCCy3pv%W-4O1Wnv~%^p~?u}ZEkI~!;a=@{o>HTrhxFh$M0`a z%iZwNx(F4KDt*0ZW?%gzAqp;k! z$gogT)|TPjU|I3yx3al;3IZJZ`cX08naX_w?@e(G;Y2G|qXlAQ#B9Q0F?nRrXY+XKf!VR&^20+{5f(2<3hXKEk?bYt>7EC#~q8Qsd?@k zR1iyltvCrPKj=AO->IQs;i&Y_GXV+q^K%}*D}fm*)FpHIsC)Ow)plJbO^V&egJu9< zS4JjIfrm2R3EDzwIlb(KueBtX2O?7(G#R62GbL{1m| z5RxpCPRJdit748;T}`cea_;;0O_;X1g21o1_Uh{?vuV-wk9fvxcb3h}6ZtDC*5OPX zFTJFK@afsJc|}F%3e>?brn$(F^vdV~d{s2uSfRGUB?6hk*@1?KlK{;L^2*LVD?kv? 
zmvHqw7)8FPl1|0+3SBZK#p|mqL=XPRqgVTc7n@FaY@RAwvnu~$O(ntJHZ`#vU@n@c^W#TMP2}5jrG5Tv%pd8pICe63 zCqr>7#>}5Qdxo4O7|C%Xf;GFsT-g4b2mWI0WJIboQ5mi zq18jQ-*Casfgpjc;?iBC8#?$21~3g*J0QcQ&4Yv|S$yAQSHnFdMj`VA+`9)A9Fy3c z=SVwE0eOL2s;gmr{Q<%7afCdEm>Fs#dKk_E)Frg}A0L9*6DS-XQKYmCj`2LR7^cF@ zP>}6HU>H3cLD+d%36?C%zd5EuM~;p;ln{*ctSU}xTxKWZuaIhgQV2Wq!=JEzNrqCc8b+Sz@EQqbk9r@R+Ba^wA z88Q{osx{-o`&KJg98;6v(k?rYsQEv*8hHA zMvd@gIM;C{!o<@3y$hgWyemf7-7#2-m>93rs>(_(SAEXC8!*~%lwf;~(Z4enYF@fd zijW?d^u#cU25$smR!3W#C=E&`7I36OADYjary}QRFjFO5l$pf__rA z2>V&~>lxr^%zxpKK_DW|cS^d8%nrPWJSobQloY%GrqgWiqUQ!nqbsX{1|G$KMREN9 zNc!SsI3mwJrDzL4vhlbo3H8as84}oc>mz}OncWty zFEu-t)&0g(z#Mk2d~BLdCeU6m_ElKeD`1VBobb?4g!jYo;tpw=5Kg)bE|R+eP@bnI zCj4HpQYsr5I015i&Wi{LK(+i->3%GrPB8~cEC?qwHdJe5HXL*O{>e?OD$Zj2f;8nF z0%6AC!D~={CvR;~XkecvTpucL)-C)y*+=23aOLV%scRo4Py+y<#0p>I#Y9)@8g@;O zj^3Q<-Z#<$fnhCbGy|cF*ROT_*a5!ix#dXZnZSAY`*-uyQjRp4PTyOFP6){hrDh99 za-=cOPlc?P1M9s+Om*$?d>rojw{BV z#2BwJ~RahcO8e_ta*$gI4)2K|$sn!5I#ql3eX z=g$?Uw&6q2wl?&kvikY^mz0+BQd*juGqAByxqT%p=8f41F>mx!Kn1`v^j@4vwA$(U zb^nO?cxc^w7E~jNklq0xxp?s~;z~H?u*LBzksT_lp5GE(Wc5FBEQIr|ohs61pKuEE zt*eP}p`*GPto}SK^?|zJ;HT$-{HYJpn4juBF}m5}n3HX)<#o~8hQV* zFoT7KKtulpTJI1Nhzd@(ZvF0ix~k(*|H_Lax3Vst2V4ZOruQL%AyLhHkJvD}cI2^gQ6rH&cF*N|@Q{71^ShTUeHre&)HE#JzAX#3xn>G6|x zju1vZt(5*>{DE252wrmr!7FUt2`7}BJ6T+$%Nf?tOJHF^K`M{DKY!AnJn`FHU&)x* zul-^^0To_1Qx8*lqQ?)1Qz7L{N^GIQxmUsCy7cA+c!~$$_X44T=%8GpprM>J*%wJ( zSzcboe(Tgla0_fJ%tch=eP3UaoO>xj=qYoB)(ATTW5GLi?Xni8|GUuxaSFiX&=iaF z3JVb$+CnqIvmDX7i~0iS0WDACdjotLq$H5%hs7_#!?RF$Re~D&fHSlPV3m|T7^fyu zV%5_2#)Q8^e!~1 z%7?xVZ_g1g+l>%LB9wV=LTkkt*zEhlezBjU9ItR1(QAODr_8&D@?moF{Fg77r++(L z?wVzYu1biXiN9pB7lnFiyjcTB(iIE_j|I6&kS1WHckS&STbsu}en8K}+&M-=2wIqk zNLxdLe^(0?GI!ze@kWpCezkevJ zY-3mjSXNZ50Z{{ut3=7^+IOV=#W7y&R}H#Z=$hl34!Ob0{=npjKZ2;aKV9BWZ1!`T z1rsx~a;i4`wXyMW@HXHQOs7w7&#Us;SzJP}CA3RtE^#^hB zzDL~h9=KA77yYFV%tbm7`hX5=Yb)r(gI?n-i-6WVRATG|L&%OmEQU;|`Zb3;!Jx&& z?D2Xjbafc`5SGCiB#<8|cU${`#hdu@<^0L6!9n-wPQr^S7=sF|njgXqCRGH^^=TSM zo(#!8q;(*~C2EF;gh=2`fe~sTm{@!zUGoDKQ0f~Qp*`|aDy3@UH4|OSFjAvMj4%^p zbLo=4iAg9wDrp_;mWOHxoxH^B3$+yu_SoOO(oYQD50<-M8nj>tkO`;&R5&VXr%(YJ z5cGYj3hXsCUEL+To?`C{(HDBqud2rvQRZzpF=vBP=QED^91J=_;RIZ`Ht?EE@c}`A zJ8KN&M(Kkii0MouHV`GjI_}&_9HkBocN0_+5q8LVEqZ3(qu!*Z*1&~PN`>JHr5yFv zX8|fII4)z*lNT?bfek{6s{OOEk&u|!6=HeuVi)<_3r{di2q6N=`9rbgSi*Pj)EOdm zk2Mgv`{4gS4~$80g0#s<>4#l?m<8I&_SN1oy}cO!fikuH7~4JnCmH$spHR+`g271) z`TFjZKdMW0BpEh5z8wMhn}UAgIdpH@?Y|yJzr=82^&-}J(Rl-&44EcGIS}UHuSSlJ z1qihGD*>;!pqre(6u4qHQjn8FAKo-v?!uIS8vW`*p*qH1V7`1?s-barbd-pD3mAxP zgtuv2T^}Zy8zjM_xol-6jX|j!?*(dWYjH7@J{S^v3b__m0w5>+Lbx*3GvB~z@wSvI z3|QiAc*t$2c-Dg!qQw{G=by&C($Q@qE-&A5QHYy+IAX7AcTW$YJpIjDdzs$d+;9PI z1cU^7g|}Z|tBb8kc-PQy6~<8~Mn9_ z2tX*g4U@dj2=knq8%tUvMfsJL9-xtgk^o5`QXJkENVJjjrl25Y5!xS-aWW3D*fU?A zK;*)&Uw>6q<$2-4N4VVxX?DFN)NJsg=g+lubf{6IZjRq8D!PM#sEmv$f@9Dt{iYi; z#h8|<%F40>X#^ZM+8^81)rEhFM}koifJ2@IoJ@2-Yz>@c6mejT2|lTijK;@krX>aj+Q3du&>ZL5Rdd$##kh%5_F_~(P4+o z0#Lf0jEto~WpI88*>k-blnhC`e)#Bsw>~~Tn_!K5Rm1Fc5B^`Joq1f&X}tH-defpT zsgNk8&~7P9A=#=4p-?KNRhyV(scglRvNTB{nkGi2h$2gyQU=9nqNpZI2r&^t=l#n$ z=k+@0`R92(PyY#Z-}mpjzSsBr*{+IF1o@#)V@1*hHeQQps;yNz_*J&SCpb8d42)lX zS@1)$+nT|WT(V%nSd6>``p$Jc?>m@Bf6}kKoM%B+0zWcBP{S`-W~b-j=?RVi)HX*k zJ1wml>PbT3LZMRW-zODz32Pn3j5)RV92OGv9cXvJuxRVp_^<)ZAK70FF`T$SA2^Lc zrB^5YM$RqACjq@{ccb=uczNSGIz4~?aTXRm^uzEv*#0<$%d{jp)CcS?x7~70K1=g5 zsPk0yV}}n{*UhCzR1yI_N(VixtsOajJOKJGjTqKGLvigP9&(%PGioN`CN&Gdle7l- z08?F7H@-S$#FPzk{&&d2JvX=bnj&Ped;`YM(nXdYg>=39e-7Z|Meit*EN0ywEyCrY9A<;9~$|(RjMMXn* zbP|ZSB$$sVbOHLe>&3;9oI|ZwfakLMGD0QK&zQdPJwc$I%xffQ;BSM|e(vBWIYT8{ 
z#39Vf<3Ib$SZC3x^p}(4OpIDg83JjlxMF5A)Wsgf4=nAF)%5aJhdb8$V8J0;a`;!8E7s5i1mq!s?iNpLqSkIZx8@d2RjWfeM+$MjOukrI zZ_!ASI9XSxd}NBde?B3Tu$zdR7cYvnNtt+y*IZK*1p5PM!8)hSSv_nio2ATWI&EN?i={ahN`K z73ZyeNXH&f6H&Mf(Z$S>bYI_qB`JyQbL;e|6*y8^7@dOa{HJ5I-adYQ>(Vcc$ugJo zl?mU$DMZiB7qA_3FpXyZQK@cJ7aCujDcx&sw|~lVBjT9L)$P-=KaO;JAdX zJ#xH#*uZ+A@{l1ou=x7i)bsuMn>QTtU{axmSB{@P%{l7Ki00zrLQC%u>*{_7l@S(- z#>Uk18Jm~oI+Y1ojr;c1QW0zW1{Bk;7JppX-v!wiUn0eo419G+0LlSBLvtlIQX4chK0}F-=O0-9D?=)v16!Nhm(_y zq!*p~i?bW28SLH0@^QfXLWg4~KO=!rLRa^cQlQp>Dj~D=px%lYtA98EOpJ{k4cz8} zPsE@4>525HeU4GeR%z$h&(Bdya>Jf|5!RWimB zmKD%+^$K86&G)<|Nk&#k4~`wXMtnu;)rcPTY0_JgiF_eznwbJ(lVdTmP-W8?B2Gy} z5A87b%}rc7b7m=&4gN_ETh~0R`SV}0j=bm53xsXlShCcd2DV~3%|TGLgNw^msL&5( z=M8)^8(k9(JWl?X7QoN1n{`8X#bCW|-8#jXg_=rA`;cuOFIfFo%s>^r-?FkU!&=jP zjYTZOA_U9YvCd$}yHg4q-qNR$my5Pyb*_8nsZ+_oCWVoc%sCY4t9@`T=^7=$=s zgRTNno4)Cz9M)`dSDAGe{}5x@0zU$UGFLE)^E=Vm7|*69po5=hW3w5L2&W5YP~@^r zrZZVj=$5Eb)J`G57i@yLChzv6Z0904_#Wh0Dzn8q5O|oGNl8mHA)uP0|6#f@H#3WK z{F$N$^o)uY=NP||?VX=*TIWqspcMuI|AjI@`T66IOIyEv0Zz`%DB$DgU-F3(r4!s! z`-X&gLPJp4*45V^I&wsQfX9}Mr{7N*wKO-|J2~|YAI5tH)Jw8XXG%Q+1;aTMYXpTx z$BzoJ?3292u9wE1zon-S95Q51dMzIim`QC*JGBGtBC%H#fPG}eYlzyid2$RN1Q*aB zqproBM{9_;VI^=VPN|fXBGelY4na<~gXA(8ifBLs0|U>{Dgw}-N#rDq=z4P`DQOu> zHTv|u>eGAyexalxQL?kY0o`-M0|~X1clLZpbts#MW0C{}&M`hMhzXfn%IQb=z$Y6T zcC7QO+;Q;WUuZBf`F%$BfBZm$%7nY5{Mgw^r})FkZPVWu)c-<1LZ$FVP9Hn=CNb^u zPp+O*JNDAGOPw`nss;`qZ!?3>nI1iYGj347r|Ueg??C*6vXmj1lQseeC(Lt)Z`hL? zN);npd3j8b99{G*k++Ms7UcFA`vBC^2hpbTaAa1!MfU_QYiKlFNvY-4tD>#n!X;;b ztMH#Jz1-a0?>08_(5FK3QCR3^aTG;KGI|62(5d^v&p-ZIWm$LbLVNpqJ_;J~86_Bd zD!$AchO+QE>KCLDV6m>!tDKxRqD`==rRC#S3ydIfa@1C89SE!8;bSPr1z4c^pl);7 zzbw~~FP?HI1yX`@i2Z2ji)9wk(Pgw8l*0XUe>-_{Ou+Pa&stjOliqiAHR0L@Z9<_= z>Se-e<^?zIwqG#d8?x{Gj(41 z%XR$}*B9Rkx#EI0#_4Q%Wu+Hd5~>8tANyhoZNzFD2wtaiYm|1EL-HSws{i9=3}#i- zne1jdgo|q%E&$6Y>XYn@797b4?i=x_AdDBe6A4_W)&=@_{aaI0&#wGR-A3uJKoGWz0hJJnfqUEp0#vs>Xd`i6+)+Ipvo$YWXN>8pc+ z6gSs94yx(GD|gub@v8*7Zd(Ml41;7-5tkpR_IHw#RN0`j_h= zgy;u4|7z5%bNY5$;ij&fq@*N*Xp>}>n}$u!pas@QICc!sudRP?z~Mk~@7}v)j7LUF z|6hOOqx79yx8y%4e3)h5oBvb)z0XB-aF!Z|-)A2kOgF3aEjV60;Owril7<(Cm>;k!L;b?{AtiY4fxU&p<)GuRMOa>9gc5 z1FQ7dA6Q%@m{weqO9+n4yCM*^bmIVvz{A%&-ro>)g_9a@(Ir#eWZz>( z`GK=ug~NwtjooAvuBS3PgDWEY;MUjodHEa3q zCgb4Ni_hk`v0GNXse2NAQ*EeP+{dRDi$_G8MhsGEV3iyI$((dsVPWCl13|fd@A($* z_D`!dtB0xsFBp~bwz;`E(D}qW;&&2ib+`cqCrNr(T9XCHZJMtTnpo%Qw&sbhEgv+i z0$(B+5W}g`wR0Y-#`UG%JbRv{Tl1*3yJWjx8o+6HEX1Y zS#i$-MH!Tadelu5yrg>R@IgI!y2GJ&jzKdO$M$rD( z7WtDLA7v>Pu%4;$^_w?9YhysExkrMe$A4T0J>!%-y)=7IOiT@6%YiDJ2T^7%v}8BS z%ROpO%w`rWp7HSc?>>QnZ^6c1ykKRJmAM(+`CpaoJPBS2jo@4@g82r&%*+MNqwMl( zsY5^fGa8p6ud(^?JruO0~fb^-A z43*+huHN3CkQSSm$Em5S*WJC#G~=c>^1@NW(#aBQ0s|wk%CoZ(V7ZMMpZzlLg;oz(eZH2v-02=7r$;0NvkF2NIvegZEiuX> zsJf~OsKCh9hta`_Edl9KRAlmb>9B>ysRz+?`yl{A`b1_reZ2Qqr4e;8ch{tpGT)RW zI73giN$WU6*lXU)zjo=8`Mu3de`Dv`Vj;8C2nyNzQ593rsB!)e^Q#&fh7N;f>$fum zD-lnK#UUQWB^rbwKH!fW3%Gy@Lm4T0Pi3JPafr|`0zNDr18+JgVH?_zy?c4xcF3l< z|0OEZGzkA$eQ~h{cL;Gu521uJj}xR~GWd7y#F;;z(00igifJAK*p-A5P5{m|ZmCcPg&1L!YP*xMj%Mn>59OZ9CW9S_lD2c44gy(LmiJAeKI z&^x*^j$4{@tOnC3+3F?gjUK)J`R(E6M;_PIAkAEdwkD{Rjl2^5>G7pYVLz+GaDS%t(lh z6=IN>7TbkQw0hI-m>9=Acd5w;L2yOCY;Tv$aDlyebLQsicT zG9quUGHqew<&Ed^^4{@uL5WT}EbL3n0Id^d27V7Dy3_LIw3hU;XP=d7k$;XAaVI?& z$cBfP7a!?}$w84H^kXYL@kLXtOn(PIrbOqC#lS@V2C^YaE)~jhEWZW;>u@!-mv7&C zZB9c{#-m2AM&AM6z^8$jTXVj2dbMR09X0=va!ub&U?5MZFv3ygZpB=|X4#7myS};$ zV#X~{58D(a8FwUho9L}2azQ=5r=vx}Z;M3|%~X!uW8o^U1uRl%1Iz1C%KLV+un{+C zi0%?6MI_TiEwwn9Oer}RGCNaK8yRnEiqL>Uvp6p1a3KqGj-m%c9R#n|fEQ@5U_yA^ zI8SGEze0+sc#Sm?U%7qnu7w)_3ep^G>`yt+5OlIGKTtNZYT;DbEXm??ygDF|$+zqV 
z*{)k@&pA0;Ye3Y@8gB-S^9)&}t|lcB^vfh+nGX@oBBPT~iaS8xl@w?Ppfm-b%JJM`A~5<TQAncmA7P_^bcYkpkS1) zvUad&fOa)?b<(>USe_WSDV#w;_8_?B$x}BPhi}5_!L?2I{?K0Ww>(D>2Os7*GP{C8 zLWtkKdFRd$W#u*RJ>qX)zQxTq(Pe#lYGYrpuOL{G)(%(odQEGGtfxxN>%S3T2=xg@ zMr;Vxc_G7`&U(zVvf2R%2evae?!MDUkSS#Ym(+v=FuMdUVQ6S5;(8^+0*L=hK`k>C zxrof4yZzWeaMtz7h@V9wk*gK|R7!^8&zZ9I^ZTbUgWYTg+H(62ZoKuZ&)$SI7#IOR z5BhTdcMflHq@znDeH?g7quID zGAItt2&*}BBI;h(i!EyxBl@D}dv{>@3T`Px^C#Ntb?fkGUESSpypj?0n?K@$zkw<$ zj?0(#vk_N__6rm)OyGvGbo((UyJuPJGQM#J8m_(g12cbi)|^8diH*&=zkuk;G}RX( z52rBCA%>1z%-{GYS|cGG-65@D$6-TmKW1)zib}ehX%58=b-`B(9tSTSDiC*?Z~b60 zqSom3^%EVQknXpB$&&jIA1;40x~y-$7uUH6u48!RvTwq|>55M|pdaV>{!E``((%D2 z;#A>#x{kj=0mg7J$jfKgkD-2mL({;yChlE)wz+~=uQZmR+F*7gj3~YUR)<8la24f~ zDs|_9(((hGr_C%GBi9;kyj)is_v!z}gAoJT2te=l6Q?!mMY6I8=_v+$NlHs&qt(*Z zW}q~hEn%<)1OZBfTJ9~mH2SoLWRam}zbU50)^^9*yb=&`fpH~m0Z$CB znG>0rP?l!qM>xZPh02%Dz3|RRZ>Q3Tam9wiS9`=Aw@8KHBxs;Uz`3Vy(bLwZ(0{wA z>srh@53644lk6Hg;sz5!Xg|FefpR{Vlfx?i8HNZb6BsCJb8gDHA(wQtwZ*qb2k2kVnd~P~;=Rj%lgXRWChmvZG@pHU z3*J95N!PV)!0FVB3!4o*{+vG5Kwlqg0Cp>>BO#uiqeWZ`afrZ0IEl%eH8TxBA#k99 z;>8{+UUAwp^b1PyOK25K|`)#xl#BSdD^!o_b1f3S%q!9Yk75-2; z93eFmp1_q}n1nmQXxeSv#lNelsi{v5GaNlSgYAaY|ARhxR{Sowp=nd5G?0-aNhd{1 z!I&E)T*?FAH)`LFEpC&e-rR0`{dxWcd8B%%l_wFG$%NeILej&cU6D0d(ZZ7Dg_= zx-8Y&2~$rWjkTcVVq^oGgcnD)$XM0ZAhvHVTGm;&TsSks8O%4cM`P}YOBg4Ay<^ei zT1l)5d&_!KXnsM#x`(;9wdOI~@#s&S5y(GeD^R5qw%EnVMhmiGCT3)dStR5QkK3~G zdTo~Kr0bEdeZ&H^{&lJ@^Gr7S?UrXhl}o(Aa^7Bm&Sn%+4gKFn?tU7gVKi#>nhroP zGMzz)fR>r(N))V#=+n?}_45<5M^aK~NBF=Vk@|l43MV+<0L_K(=BQ+-$zkLo{J*Z8 zv#HACJu_wG&c#cYZe_^y@j10?SB2N$5JVMp9B2q|W}^jqN*TZoS$ORcGA$5bW($50 zJcy~pfZbZ7MujlVq$#Fa4<)$Co7M&a$FgN8A&dV{fYq--;%zHUFee)S*z&mqAF%GA zJ&4b!M09q5X^^Gf9vfD*&Kp4I#aDALK*hfT?lPgbB!?TrPzQ*H|`Z z*Hu+v_%(!`&B+0gJ(HKG)c%Zwdn~_xGHGkdt4;r$?Y9338wKc6 z2;U_77H=_i3PA>(%z^)0aM&J5oB=9`C(-BL(N!+N#V8?YDx@~d;lPDMi1^iG`htyx z<4kttU$p5nD+%d7!_&%@*wZ-)NXYZ-9z#z@>4IJS%x<~!GUtKu8fj9$=<;s?1T9-O zY4qsp4<4vq$!EF+Y@Rf!fUytxKBdv^mI3kxtpJtC-9Ek3;gkIQ=peubOj6> z4#S5?e|GM?PQB*=Lm`yRoy7KSY}|Z(&tR|ws44nGz8Dh;`9-WqCO1*53^zhnQ0Aw} z!${)T#L9Oa-@o@y+{^=Mc@)h&s{|8h@t#4}b?Ns;`~ZuE&1seEcK3i%YlJ-RPJr&A zLx!Mb@Wd*}tI{{*!bSm<=HM`eK$VvVl$J4{qob$CC2EF5)57aus|FD3D-z}A*sM;= z&fY@r3CYNqg4_zFb#7?2sDJ;T!otFcNaiqL_ytr!ba;K@1nqhBc;EmRxrUDVK)#7F z;*Vo*qFxX(I1eUZMna) z_`6ZNb9y6L$f^Ya@j7Xm&C_Yz|K!TK!=t}kLlYGAWleK0$cEmw?RQ(zlTEuCoDZ*> z3kJ<0!2a}P-+X^XQ%lQ%h`{UxB{tmm2TjbwWWH1Bn)SWrhj#eO{{WY-uHUVp z7|?=H+N1l$PyvF|u#ksMsu}VBu8(e)KT0h2r-fQSew<8u&-b*9j0uFU1sllqes7iR zr_8|u;P>?_6)JCpU=0?U_rjHD3kt|qqNlio-VQEl+SI9(OzIZb6KZkU6Y0Eb*T$8# zu-g@en+tCh`Uk=q6>!@3jl{&Va^VX9gk1CT`n#^K0uKzgQ4fov;=iDfa7@#8Ft*ri z4H80K#8n3temVXc6$Ha50$w^bI6fvjNNE1p^ZEG*q~;*hkSx^ImUm&~>|n}aXy@R$ zb#z(-xl7SypS!zhYq;*%jq1*3{qx2R%PXUiebI3sufTo=`tuez1^F?xth@R=L5+$x zc5ENJT}MZ!fOt-hVogn03=?QCmI{9yKYsYYf!wEx;&lS*fX-_#Xf5E0Xs1z=qFlqk z$#DpAjL7fK9fK)ThFWz)IIymugU@iMZ{vrnFu3~q3TnOA8O55dRGn&^9IFoOv%=0! 
zVHgRwF?;p^!78V^a5v7#k;LT6hb75;Pe&AtGLY)}J#gy*o%8}azYkqq_;rk*Nevl9 zf|BI>3C-2byn^GElqZ`^hSBIUg4C=qX5vH~Gl(As!bXwLonNWz6ttSK3t~ctEbD## zisLHU%l<_sM;{JNl-zJZJTNGrkDZ;QMn*39K^h~~CV!%b+g=$jl(ucNvRPOx+S9chhgrVA`+Z5O+%s%Q;e046Ar#aqZr#e9oMzGc&}~F%CEI(~n%Iw3@c;D$d*@!Vw8P literal 0 HcmV?d00001 diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-3-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-3-linux.png new file mode 100644 index 0000000000000000000000000000000000000000..3d01d57a3d77f46e1f64d8312e8e501b019bf977 GIT binary patch literal 7513 zcmcI}WmHt{+wXuP$O97U0}_ICcS(yNAl)D^h$1B+-3&;HbW4tah%|_FDMNQWDAFC$ zl9KPm^Iz{->zs8yykFk^ftk^nz4v|H*Y&F%s;(+eh);o!Kp+Se6=XFL2<&KhuZxEb ze-HoghQJG!lcu~3qPUN06@j3>sVMtE+dXY#(nDN(?E?;v{(oHI1Vn26zEwAjGY{A)s6K{HF!BWItmB~c#2AgGC!a%ciTd8bC(Pa4Lxs< z+;`>J7;o@$)7Kx>6QU!=3nrvIm}?EYi;s`k-WsyFdC(R?IXpb9^!PEcOmA;*Oiaw| z?5s9hMb$iZ!wg*I`1rVi8V7;!PJqh`3JL}U1klsb@eqhPF1(42)s&V-oa@TU2Eu}3 zVg^4b#`R@Lgpt+O)NtyTew~;QXJhN?@9%GJo`aj5T=Rs*)VXdvsddh4XlTgES=<<} zB9Tc*NbvXfH#awTb#*N(E6YmV}9@~qm^=>i=c9Y3n3Kh7RpaXuc*mfvYEzBbHQQBfgvaavwe zQ&Ul~pU7$GE`Ap|wOL9Z=;-9ctde48X7&YcC{vw<&7w>ni9`xe^#1($=X;T%`@N)Y z;|9-)(9q6mhxzU7vdEy|U_SEjuC6ZFH$HNLL4)=A_NcSdQ%ivqsERueUM5@>E#dXO zy}iBf@+fX>Yz#Pvx+@`S(U;cXbqGKD>#wlUQT;%52Zx<6R>Sbipdh(dueR&lwuog= z7iU$_6(K}4+temaz7o$`-sqJeb3T0FAtzvpOHNMid@V zF7+f6T%#8eISLE7f{5nPpB{m_n>GiYog7l$xdX)w?Rx(Fxks_LYr|=de#IDk}4B z5nJM-;sd>xd+c5j4IQ&VkiZL_npJ;ad? z4rTuS-_ph0WVa|NC{&aA7WdbFkO%AN>Iw-9yAAw~p_hLYLE*AK%1eL=l$qUK?%&C) zaorfpRYO-BZGL{yjg5`H-6bh0*&apHZqnY;a&&SMHWmttt+YvY??zMl-}o5XzdV28 zDaOx)K!grk^p}#zz($aekenQB?C$MR@tcQ#5@%siP*Mt$rX8>ppoq{#A|sR5R#q%u zzJxu3d*$RT`t|FVo!iFD%u{poVB%lXO-=dv)-W+XK48 zlBL`ES#IIt;_~+G+as={+ip%yPL!0Ce92VA#L}7S`W(7?VsdzA=O4ppi>KbPl}+Du`%^M0kaNbR@S^o!)(nwD6l*pNvJF=Ixrf#H!dy?PV?o<7c?5Z zLi4Vq#9mXA^j!4*{h#gaxdG-$mp0Iy!u0EZrKbSOd89mPGe23kwU+FWTAJr3%@4?ybZ~)4HD=6j|br z?bnd3#8_b?6tW~UO2^q)SVn7{mXrA{pjCs2VG7oIkhk!2;Mi@5?z|w^CnSvc7Jxf% zp-?nBc6s0jHCIFz!ni^ZB6 z8hLqnv%7!X`~JpZeNFSlMRU9hnv;t6ne3A%DPO(_gta#JH<8EyY!??7lW`eYT3IR3g`oC-9tkR7}!15Pq_2-AZ#C$45Ka_4W0|#auF5TJt!#1!@~pH z->=i`x4XD#dL;4C$Ovq&ua=gUl9H06<8QK)bV(miI37I;mZtqwRMg&M@taj5*W3Cn~hWuoV;44p)di=X#$WX_%OpK$-fN znHd<~G%c>JrKY5Kt`0HU+uQFr>gbFB{JgarSbKU^^c@pvsBDfVbk#rr>Mz91^n8fY zXk=t0V8iC-X1RIqz+*%1GfGDg z|I2ew8xf(G2QNRrlY>Lvr%x-7c#Nr&RZ@jIJ34?@riO>(RC950a9nuS@$vC_d3gZ? 
zr>3ToxQv_mMJ2HTt0kRRqJiluDqP?ukatN*v-0yrJ$^6v`5`1E8ti&-WIh2k%12Ul z_Voz~3K}-sJx-So;p7*uj|s$smrg#z)FXfvd4mB4RmZe!yNh!aSoATJNp76Q?EMQIMip5IkJ zMhAG6#+Gir2Cod#G&RA!^E=f>c1anw*q+oe3h~hEr~~GnYzAwwD1@JVW>lIfCbyme z)-&r^Gsnmi75iyJ92?OXBpF*etA&)QND+A_e;wy0?pL<~`F|zlK-Gi)oE^bn^Y6v> zV(MC&a`W@^$+(dNt9zAFp?K#NB2;~7)Oo0h^~mrr2DNw-?go!xohwz$Z4-(EsH9i) zM9{qF-P8?S=Y`R-=MfPRc$u&rf8V+_69kG!>!b2Fl=K(|bLC>y4uLpITBDC*f-;`j z`P`wToae^7+6>iw`0ybVxXEzKCn`2JcEu&CKO6Q^on`%fGbpc#i3!ijVVbz3%LU;m zj2bgN9u@KM`1Afft(24$U(MMf{=a$wwb+&Lqr{}UtBc~!9nL3T6iiHL?g8u^qc2W# zm89CdHz)3!m`oS9QBhJFB9Xo;1NSr(&CQpJjq8ARYR9yrPCi~g=}i?rD)FfTLbbHC zRA;%h)0~o$^6uR`=+`J3DNqPwKYwx}>-7~CSwYn@GA_dXr*EbAnkOYDii(Ih3$B-z zmZqgq@g*xWkdBOufULm2az#);pmwb9@alC+KYPN&mL^>vA0KUPZNLlQU=9wBG2J(B z-c+M5&Y`Ws!@~jb`!gh<$VN&bVpv;lbeO5qq}?UQS&3b-V{N00fG4bWj$@} zme$rkCnq2}F~1}bQhe36`4-|{2OKOcmQYDJ9|?C8ID$uy+8TXMI5;`MS>O^<+#)6Y z?0e?9vn(QsIyX8kob2!C=I6h8FFrPQ4n_t{z~W*;T3XMdqeXv4uX%YxeLeKlk@q<; zlZ3dqk+HE}q5ikoSHv=a@=8fughWK>`%dP}^d+A@m45y#>T|pYHsf*@eNJ5cr3aj@ z?3U0nG73Ix!3&J`_s8;=j!sJYF*IadwFI;U!YV_;>ro^XE1+R^cDe0%RAHif+DN3N z*MW`N<xZ{w_-oN{=u*Y8qO0e)u$eZ0N(ITBM-4P|6xz(}qCEMM+RH#9b$%+CG`aimIX zdibHIN128Oz1)uud;I(NZvp~>FBKK71vRy`H>gFw%zp%_2qSO<_#R;VXnVG>kb^D; zd_^0ODibsFOH0c%QI~um#e4VeadJjH&awOX8K+-aPEHQ+5wL#KtcQU#ucjusf0;(q znT3lhvh`W$kRqDbefz2S%izF30xFS#j*botCVMgkgc_KoTP6%iv9Z7R_Y;zmwyMKk zzsCMMECDzMxSEQ}`|Q{SBq7LiFRv5$@;@UZoQ9Pgw6w5Ci!Qibi$w@?>h!bnX` z$DKJjdHMFpakQw*uYVmI6e%kx@7?8yA<5fB**@ zTl0^Dg8Y1C6&2Q7k2N$jU{H*V(q6xA{D!31Sh zRZXrN6uu;r-@avz^i;n5>W8(zzmJy*MnUBMeVAJMKpPvI_=JS5sRpIo;hjJq*!w)K zTuI+ka!e^0H^7yHwI5N^wD9#91%|-r;$pi3Ia-9@2fx6{I4Ep(R#s0>k1Qil-sFil zLyQ8mY8rqN9zMRKo0}StvyMMzR#sjfx)csBqiXd>kFH6n!_sWVDg31^UCIT}{q0iU z9J2To?%%)f?Q+)s;nSzlhCf5Y!{g-^YDi=Tj8dUN`Su=b^X#ipo65C|?*9Igf`Xg- zqsa&C?Cd+-^rb&-c8qV`x&?9;Frd4;J4nXV)HD++QCWEq$E*fEVZ~V#siLZS1NDG` zbhc(1p}rp#h9x0ApwDG5G;jU&^mqvb4x9alauo( zPG8dJI56p@wRN#!)s1`F`ugM)6ky3)40Sa$uH)ldyFAn~Ft`J&qRasQ0%|(gG|TPc zc#RuSSOW)iIyCgDq%96kPzE_^sM(7b2{58mR2=N=VM%ybUjs1Avi}kx4Q1%^$m*2A zGtj?0w%zMnT3U1pbVE%N;W~r=`~v`JGk$kGy`)l8OKW3egG5G~P3IJO4AK`mgOHp@ zT1kmrN(vMAl>4fXZwQ5TZj+(b>5pRI;J%17OGVY*9zVPj|iQDk`dYoq{%c}~j%Tz-}P zG+m(GWNqv0D`jQn?S~N{?UGYc=mO!B-9+`b`JE?|;#i1i%U8i?I-taXy?*Wp&I9M7 zVf(-OU(wOg0Tf(XT5@o3m=jJ1-OXw6+3e5n#Q~Of@S76^o6?%{=5oBZN+ad#{OZ-p)Q{-%-jvi-N(zeYt(hHnajv8=sq?}j01vc? z1D%kNkhnOw2)lvz45u;pIukSkGoxtXgCExkB;@Soc3~9Moy3hgJtA?0TvmZfpL9zL#F|*gcf5OUZijG^EpigL zi43gkb!t{_jP;Z*wH6={z}%P=9;^(~gDg@Z z9|wQ@_|ef}xjtH!k&$uz`t{38I1ldnD`WULIMYTSgO$F@$hzwna9-nM&ebMrU#AH1O;ER7m_Q>0}AXdWb zpwOgg3WK4SkT?SiO(|pp>eB1vz;=23)Kez26j~3=Z>JfUfN*kNIu4F?FYqx(gRsvR z*^oYFWxckrge*y2eS5cGDvVSptTk)U66}1gYC8M{I{sW%_OsG9l_hhod+_${+b@3< z`y6gkUv?NdIPf{+>e|{%rMA1&OK>gw(i=oTu3`m>k>h(sUg zEIQ0F`xhcVD{Z4S(pdJv8s)t)Mx+K2rRSc7ZkSvEIT@?Y#jCkbvM6h6SG2Byt!G9sCqaSIYsF>ps^G9Pz23z-{79dOCGf zJM8>Rohdc~ApNB!^Feb+fX`pc(s3G9Zx7`tfxriuT%cECH=Yz6+!lTBA>YMx1f?*D zxP^s`0|UV*0)o!X%|%BO5B6?ULtG@~b9_sR0t~|gS63lq1{!?;D3h-+Ph( literal 0 HcmV?d00001 diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-4-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-4-linux.png new file mode 100644 index 0000000000000000000000000000000000000000..9d881da2855f221ac585377caeb0773a740a7df8 GIT binary patch literal 9895 zcmd6NbyQT**EWb!(v6e|2na|C(jyJhEz;fHjnXQh)KCK=4bojhgLHR;bobET@weW! 
zzVBP>UGLxT{bTOTotbmb+56ege)c{$LRnE78}k_^5)u-&tc-*z64ImZ;JpU=Bk<~E zC(aB`$S$hVVo1e~zj+x_~4?}u`Kn3$esdi6@6wy~y$w7pcxn(9$# zAgv)f*&}Km*xlXz&yZl`Wfe+CMMa#2OhZ(w@!Pey zg9Cd#y@kL)Bs;r(2dbxFO-D(QY5aS=y}kGM{(mOH1*z{5u|MqW*ad@j^QUzQ5jVlf37T3^kQNhlS39^k!;Ea4{|kbVu}X?>wR={)x3<|Fo|F4S^)efC+v%t!qL z#TW5#&*P#sV~x9}rb}M?@udn25AmS=_OK-Z$|JTPZ|#KwjpW#M5xRoD9yb^G-Vn?q z;qaMegZ9NVC9%)d!$&nsF^x_6JUj-K+hLFgU%Q?d8Nr^Tn`@<+_ zV98-~VyV-W$APlp;El^qb*)R~{4ZTyLMB~~ekJKeNWS{x$;o@A917Ci$m=FV#AZEz zzsW)2{xVk{hfxzF+J1*=W)cz_-q%5Pb8vsG<*(O=FKs(%{AF@IXugwE`dW|D@a{c`2V$b1pM>%BoEpP=2W zifAjv&|vAgP#0eHB3+1giZ|IC~>IPX&V{2qZ%U&H(gDw9qN-e zzBQ22L{BdyDN)R(J*unQU%TK#@ivrISEkX{`(Y6#QP9)hASIf~~*_;uxmt$`^04Nl?``ueFk+=S{0Y<+O|$m5w7 z&MgA;jK8sre#AsLr^ipe_jHz(c^Zw^yB*N0ekpMfb$#Rik^26Y8*W{klG3`^Wa8#Z z6o4?|BBUZHO?|L$n_MhsxVyc&U(}y%>(web_^nsiU^kbuXFH)$_@e0dv!|Gevs9tN z3l$bUSEmgW6i~!Xg^TlryF2hQNfL*Vxc7BIUS4lyWw)UFvYQLt3w@YygAjd3cAllx zJ0wO3M1OH?k-FM7zhgX~VyThy_*~)k&cmG12CXUzufYFKV0>KN|6m@QoP5&bz>uc- zR5K;kWntd8tKKce#YI68_66d}@m7e^;5;lcGO_$*BbBl{i+dKlm0m*w*D$d! z3>Q74=*bf_4%_|iq!7WVD7Nfuoa5taTpCTdcjwd$YFRwUtY8-qqBpR1Flp?A5d0%q>pF=nQwD?w-upM#Fu1~<&1Wgv9da(4TBC2HNa1@t9Jxt^@l25%g8r~(qdP71IGd=Ck%u<1l8-2B3Q{O1m zbq;X4e06$L-!LWQx04w6M&BU4R{mDqj+nS+@^{kq>Z+=G<@KWQh-~bUvzyKw3R(5Z zB%7lDJqnpSQadl)E2zYHILrwwb;8kcT3fARV}={qYWv5NP`uto?}>|*c3=x%V`GoW z;%q&QliRvIyyjRwP3hd%Z);$S(IVI_KjpHnZko?7^!GMv_!*^L_CKzE8Qxp%L?O%B zUHxTh{Utp?l7`(2qiSYm#>nUy4i24&;}NV)>Z`}$;y@y2P*O7BvU=L_u@NUt1-7?b z<=Macu`^Zi*qJ#crs={2gK+wEv?gP&nyhQ}m~s=f_= zp9W!k#--2JT;+7}zUFf)_y=0+CELqbwdtDc@7ZS>); z!NZGuIlDVqqRV6(oxNhMA>V$mx;6g@*b!z`+JN>RJsI6(E68 z<2&kT5W`=2_a&5?^@R%eXViBnH!*b%9Rh+dP{O?*wYWv}vuKgM{js{~Aus{{`0-=!hn6P=z z@k)GluF?<=HkI}+LW6ffZZG-w@A}3@tmJJZPBSxK(e9b6>Lxz`4`*%*#fnOzbgqz| zNi<8=S7PY0{*(duikeLu-5AC-t&1wVWGlxJ`M*bLvx>$Pa> z4@t@AQBW=U-$lP38Tdb(S*51v*eT+Ocn@71C%FDDE+t&ZOGVP=xVXSL zqEjjdrl5w$D37?fY-s`n!!G{fX7dW_B*?!+4ZFuqBIo{lb~ad0@aq03Xudzmx=;(( z^P3F^svp^cN&rN<%XZ9=NaHUYv}U z*06h@{9P#00f|;nC}5E94hy3lwd2+!?}3%lk(EAqH>EisA0}a(Nj^g5?&|Tu_D%Cn zXw9gm3ck5Huf-BvLq(Y(AW<}UJ?7f z4@-iZEA-#rRq^v{XJo|6%Oe;VA~fju^st8a39!8Jx6`!}0@81XEb=A+ex_d1$H6^S zC`qB-*RG?-Yapf6g#?ow+U1FSTO%Q%7F7>YddkT;GBw6+C@$i39F_C3{-%T70}7e=p?yeCFEO>g&euUsI9h_WOQAcRoZ3v!Gzs`5FyX`AZu(Y&m*Pc zWGV<%S1XiPdbkm-5LGBlOkqDc=V@@=9`d;?g^M(vceRNb$L{pY|8k?o|6z~$#K4}J z&5SEB2H|5#T{@zBO;tzr zf(Z5|!V{?|v&?BZmp-&A0gCG*{pZ7Z%3Jhytc!znVq!`3yR~OH8CEP>B;8#g4{DrW zpTA*YX;fBKD5}ZK8eMS%$uknW0pO7fd<*q&yMy3C!tCg0^=#f)>9uH4k^0V8b zO?-S&biW-PNF4szuMC-je2wf}pk`gCoH5YPaO#^)hXo>FexEZkRz9HsW9jtu6>@Qf zYf#S52j!lUUmBs#?(QUi{c2UgwZ<*q$PCg2I;~2w5{i|=K!*9Xjf-<_NN{+MXV;cH zh6o0RKQl9G_n+2*1<2O3M;6pXS+?d~@?(MPx4DG8_gmVncQ+k!Y>c_MWo68{iaaDl zK!1IJLf@J!)c@GmZXb+_jb#Zk%7XohyNcLU=!ns$kFb|d- zydTos-Irt0BPrhYbo?D*Z&uIFg)%aB@6F{}+eI#&{fX%TN}b{O7`Q0l9j3D`X=4S^Jl=~ty^^^;v2=~6t^)QLqXQ~)Pi0@TyTI?ZW_OMgQj^G){h86 z2SL7X6O$xKeE94{S}IA_R##y~b}Q{+3WrOiBA$3~UoS12-BLTN>A76Q)w!F8QqC21 z(PSug4;=nvWW>$O3sgfyF|jF?j9!=rM_e73mscWm z>&m&V!_Vtn@)+0tk=r6c5D(Tx9$Zvz6XU#r<(if zxQ=GQi;F#K94;$sF5NuVLd4Hkj24#aCp=aqW@{pj$p>pegfw{AR8*wz&GIrlX zqcbNZs^?^5!(WS{$9HtIkWs7!jQ9VXW59a2Y(|KO=^NCKtE;OgBqEMB)0ZG#Vm==W zxVUzfns)d1C>=#(x7Ib1zqgo<1y~$R%8GBk#cI?Gg1lAXE+s(j)*y&F~Vk<)8sw=FT;Ck_hee>_8f!n5{ zo_+Dd+4g0P{lpv`hDg{H!i^qGAYo_D4<}|B*DNxAiDadqkg~Of%gK44oJ{-jWfKhM zSr=D-y|cBoT3ZW-WI5d=751hV_7m`jisu)@jqLUd^ALu+nsgo>#4q8ZZ{p0LuQ7Uh zo{&G&zgs~A!x2Itjk@yMUTI;{!6BWpURU6b=87fdNlA+r`%v?t3pLf%riCk9Bu81^$miWsQ@Yy!=J0Tka;IZK5n-i6jIWR zGLx|6@vLlP`@x>>w*#Xwu#^<0@j19&``|%!)18;Xz{T~ohR4Bj1rj|kZ^c*AC0+O) z19Q^e8u!5H$r9xTy7!1O-__N=vGMfuOi6!wfQr`TT`XfIuF2O|%y-NCcVKZ2#J0NgM+<9@Z|M*fA_l@IZKB{p`rG*&N7qS 
zT=|H~a?mx1R#ARFSui)-I|TD_+UN&WYqXx8)um=y>b$CEm+x+1$ai>$>BQR~>FLeh z=R0R-XAV^8hytz>-MSj1puU^S%Jh>ZZfk`B41WHb+FGXyi-6q^NOVAnCWHlZ)Y=YU zV#uDK>Of<}bP~xaQ#TF|5ho0%*rHG*`D-PJE2`9|)#(pK*c2aL=v-c2R8LI^&($c& zYmpPyc!>=}TK)Tk;YT66)$_ zI@13dGrstlj7BKnO(q$=EjR@}dDn&t&L5Ee>*KeyU^XVE>5&mmYie&a@Qr0;T8Wzx zS+!y7=WIFX&h9Q!PBf#>U3fGymg!&}vHAFhIf488p-|F`-+Kp7inY z;l#Shn*6`bCPI!R`5YgjellSU3fedr>~6QDIUz32kgTh_drI*TTFyj-iHnPih6Z~e zqkI1RUB;yj5onBo_L!>lNY(W`+j&dyuZ zPq?Kt{<9dV!V&JUe=WIh#{bohLi+!^Q~ygo9%U1_`+XFsxmx}-)KguzPrRPgX7VoM`!2f-5_(>4&>zI=ry=o zSy?H-1SaO^g&>dz?ocB$v+Q>$N(nZTzhz})5WgZ$TZ7QRFV%A7MJKLKJdN&cX))~jMh=6)5N;WtKa1(>Q|nVdd-g?ZaA;_1auV*m{@%;0?(^ri z*(#gklf*0dw5@ zHH3zS`uX`?9xgpxHauDu>^PqIxJrPpR=&_nM=Ne@Qt0W-GG?>+TkS1=#F-?;{=oqb zm=qQkWwp73LrG9j(9fUCV4#l9&e+)4hK2@+14jz4-E6fT1zi5R4K2#&7I7R!B`72$ z^er-y;@L9-Lc(UT%=E1C^3k52o`HdZ@$t&4DxTM`i)(5wb7|6-lw1;Bz@bDn2&CJYrd3kqsc38BEZEbDoFJ!Y-7Zw&ktG%kK%42_4GDA6B z1AH-0)Xc2<^Vu`V+mT@aqL ziwidwSMT7Ui2rTvkAH`Xv^1I+6CGW9Z|{3?adByBi?y!EjyXy>g($< zpFRb11t_1dcau|5DFf!JsyYY5VPaw)E;beB<>3MQkdk(zeJ3O!2sg=H-`=(|2P*vU zr%#`JF7`ONxBwj0)YK>h-C4!{05HzYHDqP|ja7OqtgEXl5oTdxB91O8A;J2wQbteD zZ!ndgnwq*@pAh$)mw|!d4zU&uT=12I?aSAj?yQtgpG2jkr2PEuEchQI!C4G$GAcR- z`UogVHw>=zW?O>Q-g!=6FDRVvYh=^ezh-2VmX)o)t}O%~})73;~#%l};uRGBR~@ps$Dh*x3(;Q-NXHE zz_JhK>j+6nfqC4|wnp%S7skd+3=NOAMl#RNT)DZq(a^#!F5HdntgQ>ae^&;Wa&|uX zCHXbfI_`Zo7(i1=$>A~bFwk|=buPInDRU`r9S)Y7OR}>=piAKUo>)foj6$uFuCA_C zC{BCYr{Q5`2&6eUIJmTw1N4jY)pDz=s=PcsP0h^C&(81wzs?(*m`se1>+0#5{O9oU z@`|__$;!&w97;buIU#W9R&K@WTv*UkQevm0qvPas%DXW!x3mG(l=Djl25hE>(;*08 z13Oz=%L*Pb%k!NHuxn&vV?*~IxF7U4k22d!V!wR(l9iqPe6Q2izAlr(vjO^pgW+SfoAlaZIVU2FvHnI95BGpw(y z(CVL@oT%oj3%Nk!;{i|d^YLNHOnYXotgHkbD*O5RCMHF8m5>=58{027UM;r@I?o1$w!WM$@_J#KCx@6BC_2H*FzLk7I~gH09GpV8D&< z-&-HAcDBR)T$geHhXbx6BqRix0q8a=Dp;;k`lvkvP?V#iT;++#h%~<@ep^bA6QJSK z9mF;Uwz|JgZ#1YelGtZcK`iVqV2R~{04^%R4w#ezCS-!f#Jr&Xo1vkj!@|H2!oGI7 z1k8pIDy*!0qYs=JggdRNuKqkY6$U%@@y2SO20d5xR}cuq|EApk&ie%is*#6>2N0v( zAoLpc4-a$iEI}f_4W#=umseZMhlVDtrZyHnIKKpF`L%8>9ubl2$zNH3WG$_!Cr?SR zG?I?V-=Qlcau~I|1gT0yL=*VjXqqAt(QI0g@}CbHzUv)q}Klmu`ZpKxzNX$jB=5Eu1v06M1Y#8^NOUsP>{KmhmvcpON5Qkub z(l-%nKvq~-cx(<*d3aocUQ9>(#yPmPY@ddeC3IeR+^HM za-6y_J|4V8-_zUsBDk%s4HdU&jV=i>u#N;)34~N7!{#6iOb;Y8fS=uigEU^dXZw0! 
[... base85-encoded image data omitted; the file header and opening entries of the yarn.lock diff that follows are garbled ...]

+
+"@jupyter/web-components@^0.15.3":
+  version "0.15.3"
+  resolved "https://registry.npmjs.org/@jupyter/web-components/-/web-components-0.15.3.tgz#5491b5f6bc4e12f9b9c85bac63cd19514abedef1"
+  integrity sha512-ZrbyXr9mcYowXZ0Ic09ee6W2T3+t7+HP7yYwd2u7wHmQ0etWq5r8+DNdp8uFHiAeTV1kFm6aogNufR9fH34MCw==
+  dependencies:
+    "@microsoft/fast-colors" "^5.3.1"
+    "@microsoft/fast-element" "^1.12.0"
+    "@microsoft/fast-foundation" "^2.49.4"
+    "@microsoft/fast-web-utilities" "^5.4.1"
+
+"@jupyter/ydoc@^2.0.1":
+  version "2.1.1"
+  resolved "https://registry.npmjs.org/@jupyter/ydoc/-/ydoc-2.1.1.tgz#96487cab6d3eb1419a7891384e8fcbefc971d605"
+  integrity sha512-NeEwqXQ2j1OyLq4uezeQmsMiI+Qo5k7dYIMqNByOM7dJp6sHeP0jQ96w7BEc9E4SmrxwcOT4cLvcJWJE8Xun4g==
+  dependencies:
+    "@jupyterlab/nbformat" "^3.0.0 || ^4.0.0-alpha.21 || ^4.0.0"
+    "@lumino/coreutils" "^1.11.0 || ^2.0.0"
+    "@lumino/disposable" "^1.10.0 || ^2.0.0"
+    "@lumino/signaling" "^1.10.0 || ^2.0.0"
+    y-protocols "^1.0.5"
+    yjs "^13.5.40"
+
+"@jupyterlab/application@^4.2.5":
+  version "4.2.5"
+  resolved "https://registry.npmjs.org/@jupyterlab/application/-/application-4.2.5.tgz#a24819d607b46c80c6a366811680cf93eb45ea28"
+  integrity sha512-einksHwj/aki/F3Z3kA9o/8ZfHsDrdvzN753jv8wxPpi8s0aDfMivHafbvV4O4iN02y0Iv2yxsHEpA65SP/Kmg==
+  dependencies:
+    "@fortawesome/fontawesome-free" "^5.12.0"
+    "@jupyterlab/apputils" "^4.3.5"
+    "@jupyterlab/coreutils" "^6.2.5"
+    "@jupyterlab/docregistry" "^4.2.5"
+    "@jupyterlab/rendermime" "^4.2.5"
+    "@jupyterlab/rendermime-interfaces" "^3.10.5"
+    "@jupyterlab/services" "^7.2.5"
+    "@jupyterlab/statedb" "^4.2.5"
+    "@jupyterlab/translation" "^4.2.5"
+    "@jupyterlab/ui-components" "^4.2.5"
+    "@lumino/algorithm" "^2.0.1"
+    "@lumino/application" "^2.3.1"
+    "@lumino/commands" "^2.3.0"
+    "@lumino/coreutils" "^2.1.2"
+    "@lumino/disposable" "^2.1.2"
+    "@lumino/messaging" "^2.0.1"
+    "@lumino/polling" "^2.1.2"
+    "@lumino/properties" "^2.0.1"
+    "@lumino/signaling" "^2.1.2"
+    "@lumino/widgets" "^2.3.2"
+
+"@jupyterlab/apputils@^4.3.5":
+  version "4.3.5"
+  resolved
"https://registry.npmjs.org/@jupyterlab/apputils/-/apputils-4.3.5.tgz#ea335ed016969c5cc844e1922e0d456cafbcdd9b" + integrity sha512-O88fr29bne7X29r742pb4NIQXm6VYRnx9uDzOh5RuAf/oJlW+xeY95EsHWFfXJQEerueuBp5ivTQxhf1ZM1hew== + dependencies: + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/rendermime-interfaces" "^3.10.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/settingregistry" "^4.2.5" + "@jupyterlab/statedb" "^4.2.5" + "@jupyterlab/statusbar" "^4.2.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/algorithm" "^2.0.1" + "@lumino/commands" "^2.3.0" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/domutils" "^2.0.1" + "@lumino/messaging" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/virtualdom" "^2.0.1" + "@lumino/widgets" "^2.3.2" + "@types/react" "^18.0.26" + react "^18.2.0" + sanitize-html "~2.12.1" + +"@jupyterlab/attachments@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/attachments/-/attachments-4.2.5.tgz#55704047a015ce28d32d7696ee9b5f95310abd37" + integrity sha512-W1on7D8ZVfe//Ec/fJOD4OMpsUkRhelEQBEuhJEbgKZ+b5y0awXiCHGsdQ1rkiHt90tLReAPqBDj5+9KavlM7g== + dependencies: + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/rendermime" "^4.2.5" + "@jupyterlab/rendermime-interfaces" "^3.10.5" + "@lumino/disposable" "^2.1.2" + "@lumino/signaling" "^2.1.2" + +"@jupyterlab/cells@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/cells/-/cells-4.2.5.tgz#205d020f6074d7e6b4c086d86d8a669973971fcf" + integrity sha512-4fSJnZbPT6YKTHu/DWgelFFRCo3+sU2di68e0k8b0oincOc4c0mg6hNa0dmxmrDah1IdOlhVOZX7RqKKYFGFEA== + dependencies: + "@codemirror/state" "^6.4.1" + "@codemirror/view" "^6.26.0" + "@jupyter/ydoc" "^2.0.1" + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/attachments" "^4.2.5" + "@jupyterlab/codeeditor" "^4.2.5" + "@jupyterlab/codemirror" "^4.2.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/documentsearch" "^4.2.5" + "@jupyterlab/filebrowser" "^4.2.5" + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/outputarea" "^4.2.5" + "@jupyterlab/rendermime" "^4.2.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/toc" "^6.2.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/algorithm" "^2.0.1" + "@lumino/coreutils" "^2.1.2" + "@lumino/domutils" "^2.0.1" + "@lumino/dragdrop" "^2.1.4" + "@lumino/messaging" "^2.0.1" + "@lumino/polling" "^2.1.2" + "@lumino/signaling" "^2.1.2" + "@lumino/virtualdom" "^2.0.1" + "@lumino/widgets" "^2.3.2" + react "^18.2.0" + +"@jupyterlab/codeeditor@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/codeeditor/-/codeeditor-4.2.5.tgz#16eb06f25e43be37eb4bd6bbb48cecccd18a46a1" + integrity sha512-Y145T9JSD89mBTiD67JxWG738Adv87Z+RWp87W9CJE4ZLsJo1FzyXBvIAAwXN737zoSvnx3lMMY3mmWqUdicZg== + dependencies: + "@codemirror/state" "^6.4.1" + "@jupyter/ydoc" "^2.0.1" + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/statusbar" "^4.2.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/dragdrop" "^2.1.4" + "@lumino/messaging" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + react "^18.2.0" + +"@jupyterlab/codemirror@^4.2.5": + version "4.2.5" + resolved 
"https://registry.npmjs.org/@jupyterlab/codemirror/-/codemirror-4.2.5.tgz#b4bd9c46c8629920d4843d7f052ac5aaa5ac3a3d" + integrity sha512-oz0RApEC8Te4ojee4Mmm1nsEjYCBfXiP4qfoWfJJ5kHS52vwaylH8lWm6Dy/7RNiLdfQA29zR9X+ImD69exVKw== + dependencies: + "@codemirror/autocomplete" "^6.15.0" + "@codemirror/commands" "^6.3.3" + "@codemirror/lang-cpp" "^6.0.2" + "@codemirror/lang-css" "^6.2.1" + "@codemirror/lang-html" "^6.4.8" + "@codemirror/lang-java" "^6.0.1" + "@codemirror/lang-javascript" "^6.2.2" + "@codemirror/lang-json" "^6.0.1" + "@codemirror/lang-markdown" "^6.2.4" + "@codemirror/lang-php" "^6.0.1" + "@codemirror/lang-python" "^6.1.4" + "@codemirror/lang-rust" "^6.0.1" + "@codemirror/lang-sql" "^6.6.1" + "@codemirror/lang-wast" "^6.0.2" + "@codemirror/lang-xml" "^6.1.0" + "@codemirror/language" "^6.10.1" + "@codemirror/legacy-modes" "^6.3.3" + "@codemirror/search" "^6.5.6" + "@codemirror/state" "^6.4.1" + "@codemirror/view" "^6.26.0" + "@jupyter/ydoc" "^2.0.1" + "@jupyterlab/codeeditor" "^4.2.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/documentsearch" "^4.2.5" + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/translation" "^4.2.5" + "@lezer/common" "^1.2.1" + "@lezer/generator" "^1.7.0" + "@lezer/highlight" "^1.2.0" + "@lezer/markdown" "^1.2.0" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/signaling" "^2.1.2" + yjs "^13.5.40" + +"@jupyterlab/console@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/console/-/console-4.2.5.tgz#fced414f069d1937471752e76cfb276e88967b6a" + integrity sha512-qOPi2w+TG53ADXvud93aMsCsQUDJxhA8c2anWQ961g0ctbzqwOtGgb8MiUVGLEkFf0tUY8Fov39NdOJr75aCmQ== + dependencies: + "@jupyter/ydoc" "^2.0.1" + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/cells" "^4.2.5" + "@jupyterlab/codeeditor" "^4.2.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/rendermime" "^4.2.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/dragdrop" "^2.1.4" + "@lumino/messaging" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + +"@jupyterlab/coreutils@^6.2.5": + version "6.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/coreutils/-/coreutils-6.2.5.tgz#906dc8b65c841e8bf1c7589983bab90a12f8d24e" + integrity sha512-P3HniEv3bZ3EvV3zUwCmruR713fclGvSTfsuwFPBgI8M3rNIZYqGQ13xkTun7Zl6DUr2E8mrC/cq9jNwxW33yw== + dependencies: + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/signaling" "^2.1.2" + minimist "~1.2.0" + path-browserify "^1.0.0" + url-parse "~1.5.4" + +"@jupyterlab/debugger@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/debugger/-/debugger-4.2.5.tgz#af2dfc32a33a44cb2a10d20ad9dcf6f376b4f494" + integrity sha512-au/0RMAMdau3adrJ1QLnW3m7eAtzDJfTYLFeTgrcMAyQIyrmWLLX6cNLrRlJuYfnBzuJJV5N/9HGClEQEdxeLw== + dependencies: + "@codemirror/state" "^6.4.1" + "@codemirror/view" "^6.26.0" + "@jupyter/react-components" "^0.15.3" + "@jupyter/ydoc" "^2.0.1" + "@jupyterlab/application" "^4.2.5" + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/cells" "^4.2.5" + "@jupyterlab/codeeditor" "^4.2.5" + "@jupyterlab/codemirror" "^4.2.5" + "@jupyterlab/console" "^4.2.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/docregistry" "^4.2.5" + "@jupyterlab/fileeditor" "^4.2.5" + "@jupyterlab/notebook" "^4.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/rendermime" "^4.2.5" + 
"@jupyterlab/services" "^7.2.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/algorithm" "^2.0.1" + "@lumino/commands" "^2.3.0" + "@lumino/coreutils" "^2.1.2" + "@lumino/datagrid" "^2.3.1" + "@lumino/disposable" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/polling" "^2.1.2" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + "@vscode/debugprotocol" "^1.51.0" + react "^18.2.0" + +"@jupyterlab/docmanager@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/docmanager/-/docmanager-4.2.5.tgz#b33805c103f07ab0414e1a3bab978c99c7dd9e28" + integrity sha512-IjWdEMl9MI6kkioViQza67+uwQiqI5DEGBaLyFFZG3R2NFmo2zA1zNbmVMUomMil/lcnVZN60ZnifFhiRvh1rQ== + dependencies: + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/docregistry" "^4.2.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/statedb" "^4.2.5" + "@jupyterlab/statusbar" "^4.2.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/algorithm" "^2.0.1" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/polling" "^2.1.2" + "@lumino/properties" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + react "^18.2.0" + +"@jupyterlab/docregistry@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/docregistry/-/docregistry-4.2.5.tgz#3027ee9217d295413af3cbb0da111cd1d1a486b6" + integrity sha512-9lKh3PrbhPbm3ui2TRTFgdu7Nxqtdf1kf12HHk+v9rE3+Y8c8k3SrbpNTeBMn03mRZKYOsEe6nYPZkT1R3+ARg== + dependencies: + "@jupyter/ydoc" "^2.0.1" + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/codeeditor" "^4.2.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/rendermime" "^4.2.5" + "@jupyterlab/rendermime-interfaces" "^3.10.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/algorithm" "^2.0.1" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/properties" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + react "^18.2.0" + +"@jupyterlab/documentsearch@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/documentsearch/-/documentsearch-4.2.5.tgz#5e239efddd6ce6c66d39f3d1046fda876e340034" + integrity sha512-hjBprI8HMgr8dc4MMe5SCi24pPXeNYAq3PaqoDeYkVQwmK+aJexRba6k8V2GMXcifZCHbWiDysOTGsXyGTmizQ== + dependencies: + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/commands" "^2.3.0" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/polling" "^2.1.2" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + react "^18.2.0" + +"@jupyterlab/filebrowser@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/filebrowser/-/filebrowser-4.2.5.tgz#2019ebb328d64cc3f7b051f67f297597eda14620" + integrity sha512-nI/Rkcye7/2j4GM0BfMXOrAHNQ7aHBQAMZqiqBte3mPUV/AoGc+E4E4Iv4b4bTs2/nKwgc/8B9ZSgN4LeVEAuQ== + dependencies: + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/docmanager" "^4.2.5" + "@jupyterlab/docregistry" "^4.2.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/statedb" "^4.2.5" + "@jupyterlab/statusbar" "^4.2.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/algorithm" "^2.0.1" + "@lumino/coreutils" "^2.1.2" + 
"@lumino/disposable" "^2.1.2" + "@lumino/domutils" "^2.0.1" + "@lumino/dragdrop" "^2.1.4" + "@lumino/messaging" "^2.0.1" + "@lumino/polling" "^2.1.2" + "@lumino/signaling" "^2.1.2" + "@lumino/virtualdom" "^2.0.1" + "@lumino/widgets" "^2.3.2" + react "^18.2.0" + +"@jupyterlab/fileeditor@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/fileeditor/-/fileeditor-4.2.5.tgz#a69faad6fb0dbc1af4c0936719121ac11c8a6352" + integrity sha512-Daj38DN7jRLMBt+0Zwa7mW0XUWQTk/QOoS7ZInLEIXRcuYz5TcYBdFWFkJdUMr5mvq+RzasSXeaR6ZU5vXZSow== + dependencies: + "@jupyter/ydoc" "^2.0.1" + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/codeeditor" "^4.2.5" + "@jupyterlab/codemirror" "^4.2.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/docregistry" "^4.2.5" + "@jupyterlab/documentsearch" "^4.2.5" + "@jupyterlab/lsp" "^4.2.5" + "@jupyterlab/statusbar" "^4.2.5" + "@jupyterlab/toc" "^6.2.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/commands" "^2.3.0" + "@lumino/coreutils" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/widgets" "^2.3.2" + react "^18.2.0" + regexp-match-indices "^1.0.2" + +"@jupyterlab/galata@^5.0.1": + version "5.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/galata/-/galata-5.2.5.tgz#73b05b354e43ab859ab8e94eee52da4fef7eb1bf" + integrity sha512-H/Igp4HxULaLPYyFWsTqQzvcEXWtNh+jgrVfN0Gr2P3DrpA6Hacb4UQVn6kYdEJdQHcwr/F3iAVieG20/Z1+kw== + dependencies: + "@jupyterlab/application" "^4.2.5" + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/debugger" "^4.2.5" + "@jupyterlab/docmanager" "^4.2.5" + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/notebook" "^4.2.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/settingregistry" "^4.2.5" + "@lumino/coreutils" "^2.1.2" + "@playwright/test" "^1.43.1" + "@stdlib/stats" "~0.0.13" + fs-extra "^10.1.0" + json5 "^2.2.3" + path "~0.12.7" + systeminformation "^5.8.6" + vega "^5.20.0" + vega-lite "^5.6.1" + vega-statistics "^1.7.9" + +"@jupyterlab/lsp@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/lsp/-/lsp-4.2.5.tgz#23502ebe48c93178e978b3f5bc1a3b74c105befd" + integrity sha512-pnIxXDHjJrPJ9+jZo4YqpF06HeIARtICIPpDPnpF/OW9Ukm3/7Votfh4cf30z/eaLMbCGi9PT4k/j8TH6uO2Yw== + dependencies: + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/codeeditor" "^4.2.5" + "@jupyterlab/codemirror" "^4.2.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/docregistry" "^4.2.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/translation" "^4.2.5" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + lodash.mergewith "^4.6.1" + vscode-jsonrpc "^6.0.0" + vscode-languageserver-protocol "^3.17.0" + vscode-ws-jsonrpc "~1.0.2" + +"@jupyterlab/nbformat@^3.0.0 || ^4.0.0-alpha.21 || ^4.0.0", "@jupyterlab/nbformat@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/nbformat/-/nbformat-4.2.5.tgz#ab9f99b3b33c566b4fa50cbe79a6a4ae1af324b0" + integrity sha512-DF8bdlsEziUR5oKUr3Mm0wUx7kHZjlAtEjD6oJ8cOogQqTrMyBnUAgVjPr9QQob5J7qiyzz9aW2DYtaX+jFhng== + dependencies: + "@lumino/coreutils" "^2.1.2" + +"@jupyterlab/notebook@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/notebook/-/notebook-4.2.5.tgz#9515ea4e99a25af863290f5281b9347b0061c56f" + integrity sha512-GcnAV9u1t5SwwCSbvKDpUA5V0Ytxwc6VtvCzuM9FylvLuAH7PE4LgZlzz29fsPneV4ptoMmA5ExSLojHEhQVeQ== + dependencies: + "@jupyter/ydoc" "^2.0.1" + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/cells" 
"^4.2.5" + "@jupyterlab/codeeditor" "^4.2.5" + "@jupyterlab/codemirror" "^4.2.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/docregistry" "^4.2.5" + "@jupyterlab/documentsearch" "^4.2.5" + "@jupyterlab/lsp" "^4.2.5" + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/rendermime" "^4.2.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/settingregistry" "^4.2.5" + "@jupyterlab/statusbar" "^4.2.5" + "@jupyterlab/toc" "^6.2.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/algorithm" "^2.0.1" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/domutils" "^2.0.1" + "@lumino/dragdrop" "^2.1.4" + "@lumino/messaging" "^2.0.1" + "@lumino/polling" "^2.1.2" + "@lumino/properties" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/virtualdom" "^2.0.1" + "@lumino/widgets" "^2.3.2" + react "^18.2.0" + +"@jupyterlab/observables@^5.2.5": + version "5.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/observables/-/observables-5.2.5.tgz#3e54e2b33c2bad32488362306fb4da6d7ebea569" + integrity sha512-EvrvweCpLle3uZciUdhbEUU++o0S7yHBKWtqrKFgb9qxM1qYFMlSMXlJLjE/cEXNiq/aZhsruyXiVKZ1UpVsVQ== + dependencies: + "@lumino/algorithm" "^2.0.1" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/signaling" "^2.1.2" + +"@jupyterlab/outputarea@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/outputarea/-/outputarea-4.2.5.tgz#1fb6e651050ee15b6d395b28ed4584180a5a19a0" + integrity sha512-Oj/GVkEX5zHl/oOXl4Qmw05mdAB/+zEIuY9xsz5agBdeAD3c603PAR7Ep5UqSE4fltdnl+ILjSWyQPJgG4UEGg== + dependencies: + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/rendermime" "^4.2.5" + "@jupyterlab/rendermime-interfaces" "^3.10.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/translation" "^4.2.5" + "@lumino/algorithm" "^2.0.1" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/properties" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + +"@jupyterlab/rendermime-interfaces@^3.10.5": + version "3.10.5" + resolved "https://registry.npmjs.org/@jupyterlab/rendermime-interfaces/-/rendermime-interfaces-3.10.5.tgz#ca63afab7e4070b24562d20575983979785aefef" + integrity sha512-wZq4QhmckMqEjImMxTLFVtj+E0GI82/i0nCuaEI6tReXOqVFDg5wG/ChD/B/g0Xz+uPqt92R5rzZTpzodkDiKA== + dependencies: + "@lumino/coreutils" "^1.11.0 || ^2.1.2" + "@lumino/widgets" "^1.37.2 || ^2.3.2" + +"@jupyterlab/rendermime@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/rendermime/-/rendermime-4.2.5.tgz#b7833d23c938b9e0d94d0dc763e1c20af000a9eb" + integrity sha512-2ZfKM4xNUKbuR1foA7B6nvXg8zRSai1GTEROcF0ievNYn3qYEmdYYW9XlP11+8SoFcM0a4WiTgocRCgVV5Fbiw== + dependencies: + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/rendermime-interfaces" "^3.10.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/translation" "^4.2.5" + "@lumino/coreutils" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + lodash.escape "^4.0.1" + +"@jupyterlab/services@^7.2.5": + version "7.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/services/-/services-7.2.5.tgz#d98bf6999d53005fe978f6151d1060c1f6be5a95" + integrity 
sha512-Ya/jA8p8WOfiPPERinZasigsfSth54nNNWBQUrT2MEitdka3jVsjC3fR9R5XBpYQ59Qkczz782jMfXvaWNfCHQ== + dependencies: + "@jupyter/ydoc" "^2.0.1" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/settingregistry" "^4.2.5" + "@jupyterlab/statedb" "^4.2.5" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/polling" "^2.1.2" + "@lumino/properties" "^2.0.1" + "@lumino/signaling" "^2.1.2" + ws "^8.11.0" + +"@jupyterlab/settingregistry@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/settingregistry/-/settingregistry-4.2.5.tgz#2ce28049c47ced6703c837766e6c2ff0e879b98b" + integrity sha512-RTHwFoldrP8h4hMxZrKafrOt3mLYKAcmUsnExkzKCqHuc3CIOh9hj+eN3gCh1mxjabbP9QIK0/08e89Rp/EG5w== + dependencies: + "@jupyterlab/nbformat" "^4.2.5" + "@jupyterlab/statedb" "^4.2.5" + "@lumino/commands" "^2.3.0" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/signaling" "^2.1.2" + "@rjsf/utils" "^5.13.4" + ajv "^8.12.0" + json5 "^2.2.3" + +"@jupyterlab/statedb@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/statedb/-/statedb-4.2.5.tgz#9007e9b51eb79daae17ceb12abaa4ed3b707170b" + integrity sha512-GGP4NSkVzcn/zYZyjKId8OvDxq+JQTHEmiE2ayzUvvP4BwpGJ2GafY1V+QT5Tl+4SB0AzowpNud6XHUJ28M/tA== + dependencies: + "@lumino/commands" "^2.3.0" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/properties" "^2.0.1" + "@lumino/signaling" "^2.1.2" + +"@jupyterlab/statusbar@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/statusbar/-/statusbar-4.2.5.tgz#eb5c2553f834b31fe86f421f5a3b4b2ddfbf2fbc" + integrity sha512-YjZf9gClZc6HWF2BN0lSFYuHQ/Im8AM1wF+BHKyGh2qgnN4Z34/d87fCGjvX4yY5tLynw0fFiydNZMwQnkWqLA== + dependencies: + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/algorithm" "^2.0.1" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + react "^18.2.0" + +"@jupyterlab/toc@^6.2.5": + version "6.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/toc/-/toc-6.2.5.tgz#f878d9fac248ecafd398684704924c522c402174" + integrity sha512-W2xQkVA7v87MaBcUB7OjzkOUY51bZzLL8S6esSARrNYdh5AY4rFKfnw0sKHAkSmrqB2DQLEZl/1z1GTNmyuYhQ== + dependencies: + "@jupyterlab/apputils" "^4.3.5" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/docregistry" "^4.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/rendermime" "^4.2.5" + "@jupyterlab/rendermime-interfaces" "^3.10.5" + "@jupyterlab/translation" "^4.2.5" + "@jupyterlab/ui-components" "^4.2.5" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/widgets" "^2.3.2" + react "^18.2.0" + +"@jupyterlab/translation@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/translation/-/translation-4.2.5.tgz#32f01cbc556dc0083fbff695f779c017e2cc2ed1" + integrity sha512-r2QABaUl2cm0d+8klviLc+l4+jssRrNUDnZ3hBVluCN0QowmwsovWc7V+R0VWIULzB5Jzf79RfJumXfVOpgUfg== + dependencies: + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/rendermime-interfaces" "^3.10.5" + "@jupyterlab/services" "^7.2.5" + "@jupyterlab/statedb" "^4.2.5" + "@lumino/coreutils" "^2.1.2" + +"@jupyterlab/ui-components@^4.2.5": + version "4.2.5" + resolved "https://registry.npmjs.org/@jupyterlab/ui-components/-/ui-components-4.2.5.tgz#be78e9aaa95164ede17ea7aa5d401ad0f70aca15" + integrity 
sha512-IZ6M4U48IJbthe4nQrOo8+JQeIrUxGQCK/ReKECx9xg9XCsvQ37iA1ZwOYXv5MhlF2bmIF7dXcwI7mpJgUAgOg== + dependencies: + "@jupyter/react-components" "^0.15.3" + "@jupyter/web-components" "^0.15.3" + "@jupyterlab/coreutils" "^6.2.5" + "@jupyterlab/observables" "^5.2.5" + "@jupyterlab/rendermime-interfaces" "^3.10.5" + "@jupyterlab/translation" "^4.2.5" + "@lumino/algorithm" "^2.0.1" + "@lumino/commands" "^2.3.0" + "@lumino/coreutils" "^2.1.2" + "@lumino/disposable" "^2.1.2" + "@lumino/messaging" "^2.0.1" + "@lumino/polling" "^2.1.2" + "@lumino/properties" "^2.0.1" + "@lumino/signaling" "^2.1.2" + "@lumino/virtualdom" "^2.0.1" + "@lumino/widgets" "^2.3.2" + "@rjsf/core" "^5.13.4" + "@rjsf/utils" "^5.13.4" + react "^18.2.0" + react-dom "^18.2.0" + typestyle "^2.0.4" + +"@lezer/common@^1.0.0", "@lezer/common@^1.0.2", "@lezer/common@^1.1.0", "@lezer/common@^1.2.0", "@lezer/common@^1.2.1": + version "1.2.1" + resolved "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz#198b278b7869668e1bebbe687586e12a42731049" + integrity sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ== + +"@lezer/cpp@^1.0.0": + version "1.1.2" + resolved "https://registry.npmjs.org/@lezer/cpp/-/cpp-1.1.2.tgz#1db93b09e011e8a7a08c347c9d5b7749971253bf" + integrity sha512-macwKtyeUO0EW86r3xWQCzOV9/CF8imJLpJlPv3sDY57cPGeUZ8gXWOWNlJr52TVByMV3PayFQCA5SHEERDmVQ== + dependencies: + "@lezer/common" "^1.2.0" + "@lezer/highlight" "^1.0.0" + "@lezer/lr" "^1.0.0" + +"@lezer/css@^1.1.0", "@lezer/css@^1.1.7": + version "1.1.9" + resolved "https://registry.npmjs.org/@lezer/css/-/css-1.1.9.tgz#404563d361422c5a1fe917295f1527ee94845ed1" + integrity sha512-TYwgljcDv+YrV0MZFFvYFQHCfGgbPMR6nuqLabBdmZoFH3EP1gvw8t0vae326Ne3PszQkbXfVBjCnf3ZVCr0bA== + dependencies: + "@lezer/common" "^1.2.0" + "@lezer/highlight" "^1.0.0" + "@lezer/lr" "^1.0.0" + +"@lezer/generator@^1.7.0": + version "1.7.1" + resolved "https://registry.npmjs.org/@lezer/generator/-/generator-1.7.1.tgz#90c1a9de2fb4d5a714216fa659058c7859accaab" + integrity sha512-MgPJN9Si+ccxzXl3OAmCeZuUKw4XiPl4y664FX/hnnyG9CTqUPq65N3/VGPA2jD23D7QgMTtNqflta+cPN+5mQ== + dependencies: + "@lezer/common" "^1.1.0" + "@lezer/lr" "^1.3.0" + +"@lezer/highlight@^1.0.0", "@lezer/highlight@^1.1.3", "@lezer/highlight@^1.2.0": + version "1.2.1" + resolved "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.1.tgz#596fa8f9aeb58a608be0a563e960c373cbf23f8b" + integrity sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA== + dependencies: + "@lezer/common" "^1.0.0" + +"@lezer/html@^1.3.0": + version "1.3.10" + resolved "https://registry.npmjs.org/@lezer/html/-/html-1.3.10.tgz#1be9a029a6fe835c823b20a98a449a630416b2af" + integrity sha512-dqpT8nISx/p9Do3AchvYGV3qYc4/rKr3IBZxlHmpIKam56P47RSHkSF5f13Vu9hebS1jM0HmtJIwLbWz1VIY6w== + dependencies: + "@lezer/common" "^1.2.0" + "@lezer/highlight" "^1.0.0" + "@lezer/lr" "^1.0.0" + +"@lezer/java@^1.0.0": + version "1.1.2" + resolved "https://registry.npmjs.org/@lezer/java/-/java-1.1.2.tgz#01a6ffefa9a692ac6cd492f8b924009edcb903d7" + integrity sha512-3j8X70JvYf0BZt8iSRLXLkt0Ry1hVUgH6wT32yBxH/Xi55nW2VMhc1Az4SKwu4YGSmxCm1fsqDDcHTuFjC8pmg== + dependencies: + "@lezer/common" "^1.2.0" + "@lezer/highlight" "^1.0.0" + "@lezer/lr" "^1.0.0" + +"@lezer/javascript@^1.0.0": + version "1.4.17" + resolved "https://registry.npmjs.org/@lezer/javascript/-/javascript-1.4.17.tgz#8456e369f960c328b9e823342d0c72d704238c31" + integrity 
sha512-bYW4ctpyGK+JMumDApeUzuIezX01H76R1foD6LcRX224FWfyYit/HYxiPGDjXXe/wQWASjCvVGoukTH68+0HIA== + dependencies: + "@lezer/common" "^1.2.0" + "@lezer/highlight" "^1.1.3" + "@lezer/lr" "^1.3.0" + +"@lezer/json@^1.0.0": + version "1.0.2" + resolved "https://registry.npmjs.org/@lezer/json/-/json-1.0.2.tgz#bdc849e174113e2d9a569a5e6fb1a27e2f703eaf" + integrity sha512-xHT2P4S5eeCYECyKNPhr4cbEL9tc8w83SPwRC373o9uEdrvGKTZoJVAGxpOsZckMlEh9W23Pc72ew918RWQOBQ== + dependencies: + "@lezer/common" "^1.2.0" + "@lezer/highlight" "^1.0.0" + "@lezer/lr" "^1.0.0" + +"@lezer/lr@^1.0.0", "@lezer/lr@^1.1.0", "@lezer/lr@^1.3.0": + version "1.4.2" + resolved "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.2.tgz#931ea3dea8e9de84e90781001dae30dea9ff1727" + integrity sha512-pu0K1jCIdnQ12aWNaAVU5bzi7Bd1w54J3ECgANPmYLtQKP0HBj2cE/5coBD66MT10xbtIuUr7tg0Shbsvk0mDA== + dependencies: + "@lezer/common" "^1.0.0" + +"@lezer/markdown@^1.0.0", "@lezer/markdown@^1.2.0": + version "1.3.1" + resolved "https://registry.npmjs.org/@lezer/markdown/-/markdown-1.3.1.tgz#2193012296927f383102d7a2b035f323759315b3" + integrity sha512-DGlzU/i8DC8k0uz1F+jeePrkATl0jWakauTzftMQOcbaMkHbNSRki/4E2tOzJWsVpoKYhe7iTJ03aepdwVUXUA== + dependencies: + "@lezer/common" "^1.0.0" + "@lezer/highlight" "^1.0.0" + +"@lezer/php@^1.0.0": + version "1.0.2" + resolved "https://registry.npmjs.org/@lezer/php/-/php-1.0.2.tgz#7c291631fc1e7f7efe99977522bc48bdc732658a" + integrity sha512-GN7BnqtGRpFyeoKSEqxvGvhJQiI4zkgmYnDk/JIyc7H7Ifc1tkPnUn/R2R8meH3h/aBf5rzjvU8ZQoyiNDtDrA== + dependencies: + "@lezer/common" "^1.2.0" + "@lezer/highlight" "^1.0.0" + "@lezer/lr" "^1.1.0" + +"@lezer/python@^1.1.4": + version "1.1.14" + resolved "https://registry.npmjs.org/@lezer/python/-/python-1.1.14.tgz#a0887086fb7645cd09ada38ed748ca1d968e6363" + integrity sha512-ykDOb2Ti24n76PJsSa4ZoDF0zH12BSw1LGfQXCYJhJyOGiFTfGaX0Du66Ze72R+u/P35U+O6I9m8TFXov1JzsA== + dependencies: + "@lezer/common" "^1.2.0" + "@lezer/highlight" "^1.0.0" + "@lezer/lr" "^1.0.0" + +"@lezer/rust@^1.0.0": + version "1.0.2" + resolved "https://registry.npmjs.org/@lezer/rust/-/rust-1.0.2.tgz#cc9a75605d67182a0e799ac40b1965a61dcc6ef0" + integrity sha512-Lz5sIPBdF2FUXcWeCu1//ojFAZqzTQNRga0aYv6dYXqJqPfMdCAI0NzajWUd4Xijj1IKJLtjoXRPMvTKWBcqKg== + dependencies: + "@lezer/common" "^1.2.0" + "@lezer/highlight" "^1.0.0" + "@lezer/lr" "^1.0.0" + +"@lezer/xml@^1.0.0": + version "1.0.5" + resolved "https://registry.npmjs.org/@lezer/xml/-/xml-1.0.5.tgz#4bb7fd3e527f41b78372477aa753f035b41c3846" + integrity sha512-VFouqOzmUWfIg+tfmpcdV33ewtK+NSwd4ngSe1aG7HFb4BN0ExyY1b8msp+ndFrnlG4V4iC8yXacjFtrwERnaw== + dependencies: + "@lezer/common" "^1.2.0" + "@lezer/highlight" "^1.0.0" + "@lezer/lr" "^1.0.0" + +"@lumino/algorithm@^2.0.1", "@lumino/algorithm@^2.0.2": + version "2.0.2" + resolved "https://registry.npmjs.org/@lumino/algorithm/-/algorithm-2.0.2.tgz#d211da98c92be0271afde96b949982e29178ae48" + integrity sha512-cI8yJ2+QK1yM5ZRU3Kuaw9fJ/64JEDZEwWWp7+U0cd/mvcZ44BGdJJ29w+tIet1QXxPAvnsUleWyQ5qm4qUouA== + +"@lumino/application@^2.3.1": + version "2.4.1" + resolved "https://registry.npmjs.org/@lumino/application/-/application-2.4.1.tgz#6d312a4f8f8e14e61d1b784339f552f4a065f1ee" + integrity sha512-XdCAlNajcsGdK6ep+s6QC70EY+uBnP3kDiWthFLl3EMkvkYwmjOPzIPGlwLEd9Hu0XCO+1Vd2PlpeTnxw5D3/g== + dependencies: + "@lumino/commands" "^2.3.1" + "@lumino/coreutils" "^2.2.0" + "@lumino/widgets" "^2.5.0" + +"@lumino/collections@^2.0.2": + version "2.0.2" + resolved 
"https://registry.npmjs.org/@lumino/collections/-/collections-2.0.2.tgz#c790d8d4555d5dd349ecc817c8bd9e65b7f21c64" + integrity sha512-o0QmfV1D3WhAeA8GI1/YmEPaK89JtHVa764rQ5T0LdbDEwUtUDbjavHs1E/+y66tNTXz9RUJ4D2rcSb9tysYsg== + dependencies: + "@lumino/algorithm" "^2.0.2" + +"@lumino/commands@^2.3.0", "@lumino/commands@^2.3.1": + version "2.3.1" + resolved "https://registry.npmjs.org/@lumino/commands/-/commands-2.3.1.tgz#4ab5ec6521fefd3a9ff7ae0983c645483b9ecd07" + integrity sha512-DpX1kkE4PhILpvK1T4ZnaFb6UP4+YTkdZifvN3nbiomD64O2CTd+wcWIBpZMgy6MMgbVgrE8dzHxHk1EsKxNxw== + dependencies: + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/domutils" "^2.0.2" + "@lumino/keyboard" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/virtualdom" "^2.0.2" + +"@lumino/coreutils@^1.11.0 || ^2.0.0", "@lumino/coreutils@^1.11.0 || ^2.1.2", "@lumino/coreutils@^2.1.2", "@lumino/coreutils@^2.2.0": + version "2.2.0" + resolved "https://registry.npmjs.org/@lumino/coreutils/-/coreutils-2.2.0.tgz#3f9d5c36f2513f067b2563c7ad3b33f43905a4e2" + integrity sha512-x5wnQ/GjWBayJ6vXVaUi6+Q6ETDdcUiH9eSfpRZFbgMQyyM6pi6baKqJBK2CHkCc/YbAEl6ipApTgm3KOJ/I3g== + dependencies: + "@lumino/algorithm" "^2.0.2" + +"@lumino/datagrid@^2.3.1": + version "2.4.1" + resolved "https://registry.npmjs.org/@lumino/datagrid/-/datagrid-2.4.1.tgz#6624d170d2695a707fc92d7364a08514778f2a4b" + integrity sha512-9sJg8UU/hqcKDqO5Rd0Blm2JYKT9nyAK/kuPYeaQc4ZLvtgn4SoOenNaShLDr3Wp54quBM8npAlk1mWG+yYC2g== + dependencies: + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/domutils" "^2.0.2" + "@lumino/dragdrop" "^2.1.5" + "@lumino/keyboard" "^2.0.2" + "@lumino/messaging" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" + +"@lumino/disposable@^1.10.0 || ^2.0.0", "@lumino/disposable@^2.1.2", "@lumino/disposable@^2.1.3": + version "2.1.3" + resolved "https://registry.npmjs.org/@lumino/disposable/-/disposable-2.1.3.tgz#cd2b11d82896eb654c2a528c9ff79a85ccf88d74" + integrity sha512-k5KXy/+T3UItiWHY4WwQawnsJnGo3aNtP5CTRKqo4+tbTNuhc3rTSvygJlNKIbEfIZXW2EWYnwfFDozkYx95eA== + dependencies: + "@lumino/signaling" "^2.1.3" + +"@lumino/domutils@^2.0.1", "@lumino/domutils@^2.0.2": + version "2.0.2" + resolved "https://registry.npmjs.org/@lumino/domutils/-/domutils-2.0.2.tgz#b05973a179db2da71239976f25194f65c0f8eb98" + integrity sha512-2Kp6YHaMNI1rKB0PrALvOsZBHPy2EvVVAvJLWjlCm8MpWOVETjFp0MA9QpMubT9I76aKbaI5s1o1NJyZ8Y99pQ== + +"@lumino/dragdrop@^2.1.4", "@lumino/dragdrop@^2.1.5": + version "2.1.5" + resolved "https://registry.npmjs.org/@lumino/dragdrop/-/dragdrop-2.1.5.tgz#2c178ac3e7520551f08ffb4f31521d87940dcce1" + integrity sha512-zqwR4GakrQBKZOW6S5pj2nfrQDurOErAoe9x3HS3BKLa1AzWA+t9PD5NESOKd81NqXFHjiMirSyFkTUs6pw+uA== + dependencies: + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + +"@lumino/keyboard@^2.0.2": + version "2.0.2" + resolved "https://registry.npmjs.org/@lumino/keyboard/-/keyboard-2.0.2.tgz#8ff5e360b8960716f45b742845bea6f3c5f44760" + integrity sha512-icRUpvswDaFjqmAJNbQRb/aTu6Iugo6Y2oC08TiIwhQtLS9W+Ee9VofdqvbPSvCm6DkyP+DCWMuA3KXZ4V4g4g== + +"@lumino/messaging@^2.0.1", "@lumino/messaging@^2.0.2": + version "2.0.2" + resolved "https://registry.npmjs.org/@lumino/messaging/-/messaging-2.0.2.tgz#133b94d1fa1c67ad60e622a32acaf682faf05aaa" + integrity sha512-2sUF07cYA0f3mDil41Eh5sfBk0aGAH/mOh1I4+vyRUsKyBqp4WTUtpJFd8xVJGAntygxwnebIygkIaXXTIQvxA== + dependencies: + "@lumino/algorithm" "^2.0.2" + "@lumino/collections" "^2.0.2" + 
+"@lumino/polling@^2.1.2": + version "2.1.3" + resolved "https://registry.npmjs.org/@lumino/polling/-/polling-2.1.3.tgz#395fcfba3ada12439d5cc9592b68a89d781a54ed" + integrity sha512-WEZk96ddK6eHEhdDkFUAAA40EOLit86QVbqQqnbPmhdGwFogek26Kq9b1U273LJeirv95zXCATOJAkjRyb7D+w== + dependencies: + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/signaling" "^2.1.3" + +"@lumino/properties@^2.0.1", "@lumino/properties@^2.0.2": + version "2.0.2" + resolved "https://registry.npmjs.org/@lumino/properties/-/properties-2.0.2.tgz#39213876b06a917eae72e8ea38b71daa1c699682" + integrity sha512-b312oA3Bh97WFK8efXejYmC3DVJmvzJk72LQB7H3fXhfqS5jUWvL7MSnNmgcQvGzl9fIhDWDWjhtSTi0KGYYBg== + +"@lumino/signaling@^1.10.0 || ^2.0.0", "@lumino/signaling@^2.1.2", "@lumino/signaling@^2.1.3": + version "2.1.3" + resolved "https://registry.npmjs.org/@lumino/signaling/-/signaling-2.1.3.tgz#612419e6948ce77c00328f9eff5ecd995c960a70" + integrity sha512-9Wd4iMk8F1i6pYjy65bqKuPlzQMicyL9xy1/ccS20kovPcfD074waneL/7BVe+3M8i+fGa3x2qjbWrBzOdTdNw== + dependencies: + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + +"@lumino/virtualdom@^2.0.1", "@lumino/virtualdom@^2.0.2": + version "2.0.2" + resolved "https://registry.npmjs.org/@lumino/virtualdom/-/virtualdom-2.0.2.tgz#927c9803ebd31cd97ca2c599ec9a5d509afb3b2d" + integrity sha512-HYZThOtZSoknjdXA102xpy5CiXtTFCVz45EXdWeYLx3NhuEwuAIX93QBBIhupalmtFlRg1yhdDNV40HxJ4kcXg== + dependencies: + "@lumino/algorithm" "^2.0.2" + +"@lumino/widgets@^1.37.2 || ^2.3.2", "@lumino/widgets@^2.3.2", "@lumino/widgets@^2.5.0": + version "2.5.0" + resolved "https://registry.npmjs.org/@lumino/widgets/-/widgets-2.5.0.tgz#7e37d86dbbc4eed1f85aa199b9fffa4919aa1e3e" + integrity sha512-RSRpc6aIEiuw79jqWUHYWXLJ2GBy7vhwuqgo94UVzg6oeh3XBECX0OvXGjK2k7N2BhmRrIs9bXky7Dm861S6mQ== + dependencies: + "@lumino/algorithm" "^2.0.2" + "@lumino/commands" "^2.3.1" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/domutils" "^2.0.2" + "@lumino/dragdrop" "^2.1.5" + "@lumino/keyboard" "^2.0.2" + "@lumino/messaging" "^2.0.2" + "@lumino/properties" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/virtualdom" "^2.0.2" + +"@microsoft/fast-colors@^5.3.1": + version "5.3.1" + resolved "https://registry.npmjs.org/@microsoft/fast-colors/-/fast-colors-5.3.1.tgz#defc59874176e42316be7e6d24c31885ead8ca56" + integrity sha512-72RZXVfCbwQzvo5sXXkuLXLT7rMeYaSf5r/6ewQiv/trBtqpWRm4DEH2EilHw/iWTBKOXs1qZNQndgUMa5n4LA== + +"@microsoft/fast-element@^1.12.0", "@microsoft/fast-element@^1.13.0": + version "1.13.0" + resolved "https://registry.npmjs.org/@microsoft/fast-element/-/fast-element-1.13.0.tgz#d390ff13697064a48dc6ad6bb332a5f5489f73f8" + integrity sha512-iFhzKbbD0cFRo9cEzLS3Tdo9BYuatdxmCEKCpZs1Cro/93zNMpZ/Y9/Z7SknmW6fhDZbpBvtO8lLh9TFEcNVAQ== + +"@microsoft/fast-foundation@^2.49.4", "@microsoft/fast-foundation@^2.49.6": + version "2.49.6" + resolved "https://registry.npmjs.org/@microsoft/fast-foundation/-/fast-foundation-2.49.6.tgz#0bdee7d28dcf93918075618359b083a676d2891c" + integrity sha512-DZVr+J/NIoskFC1Y6xnAowrMkdbf2d5o7UyWK6gW5AiQ6S386Ql8dw4KcC4kHaeE1yL2CKvweE79cj6ZhJhTvA== + dependencies: + "@microsoft/fast-element" "^1.13.0" + "@microsoft/fast-web-utilities" "^5.4.1" + tabbable "^5.2.0" + tslib "^1.13.0" + +"@microsoft/fast-react-wrapper@^0.3.22": + version "0.3.24" + resolved "https://registry.npmjs.org/@microsoft/fast-react-wrapper/-/fast-react-wrapper-0.3.24.tgz#caed362a95fac88f9715a4b82425c395fba550c2" + integrity 
sha512-sRnSBIKaO42p4mYoYR60spWVkg89wFxFAgQETIMazAm2TxtlsnsGszJnTwVhXq2Uz+XNiD8eKBkfzK5c/i6/Kw== + dependencies: + "@microsoft/fast-element" "^1.13.0" + "@microsoft/fast-foundation" "^2.49.6" + +"@microsoft/fast-web-utilities@^5.4.1": + version "5.4.1" + resolved "https://registry.npmjs.org/@microsoft/fast-web-utilities/-/fast-web-utilities-5.4.1.tgz#8e3082ee2ff2b5467f17e7cb1fb01b0e4906b71f" + integrity sha512-ReWYncndjV3c8D8iq9tp7NcFNc1vbVHvcBFPME2nNFKNbS1XCesYZGlIlf3ot5EmuOXPlrzUHOWzQ2vFpIkqDg== + dependencies: + exenv-es6 "^1.1.1" + +"@playwright/test@^1.32.0", "@playwright/test@^1.43.1": + version "1.47.0" + resolved "https://registry.npmjs.org/@playwright/test/-/test-1.47.0.tgz#69fc55b10754147cc20021afbfa05747d4961bf0" + integrity sha512-SgAdlSwYVpToI4e/IH19IHHWvoijAYH5hu2MWSXptRypLSnzj51PcGD+rsOXFayde4P9ZLi+loXVwArg6IUkCA== + dependencies: + playwright "1.47.0" + +"@rjsf/core@^5.13.4": + version "5.21.0" + resolved "https://registry.npmjs.org/@rjsf/core/-/core-5.21.0.tgz#35c3b2303345dc5e616d18061e5797e8b9ff1b31" + integrity sha512-G8eROGeHVerBRcXyHKSrNeY0C3YeeLINLwZOl4XeDPA0hHxrrM6/kRhZ2l07t96LX/vZmqrNqrMCbWokvxQ/uw== + dependencies: + lodash "^4.17.21" + lodash-es "^4.17.21" + markdown-to-jsx "^7.4.1" + nanoid "^3.3.7" + prop-types "^15.8.1" + +"@rjsf/utils@^5.13.4": + version "5.21.0" + resolved "https://registry.npmjs.org/@rjsf/utils/-/utils-5.21.0.tgz#5626f7e8ad4b8015eacfe1017183230c00a5fe9b" + integrity sha512-rpYylVRHv7m9HG49vBxo6pRP5vXIoiZrdCKISE5+0CMO9bI1R6wF+5m1SZH4AL4C4sbcwA5vApK12slpzNXwTA== + dependencies: + fast-equals "^5.0.1" + json-schema-merge-allof "^0.8.1" + jsonpointer "^5.0.1" + lodash "^4.17.21" + lodash-es "^4.17.21" + react-is "^18.2.0" + +"@stdlib/array@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/array/-/array-0.0.12.tgz#12f40ab95bb36d424cdad991f29fc3cb491ee29e" + integrity sha512-nDksiuvRC1dSTHrf5yOGQmlRwAzSKV8MdFQwFSvLbZGGhi5Y4hExqea5HloLgNVouVs8lnAFi2oubSM4Mc7YAg== + dependencies: + "@stdlib/assert" "^0.0.x" + "@stdlib/blas" "^0.0.x" + "@stdlib/complex" "^0.0.x" + "@stdlib/constants" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/symbol" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/assert@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/assert/-/assert-0.0.12.tgz#1648c9016e5041291f55a6464abcc4069c5103ce" + integrity sha512-38FxFf+ZoQZbdc+m09UsWtaCmzd/2e7im0JOaaFYE7icmRfm+4KiE9BRvBT4tIn7ioLB2f9PsBicKjIsf+tY1w== + dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/cli" "^0.0.x" + "@stdlib/complex" "^0.0.x" + "@stdlib/constants" "^0.0.x" + "@stdlib/fs" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/ndarray" "^0.0.x" + "@stdlib/number" "^0.0.x" + "@stdlib/os" "^0.0.x" + "@stdlib/process" "^0.0.x" + "@stdlib/regexp" "^0.0.x" + "@stdlib/streams" "^0.0.x" + "@stdlib/string" "^0.0.x" + "@stdlib/symbol" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/bigint@^0.0.x": + version "0.0.11" + resolved "https://registry.npmjs.org/@stdlib/bigint/-/bigint-0.0.11.tgz#c416a1d727001c55f4897e6424124199d638f2fd" + integrity sha512-uz0aYDLABAYyqxaCSHYbUt0yPkXYUCR7TrVvHN+UUD3i8FZ02ZKcLO+faKisDyxKEoSFTNtn3Ro8Ir5ebOlVXQ== + dependencies: + "@stdlib/utils" "^0.0.x" + +"@stdlib/blas@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/blas/-/blas-0.0.12.tgz#7e93e42b4621fc6903bf63264f045047333536c2" + integrity sha512-nWY749bWceuoWQ7gz977blCwR7lyQ/rsIXVO4b600h+NFpeA2i/ea7MYC680utIbeu2cnDWHdglBPoK535VAzA== + dependencies: + "@stdlib/array" "^0.0.x" + 
"@stdlib/assert" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/number" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/buffer@^0.0.x": + version "0.0.11" + resolved "https://registry.npmjs.org/@stdlib/buffer/-/buffer-0.0.11.tgz#6137b00845e6c905181cc7ebfae9f7e47c01b0ce" + integrity sha512-Jeie5eDDa1tVuRcuU+cBXI/oOXSmMxUUccZpqXzgYe0IO8QSNtNxv9mUTzJk/m5wH+lmLoDvNxzPpOH9TODjJg== + dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/assert" "^0.0.x" + "@stdlib/process" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/cli@^0.0.x": + version "0.0.10" + resolved "https://registry.npmjs.org/@stdlib/cli/-/cli-0.0.10.tgz#28e2fbe6865d7f5cd15b7dc5846c99bd3b91674f" + integrity sha512-OITGaxG46kwK799+NuOd/+ccosJ9koVuQBC610DDJv0ZJf8mD7sbjGXrmue9C4EOh8MP7Vm/6HN14BojX8oTCg== + dependencies: + "@stdlib/utils" "^0.0.x" + minimist "^1.2.0" + +"@stdlib/complex@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/complex/-/complex-0.0.12.tgz#3afbc190cd0a9b37fc7c6e508c3aa9fda9106944" + integrity sha512-UbZBdaUxT2G+lsTIrVlRZwx2IRY6GXnVILggeejsIVxHSuK+oTyapfetcAv0FJFLP+Rrr+ZzrN4b9G3hBw6NHA== + dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/assert" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/constants@^0.0.x": + version "0.0.11" + resolved "https://registry.npmjs.org/@stdlib/constants/-/constants-0.0.11.tgz#78cd56d6c2982b30264843c3d75bde7125e90cd2" + integrity sha512-cWKy0L9hXHUQTvFzdPkTvZnn/5Pjv7H4UwY0WC1rLt+A5CxFDJKjvnIi9ypSzJS3CAiGl1ZaHCdadoqXhNdkUg== + dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/assert" "^0.0.x" + "@stdlib/number" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/fs@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/fs/-/fs-0.0.12.tgz#662365fd5846a51f075724b4f2888ae88441b70d" + integrity sha512-zcDLbt39EEM3M3wJW6luChS53B8T+TMJkjs2526UpKJ71O0/0adR57cI7PfCpkMd33d05uM7GM+leEj4eks4Cw== + dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/assert" "^0.0.x" + "@stdlib/cli" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/process" "^0.0.x" + "@stdlib/string" "^0.0.x" + "@stdlib/utils" "^0.0.x" + debug "^2.6.9" + +"@stdlib/math@^0.0.x": + version "0.0.11" + resolved "https://registry.npmjs.org/@stdlib/math/-/math-0.0.11.tgz#eb6638bc03a20fbd6727dd5b977ee0170bda4649" + integrity sha512-qI78sR1QqGjHj8k/aAqkZ51Su2fyBvaR/jMKQqcB/ML8bpYpf+QGlGvTty5Qdru/wpqds4kVFOVbWGcNFIV2+Q== + dependencies: + "@stdlib/assert" "^0.0.x" + "@stdlib/constants" "^0.0.x" + "@stdlib/ndarray" "^0.0.x" + "@stdlib/number" "^0.0.x" + "@stdlib/strided" "^0.0.x" + "@stdlib/symbol" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + debug "^2.6.9" + +"@stdlib/ndarray@^0.0.x": + version "0.0.13" + resolved "https://registry.npmjs.org/@stdlib/ndarray/-/ndarray-0.0.13.tgz#2e8fc645e10f56a645a0ab81598808c0e8f43b82" + integrity sha512-Z+U9KJP4U2HWrLtuAXSPvhNetAdqaNLMcliR6S/fz+VPlFDeymRK7omRFMgVQ+1zcAvIgKZGJxpLC3vjiPUYEw== + dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/assert" "^0.0.x" + "@stdlib/bigint" "^0.0.x" + "@stdlib/buffer" "^0.0.x" + "@stdlib/complex" "^0.0.x" + "@stdlib/constants" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/number" "^0.0.x" + "@stdlib/string" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/nlp@^0.0.x": + version "0.0.11" + resolved "https://registry.npmjs.org/@stdlib/nlp/-/nlp-0.0.11.tgz#532ec0f7267b8d639e4c20c6de864e8de8a09054" + integrity sha512-D9avYWANm0Db2W7RpzdSdi5GxRYALGAqUrNnRnnKIO6sMEfr/DvONoAbWruda4QyvSC+0MJNwcEn7+PHhRwYhw== 
+ dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/assert" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/random" "^0.0.x" + "@stdlib/string" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/number@^0.0.x": + version "0.0.10" + resolved "https://registry.npmjs.org/@stdlib/number/-/number-0.0.10.tgz#4030ad8fc3fac19a9afb415c443cee6deea0e65c" + integrity sha512-RyfoP9MlnX4kccvg8qv7vYQPbLdzfS1Mnp/prGOoWhvMG3pyBwFAan34kwFb5IS/zHC3W5EmrgXCV2QWyLg/Kg== + dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/assert" "^0.0.x" + "@stdlib/constants" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/os" "^0.0.x" + "@stdlib/string" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/os@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/os/-/os-0.0.12.tgz#08bbf013c62a7153099fa9cbac086ca1349a4677" + integrity sha512-O7lklZ/9XEzoCmYvzjPh7jrFWkbpOSHGI71ve3dkSvBy5tyiSL3TtivfKsIC+9ZxuEJZ3d3lIjc9e+yz4HVbqQ== + dependencies: + "@stdlib/assert" "^0.0.x" + "@stdlib/cli" "^0.0.x" + "@stdlib/fs" "^0.0.x" + "@stdlib/process" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/process@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/process/-/process-0.0.12.tgz#123325079d89a32f4212f72fb694f8fe3614cf18" + integrity sha512-P0X0TMvkissBE1Wr877Avi2/AxmP7X5Toa6GatHbpJdDg6jQmN4SgPd+NZNp98YtZUyk478c8XSIzMr1krQ20g== + dependencies: + "@stdlib/assert" "^0.0.x" + "@stdlib/buffer" "^0.0.x" + "@stdlib/cli" "^0.0.x" + "@stdlib/fs" "^0.0.x" + "@stdlib/streams" "^0.0.x" + "@stdlib/string" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/random@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/random/-/random-0.0.12.tgz#e819c3abd602ed5559ba800dba751e49c633ff85" + integrity sha512-c5yND4Ahnm9Jx0I+jsKhn4Yrz10D53ALSrIe3PG1qIz3kNFcIPnmvCuNGd+3V4ch4Mbrez55Y8z/ZC5RJh4vJQ== + dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/assert" "^0.0.x" + "@stdlib/blas" "^0.0.x" + "@stdlib/buffer" "^0.0.x" + "@stdlib/cli" "^0.0.x" + "@stdlib/constants" "^0.0.x" + "@stdlib/fs" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/process" "^0.0.x" + "@stdlib/stats" "^0.0.x" + "@stdlib/streams" "^0.0.x" + "@stdlib/symbol" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + debug "^2.6.9" + readable-stream "^2.1.4" + +"@stdlib/regexp@^0.0.x": + version "0.0.13" + resolved "https://registry.npmjs.org/@stdlib/regexp/-/regexp-0.0.13.tgz#80b98361dc7a441b47bc3fa964bb0c826759e971" + integrity sha512-3JT5ZIoq/1nXY+dY+QtkU8/m7oWDeekyItEEXMx9c/AOf0ph8fmvTUGMDNfUq0RetcznFe3b66kFz6Zt4XHviA== + dependencies: + "@stdlib/assert" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/stats@^0.0.x", "@stdlib/stats@~0.0.13": + version "0.0.13" + resolved "https://registry.npmjs.org/@stdlib/stats/-/stats-0.0.13.tgz#87c973f385379d794707c7b5196a173dba8b07e1" + integrity sha512-hm+t32dKbx/L7+7WlQ1o4NDEzV0J4QSnwFBCsIMIAO8+VPxTZ4FxyNERl4oKlS3hZZe4AVKjoOVhBDtgEWrS4g== + dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/assert" "^0.0.x" + "@stdlib/blas" "^0.0.x" + "@stdlib/constants" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/ndarray" "^0.0.x" + "@stdlib/random" "^0.0.x" + "@stdlib/string" "^0.0.x" + "@stdlib/symbol" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/streams@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/streams/-/streams-0.0.12.tgz#07f5ceae5852590afad8e1cb7ce94174becc8739" + integrity sha512-YLUlXwjJNknHp92IkJUdvn5jEQjDckpawKhDLLCoxyh3h5V+w/8+61SH7TMTfKx5lBxKJ8vvtchZh90mIJOAjQ== + dependencies: + "@stdlib/assert" 
"^0.0.x" + "@stdlib/buffer" "^0.0.x" + "@stdlib/cli" "^0.0.x" + "@stdlib/fs" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + debug "^2.6.9" + readable-stream "^2.1.4" + +"@stdlib/strided@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/strided/-/strided-0.0.12.tgz#86ac48e660cb7f64a45cf07e80cbbfe58be21ae1" + integrity sha512-1NINP+Y7IJht34iri/bYLY7TVxrip51f6Z3qWxGHUCH33kvk5H5QqV+RsmFEGbbyoGtdeHrT2O+xA+7R2e3SNg== + dependencies: + "@stdlib/assert" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/ndarray" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/string@^0.0.x": + version "0.0.14" + resolved "https://registry.npmjs.org/@stdlib/string/-/string-0.0.14.tgz#4feea4f9089ab72428eebb65fe7b93d90a7f34f4" + integrity sha512-1ClvUTPysens7GZz3WsrkFYIFs8qDmnXkyAd3zMvTXgRpy7hqrv6nNzLMQj8BHv5cBWaWPOXYd/cZ+JyMnZNQQ== + dependencies: + "@stdlib/assert" "^0.0.x" + "@stdlib/cli" "^0.0.x" + "@stdlib/constants" "^0.0.x" + "@stdlib/fs" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/nlp" "^0.0.x" + "@stdlib/process" "^0.0.x" + "@stdlib/regexp" "^0.0.x" + "@stdlib/streams" "^0.0.x" + "@stdlib/types" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/symbol@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/symbol/-/symbol-0.0.12.tgz#b9f396b0bf269c2985bb7fe99810a8e26d7288c3" + integrity sha512-2IDhpzWVGeLHgsvIsX12RXvf78r7xBkc4QLoRUv3k7Cp61BisR1Ym1p0Tq9PbxT8fknlvLToh9n5RpmESi2d4w== + dependencies: + "@stdlib/assert" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/time@^0.0.x": + version "0.0.14" + resolved "https://registry.npmjs.org/@stdlib/time/-/time-0.0.14.tgz#ea6daa438b1d3b019b99f5091117ee4bcef55d60" + integrity sha512-1gMFCQTabMVIgww+k4g8HHHIhyy1tIlvwT8mC0BHW7Q7TzDAgobwL0bvor+lwvCb5LlDAvNQEpaRgVT99QWGeQ== + dependencies: + "@stdlib/assert" "^0.0.x" + "@stdlib/cli" "^0.0.x" + "@stdlib/constants" "^0.0.x" + "@stdlib/fs" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/string" "^0.0.x" + "@stdlib/utils" "^0.0.x" + +"@stdlib/types@^0.0.x": + version "0.0.14" + resolved "https://registry.npmjs.org/@stdlib/types/-/types-0.0.14.tgz#02d3aab7a9bfaeb86e34ab749772ea22f7b2f7e0" + integrity sha512-AP3EI9/il/xkwUazcoY+SbjtxHRrheXgSbWZdEGD+rWpEgj6n2i63hp6hTOpAB5NipE0tJwinQlDGOuQ1lCaCw== + +"@stdlib/utils@^0.0.x": + version "0.0.12" + resolved "https://registry.npmjs.org/@stdlib/utils/-/utils-0.0.12.tgz#670de5a7b253f04f11a4cba38f790e82393bcb46" + integrity sha512-+JhFpl6l7RSq/xGnbWRQ5dAL90h9ONj8MViqlb7teBZFtePZLMwoRA1wssypFcJ8SFMRWQn7lPmpYVUkGwRSOg== + dependencies: + "@stdlib/array" "^0.0.x" + "@stdlib/assert" "^0.0.x" + "@stdlib/blas" "^0.0.x" + "@stdlib/buffer" "^0.0.x" + "@stdlib/cli" "^0.0.x" + "@stdlib/constants" "^0.0.x" + "@stdlib/fs" "^0.0.x" + "@stdlib/math" "^0.0.x" + "@stdlib/os" "^0.0.x" + "@stdlib/process" "^0.0.x" + "@stdlib/random" "^0.0.x" + "@stdlib/regexp" "^0.0.x" + "@stdlib/streams" "^0.0.x" + "@stdlib/string" "^0.0.x" + "@stdlib/symbol" "^0.0.x" + "@stdlib/time" "^0.0.x" + "@stdlib/types" "^0.0.x" + debug "^2.6.9" + +"@types/estree@^1.0.0": + version "1.0.5" + resolved "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" + integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== + +"@types/geojson@7946.0.4": + version "7946.0.4" + resolved "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.4.tgz#4e049756383c3f055dd8f3d24e63fb543e98eb07" + integrity 
sha512-MHmwBtCb7OCv1DSivz2UNJXPGU/1btAWRKlqJ2saEhVJkpkvqHMMaOpKg0v4sAbDWSQekHGvPVMM8nQ+Jen03Q== + +"@types/prop-types@*": + version "15.7.12" + resolved "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.12.tgz#12bb1e2be27293c1406acb6af1c3f3a1481d98c6" + integrity sha512-5zvhXYtRNRluoE/jAp4GVsSduVUzNWKkOZrCDBWYtE7biZywwdC2AcEzg+cSMLFRfVgeAFqpfNabiPjxFddV1Q== + +"@types/react@^18.0.26": + version "18.3.5" + resolved "https://registry.npmjs.org/@types/react/-/react-18.3.5.tgz#5f524c2ad2089c0ff372bbdabc77ca2c4dbadf8f" + integrity sha512-WeqMfGJLGuLCqHGYRGHxnKrXcTitc6L/nBUWfWPcTarG3t9PsquqUMuVeXZeca+mglY4Vo5GZjCi0A3Or2lnxA== + dependencies: + "@types/prop-types" "*" + csstype "^3.0.2" + +"@vscode/debugprotocol@^1.51.0": + version "1.67.0" + resolved "https://registry.npmjs.org/@vscode/debugprotocol/-/debugprotocol-1.67.0.tgz#cbeef6f9e8e4b5e9a30468faa6f42c96e4d42040" + integrity sha512-vTn5JwZ+LQy2QqT/wUD8Rlrb+7eLo5fsiKIxD5i0BZIuvdRbxTTfGU7+47PsorMrfBzozngIrocKCKS3OVnYyw== + +"@yarnpkg/lockfile@^1.1.0": + version "1.1.0" + resolved "https://registry.npmjs.org/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz#e77a97fbd345b76d83245edcd17d393b1b41fb31" + integrity sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ== + +ajv@^8.12.0: + version "8.17.1" + resolved "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6" + integrity sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g== + dependencies: + fast-deep-equal "^3.1.3" + fast-uri "^3.0.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^4.0.0: + version "4.3.0" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +cliui@^8.0.1: + version "8.0.1" + resolved "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +commander@2: + version "2.20.3" + resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@7: + version "7.2.0" + resolved "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz#a36cb57d0b501ce108e4d20559a150a391d97ab7" + integrity 
sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw== + +commander@^10.0.1: + version "10.0.1" + resolved "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06" + integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug== + +compute-gcd@^1.2.1: + version "1.2.1" + resolved "https://registry.npmjs.org/compute-gcd/-/compute-gcd-1.2.1.tgz#34d639f3825625e1357ce81f0e456a6249d8c77f" + integrity sha512-TwMbxBNz0l71+8Sc4czv13h4kEqnchV9igQZBi6QUaz09dnz13juGnnaWWJTRsP3brxOoxeB4SA2WELLw1hCtg== + dependencies: + validate.io-array "^1.0.3" + validate.io-function "^1.0.2" + validate.io-integer-array "^1.0.0" + +compute-lcm@^1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/compute-lcm/-/compute-lcm-1.1.2.tgz#9107c66b9dca28cefb22b4ab4545caac4034af23" + integrity sha512-OFNPdQAXnQhDSKioX8/XYT6sdUlXwpeMjfd6ApxMJfyZ4GxmLR1xvMERctlYhlHwIiz6CSpBc2+qYKjHGZw4TQ== + dependencies: + compute-gcd "^1.2.1" + validate.io-array "^1.0.3" + validate.io-function "^1.0.2" + validate.io-integer-array "^1.0.0" + +core-util-is@~1.0.0: + version "1.0.3" + resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== + +crelt@^1.0.5: + version "1.0.6" + resolved "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz#7cc898ea74e190fb6ef9dae57f8f81cf7302df72" + integrity sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g== + +csstype@3.0.10: + version "3.0.10" + resolved "https://registry.npmjs.org/csstype/-/csstype-3.0.10.tgz#2ad3a7bed70f35b965707c092e5f30b327c290e5" + integrity sha512-2u44ZG2OcNUO9HDp/Jl8C07x6pU/eTR3ncV91SiK3dhG9TWvRVsCoJw14Ckx5DgWkzGA3waZWO3d7pgqpUI/XA== + +csstype@^3.0.2: + version "3.1.3" + resolved "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" + integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw== + +"d3-array@1 - 3", "d3-array@2 - 3", "d3-array@2.10.0 - 3", "d3-array@2.5.0 - 3", d3-array@3.2.4, d3-array@^3.2.2: + version "3.2.4" + resolved "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz#15fec33b237f97ac5d7c986dc77da273a8ed0bb5" + integrity sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg== + dependencies: + internmap "1 - 2" + +"d3-color@1 - 3", d3-color@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz#395b2833dfac71507f12ac2f7af23bf819de24e2" + integrity sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA== + +d3-delaunay@^6.0.2: + version "6.0.4" + resolved "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz#98169038733a0a5babbeda55054f795bb9e4a58b" + integrity sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A== + dependencies: + delaunator "5" + +"d3-dispatch@1 - 3": + version "3.0.1" + resolved "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz#5fc75284e9c2375c36c839411a0cf550cbfc4d5e" + integrity sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg== + +d3-dsv@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz#c63af978f4d6a0d084a52a673922be2160789b73" + integrity 
sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q== + dependencies: + commander "7" + iconv-lite "0.6" + rw "1" + +d3-force@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz#3e2ba1a61e70888fe3d9194e30d6d14eece155c4" + integrity sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg== + dependencies: + d3-dispatch "1 - 3" + d3-quadtree "1 - 3" + d3-timer "1 - 3" + +"d3-format@1 - 3", d3-format@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz#9260e23a28ea5cb109e93b21a06e24e2ebd55641" + integrity sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA== + +d3-geo-projection@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/d3-geo-projection/-/d3-geo-projection-4.0.0.tgz#dc229e5ead78d31869a4e87cf1f45bd2716c48ca" + integrity sha512-p0bK60CEzph1iqmnxut7d/1kyTmm3UWtPlwdkM31AU+LW+BXazd5zJdoCn7VFxNCHXRngPHRnsNn5uGjLRGndg== + dependencies: + commander "7" + d3-array "1 - 3" + d3-geo "1.12.0 - 3" + +"d3-geo@1.12.0 - 3", d3-geo@^3.1.0: + version "3.1.1" + resolved "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz#6027cf51246f9b2ebd64f99e01dc7c3364033a4d" + integrity sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q== + dependencies: + d3-array "2.5.0 - 3" + +d3-hierarchy@^3.1.2: + version "3.1.2" + resolved "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz#b01cd42c1eed3d46db77a5966cf726f8c09160c6" + integrity sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA== + +"d3-interpolate@1 - 3", "d3-interpolate@1.2.0 - 3", d3-interpolate@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz#3c47aa5b32c5b3dfb56ef3fd4342078a632b400d" + integrity sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g== + dependencies: + d3-color "1 - 3" + +d3-path@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz#22df939032fb5a71ae8b1800d61ddb7851c42526" + integrity sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ== + +"d3-quadtree@1 - 3": + version "3.0.1" + resolved "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz#6dca3e8be2b393c9a9d514dabbd80a92deef1a4f" + integrity sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw== + +d3-scale-chromatic@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz#34c39da298b23c20e02f1a4b239bd0f22e7f1314" + integrity sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ== + dependencies: + d3-color "1 - 3" + d3-interpolate "1 - 3" + +d3-scale@^4.0.2: + version "4.0.2" + resolved "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz#82b38e8e8ff7080764f8dcec77bd4be393689396" + integrity sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ== + dependencies: + d3-array "2.10.0 - 3" + d3-format "1 - 3" + d3-interpolate "1.2.0 - 3" + d3-time "2.1.1 - 3" + d3-time-format "2 - 4" + +d3-shape@^3.2.0: + version "3.2.0" + resolved "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz#a1a839cbd9ba45f28674c69d7f855bcf91dfc6a5" + integrity 
sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA== + dependencies: + d3-path "^3.1.0" + +"d3-time-format@2 - 4", d3-time-format@^4.1.0: + version "4.1.0" + resolved "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz#7ab5257a5041d11ecb4fe70a5c7d16a195bb408a" + integrity sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg== + dependencies: + d3-time "1 - 3" + +"d3-time@1 - 3", "d3-time@2.1.1 - 3", d3-time@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz#9310db56e992e3c0175e1ef385e545e48a9bb5c7" + integrity sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q== + dependencies: + d3-array "2 - 3" + +"d3-timer@1 - 3", d3-timer@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz#6284d2a2708285b1abb7e201eda4380af35e63b0" + integrity sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA== + +debug@^2.6.9: + version "2.6.9" + resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +deepmerge@^4.2.2: + version "4.3.1" + resolved "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== + +delaunator@5: + version "5.0.1" + resolved "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz#39032b08053923e924d6094fe2cde1a99cc51278" + integrity sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw== + dependencies: + robust-predicates "^3.0.2" + +dom-serializer@^2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz#e41b802e1eedf9f6cae183ce5e622d789d7d8e53" + integrity sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg== + dependencies: + domelementtype "^2.3.0" + domhandler "^5.0.2" + entities "^4.2.0" + +domelementtype@^2.3.0: + version "2.3.0" + resolved "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz#5c45e8e869952626331d7aab326d01daf65d589d" + integrity sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw== + +domhandler@^5.0.2, domhandler@^5.0.3: + version "5.0.3" + resolved "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz#cc385f7f751f1d1fc650c21374804254538c7d31" + integrity sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w== + dependencies: + domelementtype "^2.3.0" + +domutils@^3.0.1: + version "3.1.0" + resolved "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz#c47f551278d3dc4b0b1ab8cbb42d751a6f0d824e" + integrity sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA== + dependencies: + dom-serializer "^2.0.0" + domelementtype "^2.3.0" + domhandler "^5.0.3" + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +entities@^4.2.0, entities@^4.4.0: + version "4.5.0" + resolved 
"https://registry.npmjs.org/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48" + integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== + +escalade@^3.1.1: + version "3.2.0" + resolved "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +exenv-es6@^1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/exenv-es6/-/exenv-es6-1.1.1.tgz#80b7a8c5af24d53331f755bac07e84abb1f6de67" + integrity sha512-vlVu3N8d6yEMpMsEm+7sUBAI81aqYYuEvfK0jNqmdb/OPXzzH7QWDDnVjMvDSY47JdHEqx/dfC/q8WkfoTmpGQ== + +fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-equals@^5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/fast-equals/-/fast-equals-5.0.1.tgz#a4eefe3c5d1c0d021aeed0bc10ba5e0c12ee405d" + integrity sha512-WF1Wi8PwwSY7/6Kx0vKXtw8RwuSGoM1bvDaJbu7MxDlR1vovZjIAKrnzyrThgAjm6JDTu0fVgWXDlMGspodfoQ== + +fast-uri@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.1.tgz#cddd2eecfc83a71c1be2cc2ef2061331be8a7134" + integrity sha512-MWipKbbYiYI0UC7cl8m/i/IWTqfC8YXsqjzybjddLsFjStroQzsHXkc73JutMvBiXmOvapk+axIl79ig5t55Bw== + +free-style@3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/free-style/-/free-style-3.1.0.tgz#4e2996029534e6b1731611d843437b9e2f473f08" + integrity sha512-vJujYSIyT30iDoaoeigNAxX4yB1RUrh+N2ZMhIElMr3BvCuGXOw7XNJMEEJkDUeamK2Rnb/IKFGKRKlTWIGRWA== + +fs-extra@^10.1.0: + version "10.1.0" + resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fsevents@2.3.2: + version "2.3.2" + resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +graceful-fs@^4.1.6, graceful-fs@^4.2.0: + version "4.2.11" + resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" + integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== + +htmlparser2@^8.0.0: + version "8.0.2" + resolved "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz#f002151705b383e62433b5cf466f5b716edaec21" + integrity sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA== + dependencies: + domelementtype "^2.3.0" 
+ domhandler "^5.0.3" + domutils "^3.0.1" + entities "^4.4.0" + +iconv-lite@0.6: + version "0.6.3" + resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz#a52f80bf38da1952eb5c681790719871a1a72501" + integrity sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw== + dependencies: + safer-buffer ">= 2.1.2 < 3.0.0" + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== + +inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +"internmap@1 - 2": + version "2.0.3" + resolved "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz#6685f23755e43c524e251d29cbc97248e3061009" + integrity sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-plain-object@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344" + integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q== + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + +isomorphic.js@^0.2.4: + version "0.2.5" + resolved "https://registry.npmjs.org/isomorphic.js/-/isomorphic.js-0.2.5.tgz#13eecf36f2dba53e85d355e11bf9d4208c6f7f88" + integrity sha512-PIeMbHqMt4DnUP3MA/Flc0HElYjMXArsw1qwJZcm9sqR8mq3l8NYizFMty0pWwE/tzIGH3EKK5+jes5mAr85yw== + +"js-tokens@^3.0.0 || ^4.0.0": + version "4.0.0" + resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +json-schema-compare@^0.2.2: + version "0.2.2" + resolved "https://registry.npmjs.org/json-schema-compare/-/json-schema-compare-0.2.2.tgz#dd601508335a90c7f4cfadb6b2e397225c908e56" + integrity sha512-c4WYmDKyJXhs7WWvAWm3uIYnfyWFoIp+JEoX34rctVvEkMYCPGhXtvmFFXiffBbxfZsvQ0RNnV5H7GvDF5HCqQ== + dependencies: + lodash "^4.17.4" + +json-schema-merge-allof@^0.8.1: + version "0.8.1" + resolved "https://registry.npmjs.org/json-schema-merge-allof/-/json-schema-merge-allof-0.8.1.tgz#ed2828cdd958616ff74f932830a26291789eaaf2" + integrity sha512-CTUKmIlPJbsWfzRRnOXz+0MjIqvnleIXwFTzz+t9T86HnYX/Rozria6ZVGLktAU9e+NygNljveP+yxqtQp/Q4w== + dependencies: + compute-lcm "^1.1.2" + json-schema-compare "^0.2.2" + lodash "^4.17.20" + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + 
+json-stringify-pretty-compact@~3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/json-stringify-pretty-compact/-/json-stringify-pretty-compact-3.0.0.tgz#f71ef9d82ef16483a407869556588e91b681d9ab" + integrity sha512-Rc2suX5meI0S3bfdZuA7JMFBGkJ875ApfVyq2WHELjBiiG22My/l7/8zPpH/CfFVQHuVLd8NLR0nv6vi0BYYKA== + +json5@^2.2.3: + version "2.2.3" + resolved "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" + integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== + +jsonfile@^6.0.1: + version "6.1.0" + resolved "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" + integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== + dependencies: + universalify "^2.0.0" + optionalDependencies: + graceful-fs "^4.1.6" + +jsonpointer@^5.0.1: + version "5.0.1" + resolved "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.1.tgz#2110e0af0900fd37467b5907ecd13a7884a1b559" + integrity sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== + +lib0@^0.2.85, lib0@^0.2.86: + version "0.2.97" + resolved "https://registry.npmjs.org/lib0/-/lib0-0.2.97.tgz#a68d7c88577ac1910cdbe5204bac070f07c8e0b4" + integrity sha512-Q4d1ekgvufi9FiHkkL46AhecfNjznSL9MRNoJRQ76gBHS9OqU2ArfQK0FvBpuxgWeJeNI0LVgAYMIpsGeX4gYg== + dependencies: + isomorphic.js "^0.2.4" + +lodash-es@^4.17.21: + version "4.17.21" + resolved "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz#43e626c46e6591b7750beb2b50117390c609e3ee" + integrity sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw== + +lodash.escape@^4.0.1: + version "4.0.1" + resolved "https://registry.npmjs.org/lodash.escape/-/lodash.escape-4.0.1.tgz#c9044690c21e04294beaa517712fded1fa88de98" + integrity sha512-nXEOnb/jK9g0DYMr1/Xvq6l5xMD7GDG55+GSYIYmS0G4tBk/hURD4JR9WCavs04t33WmJx9kCyp9vJ+mr4BOUw== + +lodash.mergewith@^4.6.1: + version "4.6.2" + resolved "https://registry.npmjs.org/lodash.mergewith/-/lodash.mergewith-4.6.2.tgz#617121f89ac55f59047c7aec1ccd6654c6590f55" + integrity sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ== + +lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4: + version "4.17.21" + resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +markdown-to-jsx@^7.4.1: + version "7.5.0" + resolved "https://registry.npmjs.org/markdown-to-jsx/-/markdown-to-jsx-7.5.0.tgz#42ece0c71e842560a7d8bd9f81e7a34515c72150" + integrity sha512-RrBNcMHiFPcz/iqIj0n3wclzHXjwS7mzjBNWecKKVhNTIxQepIix6Il/wZCn2Cg5Y1ow2Qi84+eJrryFRWBEWw== + +minimist@^1.2.0, minimist@~1.2.0: + version "1.2.8" + resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" + integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== + +ms@2.0.0: + version "2.0.0" + resolved 
"https://registry.npmjs.org/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +nanoid@^3.3.7: + version "3.3.7" + resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8" + integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g== + +node-fetch@^2.6.7: + version "2.7.0" + resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d" + integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A== + dependencies: + whatwg-url "^5.0.0" + +object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +parse-srcset@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/parse-srcset/-/parse-srcset-1.0.2.tgz#f2bd221f6cc970a938d88556abc589caaaa2bde1" + integrity sha512-/2qh0lav6CmI15FzA3i/2Bzk2zCgQhGMkvhOhKNcBVQ1ldgpbfiNTVslmooUmWJcADi1f1kIeynbDRVzNlfR6Q== + +path-browserify@^1.0.0: + version "1.0.1" + resolved "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz#d98454a9c3753d5790860f16f68867b9e46be1fd" + integrity sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g== + +path@~0.12.7: + version "0.12.7" + resolved "https://registry.npmjs.org/path/-/path-0.12.7.tgz#d4dc2a506c4ce2197eb481ebfcd5b36c0140b10f" + integrity sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q== + dependencies: + process "^0.11.1" + util "^0.10.3" + +picocolors@^1.0.1: + version "1.1.0" + resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz#5358b76a78cde483ba5cef6a9dc9671440b27d59" + integrity sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw== + +playwright-core@1.47.0: + version "1.47.0" + resolved "https://registry.npmjs.org/playwright-core/-/playwright-core-1.47.0.tgz#b54ec060fd83e5c2e46b63986b5ebb5e96ace427" + integrity sha512-1DyHT8OqkcfCkYUD9zzUTfg7EfTd+6a8MkD/NWOvjo0u/SCNd5YmY/lJwFvUZOxJbWNds+ei7ic2+R/cRz/PDg== + +playwright@1.47.0: + version "1.47.0" + resolved "https://registry.npmjs.org/playwright/-/playwright-1.47.0.tgz#fb9b028883fad11362f9ff63ce7ba44bda0bf626" + integrity sha512-jOWiRq2pdNAX/mwLiwFYnPHpEZ4rM+fRSQpRHwEwZlP2PUANvL3+aJOF/bvISMhFD30rqMxUB4RJx9aQbfh4Ww== + dependencies: + playwright-core "1.47.0" + optionalDependencies: + fsevents "2.3.2" + +postcss@^8.3.11: + version "8.4.45" + resolved "https://registry.npmjs.org/postcss/-/postcss-8.4.45.tgz#538d13d89a16ef71edbf75d895284ae06b79e603" + integrity sha512-7KTLTdzdZZYscUc65XmjFiB73vBhBfbPztCYdUNvlaso9PrzjzcmjqBPR0lNGkcVlcO4BjiO5rK/qNz+XAen1Q== + dependencies: + nanoid "^3.3.7" + picocolors "^1.0.1" + source-map-js "^1.2.0" + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +process@^0.11.1: + version "0.11.10" + resolved "https://registry.npmjs.org/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" + 
integrity sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A== + +prop-types@^15.8.1: + version "15.8.1" + resolved "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +querystringify@^2.1.1: + version "2.2.0" + resolved "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" + integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== + +react-dom@^18.2.0: + version "18.3.1" + resolved "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz#c2265d79511b57d479b3dd3fdfa51536494c5cb4" + integrity sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw== + dependencies: + loose-envify "^1.1.0" + scheduler "^0.23.2" + +react-is@^16.13.1: + version "16.13.1" + resolved "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react-is@^18.2.0: + version "18.3.1" + resolved "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz#e83557dc12eae63a99e003a46388b1dcbb44db7e" + integrity sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg== + +"react@>=17.0.0 <19.0.0", react@^18.2.0: + version "18.3.1" + resolved "https://registry.npmjs.org/react/-/react-18.3.1.tgz#49ab892009c53933625bd16b2533fc754cab2891" + integrity sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ== + dependencies: + loose-envify "^1.1.0" + +readable-stream@^2.1.4: + version "2.3.8" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz#91125e8042bba1b9887f49345f6277027ce8be9b" + integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +regexp-match-indices@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/regexp-match-indices/-/regexp-match-indices-1.0.2.tgz#cf20054a6f7d5b3e116a701a7b00f82889d10da6" + integrity sha512-DwZuAkt8NF5mKwGGER1EGh2PRqyvhRhhLviH+R8y8dIuaQROlUfXjt4s9ZTXstIsSkptf06BSvwcEmmfheJJWQ== + dependencies: + regexp-tree "^0.1.11" + +regexp-tree@^0.1.11: + version "0.1.27" + resolved "https://registry.npmjs.org/regexp-tree/-/regexp-tree-0.1.27.tgz#2198f0ef54518ffa743fe74d983b56ffd631b6cd" + integrity sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA== + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + 
+requires-port@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" + integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== + +robust-predicates@^3.0.2: + version "3.0.2" + resolved "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz#d5b28528c4824d20fc48df1928d41d9efa1ad771" + integrity sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg== + +rw@1: + version "1.3.3" + resolved "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz#3f862dfa91ab766b14885ef4d01124bfda074fb4" + integrity sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ== + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +"safer-buffer@>= 2.1.2 < 3.0.0": + version "2.1.2" + resolved "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sanitize-html@~2.12.1: + version "2.12.1" + resolved "https://registry.npmjs.org/sanitize-html/-/sanitize-html-2.12.1.tgz#280a0f5c37305222921f6f9d605be1f6558914c7" + integrity sha512-Plh+JAn0UVDpBRP/xEjsk+xDCoOvMBwQUf/K+/cBAVuTbtX8bj2VB7S1sL1dssVpykqp0/KPSesHrqXtokVBpA== + dependencies: + deepmerge "^4.2.2" + escape-string-regexp "^4.0.0" + htmlparser2 "^8.0.0" + is-plain-object "^5.0.0" + parse-srcset "^1.0.2" + postcss "^8.3.11" + +scheduler@^0.23.2: + version "0.23.2" + resolved "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz#414ba64a3b282892e944cf2108ecc078d115cdc3" + integrity sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ== + dependencies: + loose-envify "^1.1.0" + +semver@^7.5.0: + version "7.6.3" + resolved "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz#980f7b5550bc175fb4dc09403085627f9eb33143" + integrity sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A== + +source-map-js@^1.2.0: + version "1.2.1" + resolved "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46" + integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA== + +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity 
sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +style-mod@^4.0.0, style-mod@^4.1.0: + version "4.1.2" + resolved "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz#ca238a1ad4786520f7515a8539d5a63691d7bf67" + integrity sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw== + +systeminformation@^5.8.6: + version "5.23.5" + resolved "https://registry.npmjs.org/systeminformation/-/systeminformation-5.23.5.tgz#bf7544be42c42f7d14a81b02aa0365e9ca1f5266" + integrity sha512-PEpJwhRYxZgBCAlWZhWIgfMTjXLqfcaZ1pJsJn9snWNfBW/Z1YQg1mbIUSWrEV3ErAHF7l/OoVLQeaZDlPzkpA== + +tabbable@^5.2.0: + version "5.3.3" + resolved "https://registry.npmjs.org/tabbable/-/tabbable-5.3.3.tgz#aac0ff88c73b22d6c3c5a50b1586310006b47fbf" + integrity sha512-QD9qKY3StfbZqWOPLp0++pOrAVb/HbUi5xCc8cUo4XjP19808oaMiDzn0leBY5mCespIBM0CIZePzZjgzR83kA== + +topojson-client@^3.1.0: + version "3.1.0" + resolved "https://registry.npmjs.org/topojson-client/-/topojson-client-3.1.0.tgz#22e8b1ed08a2b922feeb4af6f53b6ef09a467b99" + integrity sha512-605uxS6bcYxGXw9qi62XyrV6Q3xwbndjachmNxu8HWTtVPxZfEJN9fd/SZS1Q54Sn2y0TMyMxFj/cJINqGHrKw== + dependencies: + commander "2" + +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== + +tslib@^1.13.0: + version "1.14.1" + resolved "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.5.0: + version "2.7.0" + resolved "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz#d9b40c5c40ab59e8738f297df3087bf1a2690c01" + integrity sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA== + +tslib@~2.6.3: + version "2.6.3" + resolved "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz#0438f810ad7a9edcde7a241c3d80db693c8cbfe0" + integrity sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ== + +typestyle@^2.0.4: + version "2.4.0" + resolved "https://registry.npmjs.org/typestyle/-/typestyle-2.4.0.tgz#df5bae6ff15093f5ce51f0caac5ef79428f64e78" + integrity sha512-/d1BL6Qi+YlMLEydnUEB8KL/CAjAN8cyt3/UyGnOyBrWf7bLGcR/6yhmsaUstO2IcYwZfagjE7AIzuI2vUW9mg== + dependencies: + csstype "3.0.10" + free-style "3.1.0" + +universalify@^2.0.0: + version "2.0.1" + resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz#168efc2180964e6386d061e094df61afe239b18d" + integrity sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw== + +url-parse@~1.5.4: + version "1.5.10" + resolved "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz#9d3c2f736c1d75dd3bd2be507dcc111f1e2ea9c1" + integrity sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ== + dependencies: + querystringify "^2.1.1" + requires-port "^1.0.0" + +util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +util@^0.10.3: + version "0.10.4" + resolved 
"https://registry.npmjs.org/util/-/util-0.10.4.tgz#3aa0125bfe668a4672de58857d3ace27ecb76901" + integrity sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A== + dependencies: + inherits "2.0.3" + +validate.io-array@^1.0.3: + version "1.0.6" + resolved "https://registry.npmjs.org/validate.io-array/-/validate.io-array-1.0.6.tgz#5b5a2cafd8f8b85abb2f886ba153f2d93a27774d" + integrity sha512-DeOy7CnPEziggrOO5CZhVKJw6S3Yi7e9e65R1Nl/RTN1vTQKnzjfvks0/8kQ40FP/dsjRAOd4hxmJ7uLa6vxkg== + +validate.io-function@^1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/validate.io-function/-/validate.io-function-1.0.2.tgz#343a19802ed3b1968269c780e558e93411c0bad7" + integrity sha512-LlFybRJEriSuBnUhQyG5bwglhh50EpTL2ul23MPIuR1odjO7XaMLFV8vHGwp7AZciFxtYOeiSCT5st+XSPONiQ== + +validate.io-integer-array@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/validate.io-integer-array/-/validate.io-integer-array-1.0.0.tgz#2cabde033293a6bcbe063feafe91eaf46b13a089" + integrity sha512-mTrMk/1ytQHtCY0oNO3dztafHYyGU88KL+jRxWuzfOmQb+4qqnWmI+gykvGp8usKZOM0H7keJHEbRaFiYA0VrA== + dependencies: + validate.io-array "^1.0.3" + validate.io-integer "^1.0.4" + +validate.io-integer@^1.0.4: + version "1.0.5" + resolved "https://registry.npmjs.org/validate.io-integer/-/validate.io-integer-1.0.5.tgz#168496480b95be2247ec443f2233de4f89878068" + integrity sha512-22izsYSLojN/P6bppBqhgUDjCkr5RY2jd+N2a3DCAUey8ydvrZ/OkGvFPR7qfOpwR2LC5p4Ngzxz36g5Vgr/hQ== + dependencies: + validate.io-number "^1.0.3" + +validate.io-number@^1.0.3: + version "1.0.3" + resolved "https://registry.npmjs.org/validate.io-number/-/validate.io-number-1.0.3.tgz#f63ffeda248bf28a67a8d48e0e3b461a1665baf8" + integrity sha512-kRAyotcbNaSYoDnXvb4MHg/0a1egJdLwS6oJ38TJY7aw9n93Fl/3blIXdyYvPOp55CNxywooG/3BcrwNrBpcSg== + +vega-canvas@^1.2.7: + version "1.2.7" + resolved "https://registry.npmjs.org/vega-canvas/-/vega-canvas-1.2.7.tgz#cf62169518f5dcd91d24ad352998c2248f8974fb" + integrity sha512-OkJ9CACVcN9R5Pi9uF6MZBF06pO6qFpDYHWSKBJsdHP5o724KrsgR6UvbnXFH82FdsiTOff/HqjuaG8C7FL+9Q== + +vega-crossfilter@~4.1.2: + version "4.1.2" + resolved "https://registry.npmjs.org/vega-crossfilter/-/vega-crossfilter-4.1.2.tgz#810281c279b3592310f12814bc61206dd42ca61d" + integrity sha512-J7KVEXkpfRJBfRvwLxn5vNCzQCNkrnzmDvkvwhuiwT4gPm5sk7MK5TuUP8GCl/iKYw+kWeVXEtrVHwWtug+bcQ== + dependencies: + d3-array "^3.2.2" + vega-dataflow "^5.7.6" + vega-util "^1.17.2" + +vega-dataflow@^5.7.6, vega-dataflow@~5.7.6: + version "5.7.6" + resolved "https://registry.npmjs.org/vega-dataflow/-/vega-dataflow-5.7.6.tgz#21dfad9120cb18d9aeaed578658670839d1adc95" + integrity sha512-9Md8+5iUC1MVKPKDyZ7pCEHk6I9am+DgaMzZqo/27O/KI4f23/WQXPyuI8jbNmc/mkm340P0TKREmzL5M7+2Dg== + dependencies: + vega-format "^1.1.2" + vega-loader "^4.5.2" + vega-util "^1.17.2" + +vega-encode@~4.10.1: + version "4.10.1" + resolved "https://registry.npmjs.org/vega-encode/-/vega-encode-4.10.1.tgz#1656e20396db99c414f495704ef3d9cff99631df" + integrity sha512-d25nVKZDrg109rC65M8uxE+7iUrTxktaqgK4fU3XZBgpWlh1K4UbU5nDag7kiHVVN4tKqwgd+synEotra9TiVQ== + dependencies: + d3-array "^3.2.2" + d3-interpolate "^3.0.1" + vega-dataflow "^5.7.6" + vega-scale "^7.4.1" + vega-util "^1.17.2" + +vega-event-selector@^3.0.1, vega-event-selector@~3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/vega-event-selector/-/vega-event-selector-3.0.1.tgz#b99e92147b338158f8079d81b28b2e7199c2e259" + integrity sha512-K5zd7s5tjr1LiOOkjGpcVls8GsH/f2CWCrWcpKy74gTCp+llCdwz0Enqo013ZlGaRNjfgD/o1caJRt3GSaec4A== + 
+vega-expression@^5.0.1, vega-expression@^5.1.1, vega-expression@~5.1.1: + version "5.1.1" + resolved "https://registry.npmjs.org/vega-expression/-/vega-expression-5.1.1.tgz#9b2d287a1f34d990577c9798ae68ec88453815ef" + integrity sha512-zv9L1Hm0KHE9M7mldHyz8sXbGu3KmC0Cdk7qfHkcTNS75Jpsem6jkbu6ZAwx5cNUeW91AxUQOu77r4mygq2wUQ== + dependencies: + "@types/estree" "^1.0.0" + vega-util "^1.17.2" + +vega-force@~4.2.1: + version "4.2.1" + resolved "https://registry.npmjs.org/vega-force/-/vega-force-4.2.1.tgz#bdce6ec8572867b4ff2fb7e09d2894798c5358ec" + integrity sha512-2BcuuqFr77vcCyKfcpedNFeYMxi+XEFCrlgLWNx7YV0PI8pdP5y/yPkzyuE9Tb894+KkRAvfQHZRAshcnFNcMw== + dependencies: + d3-force "^3.0.0" + vega-dataflow "^5.7.6" + vega-util "^1.17.2" + +vega-format@^1.1.2, vega-format@~1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/vega-format/-/vega-format-1.1.2.tgz#d344ba8a2680144e92127459c149a4181e9e7f84" + integrity sha512-0kUfAj0dg0U6GcEY0Kp6LiSTCZ8l8jl1qVdQyToMyKmtZg/q56qsiJQZy3WWRr1MtWkTIZL71xSJXgjwjeUaAw== + dependencies: + d3-array "^3.2.2" + d3-format "^3.1.0" + d3-time-format "^4.1.0" + vega-time "^2.1.2" + vega-util "^1.17.2" + +vega-functions@^5.15.0, vega-functions@~5.15.0: + version "5.15.0" + resolved "https://registry.npmjs.org/vega-functions/-/vega-functions-5.15.0.tgz#a7905e1dd6457efe265dbf954cbc0a5721c484b0" + integrity sha512-pCqmm5efd+3M65jrJGxEy3UGuRksmK6DnWijoSNocnxdCBxez+yqUUVX9o2pN8VxMe3648vZnR9/Vk5CXqRvIQ== + dependencies: + d3-array "^3.2.2" + d3-color "^3.1.0" + d3-geo "^3.1.0" + vega-dataflow "^5.7.6" + vega-expression "^5.1.1" + vega-scale "^7.4.1" + vega-scenegraph "^4.13.0" + vega-selections "^5.4.2" + vega-statistics "^1.9.0" + vega-time "^2.1.2" + vega-util "^1.17.2" + +vega-geo@~4.4.2: + version "4.4.2" + resolved "https://registry.npmjs.org/vega-geo/-/vega-geo-4.4.2.tgz#da4a08ee39c9488bfc4fe6493779f584dd8bb412" + integrity sha512-unuV/UxUHf6UJu6GYxMZonC3SZlMfFXYLOkgEsRSvmsMPt3+CVv8FmG88dXNRUJUrdROrJepgecqx0jOwMSnGA== + dependencies: + d3-array "^3.2.2" + d3-color "^3.1.0" + d3-geo "^3.1.0" + vega-canvas "^1.2.7" + vega-dataflow "^5.7.6" + vega-projection "^1.6.1" + vega-statistics "^1.9.0" + vega-util "^1.17.2" + +vega-hierarchy@~4.1.2: + version "4.1.2" + resolved "https://registry.npmjs.org/vega-hierarchy/-/vega-hierarchy-4.1.2.tgz#e42938c42527b392b110b1e3bf89eaa456dba1b8" + integrity sha512-m+xDtT5092YPSnV0rdTLW+AWmoCb+A54JQ66MUJwiDBpKxvfKnTiQeuiWDU2YudjUoXZN9EBOcI6QHF8H2Lu2A== + dependencies: + d3-hierarchy "^3.1.2" + vega-dataflow "^5.7.6" + vega-util "^1.17.2" + +vega-label@~1.3.0: + version "1.3.0" + resolved "https://registry.npmjs.org/vega-label/-/vega-label-1.3.0.tgz#21b3e5ef40e63f51ac987a449d183068c4961503" + integrity sha512-EfSFSCWAwVPsklM5g0gUEuohALgryuGC/SKMmsOH7dYT/bywmLBZhLVbrE+IHJAUauoGrMhYw1mqnXL/0giJBg== + dependencies: + vega-canvas "^1.2.7" + vega-dataflow "^5.7.6" + vega-scenegraph "^4.13.0" + vega-util "^1.17.2" + +vega-lite@^5.6.1: + version "5.21.0" + resolved "https://registry.npmjs.org/vega-lite/-/vega-lite-5.21.0.tgz#21ce8b905a02ba364b7b1d7ef471497ba3e12e93" + integrity sha512-hNxM9nuMqpI1vkUOhEx6ewEf23WWLmJxSFJ4TA86AW43ixJyqcLV+iSCO0NipuVTE0rlDcc2e8joSewWyOlEwA== + dependencies: + json-stringify-pretty-compact "~3.0.0" + tslib "~2.6.3" + vega-event-selector "~3.0.1" + vega-expression "~5.1.1" + vega-util "~1.17.2" + yargs "~17.7.2" + +vega-loader@^4.5.2, vega-loader@~4.5.2: + version "4.5.2" + resolved "https://registry.npmjs.org/vega-loader/-/vega-loader-4.5.2.tgz#7212f093c397b153f69f7e6cfef47817c17c5c01" + integrity 
sha512-ktIdGz3DRIS3XfTP9lJ6oMT5cKwC86nQkjUbXZbOtwXQFVNE2xVWBuH13GP6FKUZxg5hJCMtb5v/e/fwTvhKsQ== + dependencies: + d3-dsv "^3.0.1" + node-fetch "^2.6.7" + topojson-client "^3.1.0" + vega-format "^1.1.2" + vega-util "^1.17.2" + +vega-parser@~6.4.0: + version "6.4.0" + resolved "https://registry.npmjs.org/vega-parser/-/vega-parser-6.4.0.tgz#6a12f07f0f9178492a17842efe7e1f51a2d36bed" + integrity sha512-/hFIJs0yITxfvLIfhhcpUrcbKvu4UZYoMGmly5PSsbgo60oAsVQW8ZbX2Ji3iNFqZJh1ifoX/P0j+9wep1OISw== + dependencies: + vega-dataflow "^5.7.6" + vega-event-selector "^3.0.1" + vega-functions "^5.15.0" + vega-scale "^7.4.1" + vega-util "^1.17.2" + +vega-projection@^1.6.1, vega-projection@~1.6.1: + version "1.6.1" + resolved "https://registry.npmjs.org/vega-projection/-/vega-projection-1.6.1.tgz#da687abc60f4a93bb888385beb23e0a1000f8b57" + integrity sha512-sqfnAAHumU7MWU1tQN3b6HNgKGF3legek0uLHhjLKcDJQxEc7kwcD18txFz2ffQks6d5j+AUhBiq4GARWf0DEQ== + dependencies: + d3-geo "^3.1.0" + d3-geo-projection "^4.0.0" + vega-scale "^7.4.1" + +vega-regression@~1.3.0: + version "1.3.0" + resolved "https://registry.npmjs.org/vega-regression/-/vega-regression-1.3.0.tgz#3e68e234fa9460041fac082c6a3469c896d436a8" + integrity sha512-gxOQfmV7Ft/MYKpXDEo09WZyBuKOBqxqDRWay9KtfGq/E0Y4vbTPsWLv2cB1ToPJdKE6XSN6Re9tCIw5M/yMUg== + dependencies: + d3-array "^3.2.2" + vega-dataflow "^5.7.6" + vega-statistics "^1.9.0" + vega-util "^1.17.2" + +vega-runtime@^6.2.0, vega-runtime@~6.2.0: + version "6.2.0" + resolved "https://registry.npmjs.org/vega-runtime/-/vega-runtime-6.2.0.tgz#10f435089fff11d8e1b49cb0cbab8041731e6f06" + integrity sha512-30UXbujWjKNd5aeP+oeHuwFmzuyVYlBj4aDy9+AjfWLECu8wJt4K01vwegcaGPdCWcPLVIv4Oa9Lob4mcXn5KQ== + dependencies: + vega-dataflow "^5.7.6" + vega-util "^1.17.2" + +vega-scale@^7.4.1, vega-scale@~7.4.1: + version "7.4.1" + resolved "https://registry.npmjs.org/vega-scale/-/vega-scale-7.4.1.tgz#2dcd3e39ebb00269b03a8be86e44c7b48c67442a" + integrity sha512-dArA28DbV/M92O2QvswnzCmQ4bq9WwLKUoyhqFYWCltmDwkmvX7yhqiFLFMWPItIm7mi4Qyoygby6r4DKd1X2A== + dependencies: + d3-array "^3.2.2" + d3-interpolate "^3.0.1" + d3-scale "^4.0.2" + d3-scale-chromatic "^3.1.0" + vega-time "^2.1.2" + vega-util "^1.17.2" + +vega-scenegraph@^4.13.0, vega-scenegraph@~4.13.0: + version "4.13.0" + resolved "https://registry.npmjs.org/vega-scenegraph/-/vega-scenegraph-4.13.0.tgz#c4fa5c82773f6244a9ca8b01a44e380adf03fabd" + integrity sha512-nfl45XtuqB5CxyIZJ+bbJ+dofzosPCRlmF+eUQo+0J23NkNXsTzur+1krJDSdhcw0SOYs4sbYRoMz1cpuOM4+Q== + dependencies: + d3-path "^3.1.0" + d3-shape "^3.2.0" + vega-canvas "^1.2.7" + vega-loader "^4.5.2" + vega-scale "^7.4.1" + vega-util "^1.17.2" + +vega-selections@^5.4.2: + version "5.4.2" + resolved "https://registry.npmjs.org/vega-selections/-/vega-selections-5.4.2.tgz#cb4f41f5d4c0ee924ebf131b8dbd43e7885bcad4" + integrity sha512-99FUhYmg0jOJr2/K4TcEURmJRkuibrCDc8KBUX7qcQEITzrZ5R6a4QE+sarCvbb3hi8aA9GV2oyST6MQeA9mgQ== + dependencies: + d3-array "3.2.4" + vega-expression "^5.0.1" + vega-util "^1.17.1" + +vega-statistics@^1.7.9, vega-statistics@^1.9.0, vega-statistics@~1.9.0: + version "1.9.0" + resolved "https://registry.npmjs.org/vega-statistics/-/vega-statistics-1.9.0.tgz#7d6139cea496b22d60decfa6abd73346f70206f9" + integrity sha512-GAqS7mkatpXcMCQKWtFu1eMUKLUymjInU0O8kXshWaQrVWjPIO2lllZ1VNhdgE0qGj4oOIRRS11kzuijLshGXQ== + dependencies: + d3-array "^3.2.2" + +vega-time@^2.1.2, vega-time@~2.1.2: + version "2.1.2" + resolved "https://registry.npmjs.org/vega-time/-/vega-time-2.1.2.tgz#0c414e74780613d6d3234fb97f19b50c0ebd9f49" + integrity 
sha512-6rXc6JdDt8MnCRy6UzUCsa6EeFycPDmvioMddLfKw38OYCV8pRQC5nw44gyddOwXgUTJLiCtn/sp53P0iA542A== + dependencies: + d3-array "^3.2.2" + d3-time "^3.1.0" + vega-util "^1.17.2" + +vega-transforms@~4.12.0: + version "4.12.0" + resolved "https://registry.npmjs.org/vega-transforms/-/vega-transforms-4.12.0.tgz#6a69e0b67934b0c0a40a6f607fdb543bf749955e" + integrity sha512-bh/2Qbj85O70mjfLRgPKAsABArgSUP0k+GjmaY54zukIRxoGxKju+85nigeX/aR/INpEqNWif+5lL+NvmyWA5w== + dependencies: + d3-array "^3.2.2" + vega-dataflow "^5.7.6" + vega-statistics "^1.9.0" + vega-time "^2.1.2" + vega-util "^1.17.2" + +vega-typings@~1.3.1: + version "1.3.1" + resolved "https://registry.npmjs.org/vega-typings/-/vega-typings-1.3.1.tgz#025a6031505794b44d9b6e2c49d4551b8918d4ae" + integrity sha512-j9Sdgmvowz09jkMgTFGVfiv7ycuRP/TQkdHRPXIYwt3RDgPQn7inyFcJ8C8ABFt4MiMWdjOwbneF6KWW8TRXIw== + dependencies: + "@types/geojson" "7946.0.4" + vega-event-selector "^3.0.1" + vega-expression "^5.1.1" + vega-util "^1.17.2" + +vega-util@^1.17.1, vega-util@^1.17.2, vega-util@~1.17.2: + version "1.17.2" + resolved "https://registry.npmjs.org/vega-util/-/vega-util-1.17.2.tgz#f69aa09fd5d6110c19c4a0f0af9e35945b99987d" + integrity sha512-omNmGiZBdjm/jnHjZlywyYqafscDdHaELHx1q96n5UOz/FlO9JO99P4B3jZg391EFG8dqhWjQilSf2JH6F1mIw== + +vega-view-transforms@~4.6.0: + version "4.6.0" + resolved "https://registry.npmjs.org/vega-view-transforms/-/vega-view-transforms-4.6.0.tgz#829d56ca3c8116b0dded4ec0502f4ac70253de9a" + integrity sha512-z3z66aJTA3ZRo4oBY4iBXnn+A4KqBGZT/UrlKDbm+7Ec+Ip+hK2tF8Kmhp/WNcMsDZoUWFqLJgR2VgOgvJk9RA== + dependencies: + vega-dataflow "^5.7.6" + vega-scenegraph "^4.13.0" + vega-util "^1.17.2" + +vega-view@~5.13.0: + version "5.13.0" + resolved "https://registry.npmjs.org/vega-view/-/vega-view-5.13.0.tgz#8ea96da9fcdf42fe7c0e95fe6258933477524745" + integrity sha512-ZPAAQ3iYz6YrQjJoDT+0bcxJkXt9PKF5v4OO7Omw8PFhkIv++jFXeKlQTW1bBtyQ92dkdGGHv5lYY67Djqjf3A== + dependencies: + d3-array "^3.2.2" + d3-timer "^3.0.1" + vega-dataflow "^5.7.6" + vega-format "^1.1.2" + vega-functions "^5.15.0" + vega-runtime "^6.2.0" + vega-scenegraph "^4.13.0" + vega-util "^1.17.2" + +vega-voronoi@~4.2.3: + version "4.2.3" + resolved "https://registry.npmjs.org/vega-voronoi/-/vega-voronoi-4.2.3.tgz#54c4bb96b9b94c3fa0160bee24695dcb9d583fe1" + integrity sha512-aYYYM+3UGqwsOx+TkVtF1IZfguy0H7AN79dR8H0nONRIc+vhk/lbnlkgwY2nSzEu0EZ4b5wZxeGoDBEVmdDEcg== + dependencies: + d3-delaunay "^6.0.2" + vega-dataflow "^5.7.6" + vega-util "^1.17.2" + +vega-wordcloud@~4.1.5: + version "4.1.5" + resolved "https://registry.npmjs.org/vega-wordcloud/-/vega-wordcloud-4.1.5.tgz#789c9e67225c77f3f35a6fc052beec1c2bdc8b5e" + integrity sha512-p+qXU3cb9VeWzJ/HEdax0TX2mqDJcSbrCIfo2d/EalOXGkvfSLKobsmMQ8DxPbtVp0uhnpvfCGDyMJw+AzcI2A== + dependencies: + vega-canvas "^1.2.7" + vega-dataflow "^5.7.6" + vega-scale "^7.4.1" + vega-statistics "^1.9.0" + vega-util "^1.17.2" + +vega@^5.20.0: + version "5.30.0" + resolved "https://registry.npmjs.org/vega/-/vega-5.30.0.tgz#d12350c829878b481453ab28ce10855a954df06d" + integrity sha512-ZGoC8LdfEUV0LlXIuz7hup9jxuQYhSaWek2M7r9dEHAPbPrzSQvKXZ0BbsJbrarM100TGRpTVN/l1AFxCwDkWw== + dependencies: + vega-crossfilter "~4.1.2" + vega-dataflow "~5.7.6" + vega-encode "~4.10.1" + vega-event-selector "~3.0.1" + vega-expression "~5.1.1" + vega-force "~4.2.1" + vega-format "~1.1.2" + vega-functions "~5.15.0" + vega-geo "~4.4.2" + vega-hierarchy "~4.1.2" + vega-label "~1.3.0" + vega-loader "~4.5.2" + vega-parser "~6.4.0" + vega-projection "~1.6.1" + vega-regression "~1.3.0" + vega-runtime 
"~6.2.0" + vega-scale "~7.4.1" + vega-scenegraph "~4.13.0" + vega-statistics "~1.9.0" + vega-time "~2.1.2" + vega-transforms "~4.12.0" + vega-typings "~1.3.1" + vega-util "~1.17.2" + vega-view "~5.13.0" + vega-view-transforms "~4.6.0" + vega-voronoi "~4.2.3" + vega-wordcloud "~4.1.5" + +vscode-jsonrpc@8.2.0: + version "8.2.0" + resolved "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz#f43dfa35fb51e763d17cd94dcca0c9458f35abf9" + integrity sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA== + +vscode-jsonrpc@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-6.0.0.tgz#108bdb09b4400705176b957ceca9e0880e9b6d4e" + integrity sha512-wnJA4BnEjOSyFMvjZdpiOwhSq9uDoK8e/kpRJDTaMYzwlkrhG1fwDIZI94CLsLzlCK5cIbMMtFlJlfR57Lavmg== + +vscode-jsonrpc@^8.0.2: + version "8.2.1" + resolved "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.1.tgz#a322cc0f1d97f794ffd9c4cd2a898a0bde097f34" + integrity sha512-kdjOSJ2lLIn7r1rtrMbbNCHjyMPfRnowdKjBQ+mGq6NAW5QY2bEZC/khaC5OR8svbbjvLEaIXkOq45e2X9BIbQ== + +vscode-languageserver-protocol@^3.17.0: + version "3.17.5" + resolved "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz#864a8b8f390835572f4e13bd9f8313d0e3ac4bea" + integrity sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg== + dependencies: + vscode-jsonrpc "8.2.0" + vscode-languageserver-types "3.17.5" + +vscode-languageserver-types@3.17.5: + version "3.17.5" + resolved "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz#3273676f0cf2eab40b3f44d085acbb7f08a39d8a" + integrity sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg== + +vscode-ws-jsonrpc@~1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/vscode-ws-jsonrpc/-/vscode-ws-jsonrpc-1.0.2.tgz#ead2efd66293f331ccc220222ae1aeca4bb5b2c1" + integrity sha512-09OpRC0RcqZs4DleJRgs+R+7gQkwb4tgvsL43lzVZwW4N5NO3H/9sLNeKPBt83k7WyA8qBZjrzM6X7tKFpFrjQ== + dependencies: + vscode-jsonrpc "^8.0.2" + +w3c-keyname@^2.2.4: + version "2.2.8" + resolved "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz#7b17c8c6883d4e8b86ac8aba79d39e880f8869c5" + integrity sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ== + +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== + +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +ws@^8.11.0: + version "8.18.0" + resolved "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz#0d7505a6eafe2b0e712d232b42279f53bc289bbc" + integrity 
sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw== + +y-protocols@^1.0.5: + version "1.0.6" + resolved "https://registry.npmjs.org/y-protocols/-/y-protocols-1.0.6.tgz#66dad8a95752623443e8e28c0e923682d2c0d495" + integrity sha512-vHRF2L6iT3rwj1jub/K5tYcTT/mEYDUppgNPXwp8fmLpui9f7Yeq3OEtTLVF012j39QnV+KEQpNqoN7CWU7Y9Q== + dependencies: + lib0 "^0.2.85" + +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yargs-parser@^21.1.1: + version "21.1.1" + resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== + +yargs@~17.7.2: + version "17.7.2" + resolved "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz#991df39aca675a192b816e1e0363f9d75d2aa269" + integrity sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + +yarn-deduplicate@^6.0.1: + version "6.0.2" + resolved "https://registry.npmjs.org/yarn-deduplicate/-/yarn-deduplicate-6.0.2.tgz#63498d2d4c3a8567e992a994ce0ab51aa5681f2e" + integrity sha512-Efx4XEj82BgbRJe5gvQbZmEO7pU5DgHgxohYZp98/+GwPqdU90RXtzvHirb7hGlde0sQqk5G3J3Woyjai8hVqA== + dependencies: + "@yarnpkg/lockfile" "^1.1.0" + commander "^10.0.1" + semver "^7.5.0" + tslib "^2.5.0" + +yjs@^13.5.40: + version "13.6.19" + resolved "https://registry.npmjs.org/yjs/-/yjs-13.6.19.tgz#66999f41254ab65be8c8e71bd767d124ad600909" + integrity sha512-GNKw4mEUn5yWU2QPHRx8jppxmCm9KzbBhB4qJLUJFiiYD0g/tDVgXQ7aPkyh01YO28kbs2J/BEbWBagjuWyejw== + dependencies: + lib0 "^0.2.86" From 2e5ee98a1a1697d49c1ece874cb018cb55eaeaf7 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Fri, 20 Sep 2024 10:31:37 +0100 Subject: [PATCH 345/496] Test to cover the widgets functionality --- .../tests/widget_notebook_example.test.ts | 79 ++++++++++++++++++- 1 file changed, 76 insertions(+), 3 deletions(-) diff --git a/ui-tests/tests/widget_notebook_example.test.ts b/ui-tests/tests/widget_notebook_example.test.ts index 11eff319..798c2eb6 100644 --- a/ui-tests/tests/widget_notebook_example.test.ts +++ b/ui-tests/tests/widget_notebook_example.test.ts @@ -16,8 +16,6 @@ import { test } from "@jupyterlab/galata"; import { expect } from "@playwright/test"; import * as path from "path"; -test.setTimeout(460000); - test.describe("Visual Regression", () => { test.beforeEach(async ({ page, tmpPath }) => { await page.contents.uploadDirectory( @@ -27,7 +25,7 @@ test.describe("Visual Regression", () => { await page.filebrowser.openDirectory(tmpPath); }); - test("Run notebook and capture cell outputs", async ({ + test("Run notebook, capture cell outputs, and test widgets", async ({ page, tmpPath, }) => { @@ -60,5 +58,80 @@ test.describe("Visual Regression", () => { continue; } } + + const widgetCellIndex = 3; + + await waitForWidget(page, widgetCellIndex, 'input[type="checkbox"]'); + await waitForWidget(page, widgetCellIndex, 'button:has-text("Cluster Down")'); + await waitForWidget(page, widgetCellIndex, 'button:has-text("Cluster Up")'); + + await interactWithWidget(page, widgetCellIndex, 'input[type="checkbox"]', async 
(checkbox) => { + await checkbox.click(); + const isChecked = await checkbox.isChecked(); + expect(isChecked).toBe(true); + }); + + await interactWithWidget(page, widgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { + await button.click(); + const clusterDownMessage = await page.waitForSelector('text=No instances found, nothing to be done.', { timeout: 5000 }); + expect(clusterDownMessage).not.toBeNull(); + }); + + await interactWithWidget(page, widgetCellIndex, 'button:has-text("Cluster Up")', async (button) => { + await button.click(); + + const successMessage = await page.waitForSelector('text=Ray Cluster: \'raytest\' has successfully been created', { timeout: 10000 }); + expect(successMessage).not.toBeNull(); + + const resourcesMessage = await page.waitForSelector('text=Waiting for requested resources to be set up...'); + expect(resourcesMessage).not.toBeNull(); + + const upAndRunningMessage = await page.waitForSelector('text=Requested cluster is up and running!'); + expect(upAndRunningMessage).not.toBeNull(); + + const dashboardReadyMessage = await page.waitForSelector('text=Dashboard is ready!'); + expect(dashboardReadyMessage).not.toBeNull(); + }); + + await runPreviousCell(page, cellCount, '(, True)'); + + await interactWithWidget(page, widgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { + await button.click(); + const clusterDownMessage = await page.waitForSelector('text=Ray Cluster: \'raytest\' has successfully been deleted', { timeout: 5000 }); + expect(clusterDownMessage).not.toBeNull(); + }); + + await runPreviousCell(page, cellCount, '(, False)'); }); }); + +async function waitForWidget(page, cellIndex: number, widgetSelector: string, timeout = 5000) { + const widgetCell = await page.notebook.getCellOutput(cellIndex); + + if (widgetCell) { + await widgetCell.waitForSelector(widgetSelector, { timeout }); + } +} + +async function interactWithWidget(page, cellIndex: number, widgetSelector: string, action: (widget) => Promise) { + const widgetCell = await page.notebook.getCellOutput(cellIndex); + + if (widgetCell) { + const widget = await widgetCell.$(widgetSelector); + if (widget) { + await action(widget); + } + } +} + +async function runPreviousCell(page, cellCount, expectedMessage) { + const runSuccess = await page.notebook.runCell(cellCount - 1); expect(runSuccess).toBe(true); + const lastCellOutput = await page.notebook.getCellOutput(cellCount - 1); + const newOutput = await lastCellOutput.evaluate((output) => output.textContent); + + if (expectedMessage) { + expect(newOutput).toContain(expectedMessage); + } + + return lastCellOutput; +} From 80fabe349429aa90a09a805bde782c3b841bf1a7 Mon Sep 17 00:00:00 2001 From: Fiona Waters Date: Thu, 26 Sep 2024 17:08:57 +0100 Subject: [PATCH 346/496] Update Ray image to new static tag --- src/codeflare_sdk/templates/base-template.yaml | 4 ++-- tests/e2e/support.py | 2 +- tests/test-case-bad.yaml | 4 ++-- tests/test-case-no-kueue-no-aw.yaml | 4 ++-- tests/test-case-no-mcad.yamls | 4 ++-- tests/test-case.yaml | 4 ++-- tests/test-default-appwrapper.yaml | 4 ++-- tests/unit_test_support.py | 2 +- 8 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/templates/base-template.yaml index 494177f9..19e5b191 100644 --- a/src/codeflare_sdk/templates/base-template.yaml +++ b/src/codeflare_sdk/templates/base-template.yaml @@ -69,7 +69,7 @@ spec: containers: # The Ray head pod - name: ray-head - image: 
quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 imagePullPolicy: Always ports: - containerPort: 6379 @@ -150,7 +150,7 @@ spec: spec: containers: - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 # environment variables to set in the container.Optional. # Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ lifecycle: diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 8290b8cd..38325057 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -8,7 +8,7 @@ def get_ray_image(): - default_ray_image = "quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe" + default_ray_image = "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06" return os.getenv("RAY_IMAGE", default_ray_image) diff --git a/tests/test-case-bad.yaml b/tests/test-case-bad.yaml index dedd2145..a072e9e9 100644 --- a/tests/test-case-bad.yaml +++ b/tests/test-case-bad.yaml @@ -43,7 +43,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 imagePullPolicy: Always lifecycle: preStop: @@ -90,7 +90,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 lifecycle: preStop: exec: diff --git a/tests/test-case-no-kueue-no-aw.yaml b/tests/test-case-no-kueue-no-aw.yaml index 23d73606..ea90a275 100644 --- a/tests/test-case-no-kueue-no-aw.yaml +++ b/tests/test-case-no-kueue-no-aw.yaml @@ -31,7 +31,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 imagePullPolicy: Always lifecycle: preStop: @@ -103,7 +103,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 lifecycle: preStop: exec: diff --git a/tests/test-case-no-mcad.yamls b/tests/test-case-no-mcad.yamls index 12a66dc4..36ce8e26 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test-case-no-mcad.yamls @@ -34,7 +34,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 imagePullPolicy: Always lifecycle: preStop: @@ -106,7 +106,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 
lifecycle: preStop: exec: diff --git a/tests/test-case.yaml b/tests/test-case.yaml index 9c9a9d5a..c03422cf 100644 --- a/tests/test-case.yaml +++ b/tests/test-case.yaml @@ -39,7 +39,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 imagePullPolicy: Always lifecycle: preStop: @@ -111,7 +111,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 lifecycle: preStop: exec: diff --git a/tests/test-default-appwrapper.yaml b/tests/test-default-appwrapper.yaml index 6bf3acf5..0780a46e 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test-default-appwrapper.yaml @@ -40,7 +40,7 @@ spec: spec: imagePullSecrets: [] containers: - - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 imagePullPolicy: Always lifecycle: preStop: @@ -111,7 +111,7 @@ spec: spec: imagePullSecrets: [] containers: - - image: quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 lifecycle: preStop: exec: diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index 70ea8016..c23c47eb 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -45,7 +45,7 @@ def createClusterWrongType(): appwrapper=True, machine_types=[True, False], image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/modh/ray@sha256:8b9e0efa3ae0e1862d66c02ffa82b80830334a5f8af12deb96d4f2f8babce5fe", + image="quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", write_to_file=True, labels={1: 1}, ) From c2eaa150a80f5d1437c4ce98da99fcff62d6af9f Mon Sep 17 00:00:00 2001 From: Christian Zaccaria <73656840+ChristianZaccaria@users.noreply.github.com> Date: Fri, 27 Sep 2024 11:19:07 +0100 Subject: [PATCH 347/496] Add UI table with Cluster specs and action buttons (#681) * Add UI table with RayCluster specs and action buttons * Update cpu and mem names from UI table * Merge requests-limits into single column in UI table * Enhance notebook outputs/display on button clicks * Refactor and move UI table to widgets.py file * Add unit tests for UI table functions * Add timeout and interval parameters to _delete_cluster function * Pre-select cluster if exists, and suppress widgets and outputs on creation of Cluster Object, and bug fixes * Add UI table to regression and functionality tests * Update codeflare_sdk.egg-info * Fix to hide toolbar before capturing snapshots for UI notebook tests * Add head comments to functions and add num_workers to data frame * Reformat for pre-commit checks * Revert codeflare_sdk.egg-info name --- .github/workflows/ui_notebooks_test.yaml | 3 +- .../guided-demos/3_widget_example.ipynb | 20 +- src/codeflare_sdk.egg-info/SOURCES.txt | 2 + src/codeflare_sdk/__init__.py | 1 + src/codeflare_sdk/cluster/__init__.py | 4 + src/codeflare_sdk/cluster/cluster.py | 17 +- src/codeflare_sdk/cluster/model.py | 6 +- src/codeflare_sdk/cluster/widgets.py | 405 +++++++++++++++++- src/codeflare_sdk/utils/pretty_print.py | 4 +- 
tests/unit_test.py | 275 +++++++++++- .../tests/widget_notebook_example.test.ts | 62 ++- .../widgets-cell-4-linux.png | Bin 9895 -> 3461 bytes .../widgets-cell-5-linux.png | Bin 0 -> 9832 bytes 13 files changed, 756 insertions(+), 43 deletions(-) create mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-5-linux.png diff --git a/.github/workflows/ui_notebooks_test.yaml b/.github/workflows/ui_notebooks_test.yaml index 864330b9..5e8d506d 100644 --- a/.github/workflows/ui_notebooks_test.yaml +++ b/.github/workflows/ui_notebooks_test.yaml @@ -86,7 +86,8 @@ jobs: jq -r 'del(.cells[] | select(.source[] | contains("Create authentication object for user permissions")))' 3_widget_example.ipynb > 3_widget_example.ipynb.tmp && mv 3_widget_example.ipynb.tmp 3_widget_example.ipynb jq -r 'del(.cells[] | select(.source[] | contains("auth.logout()")))' 3_widget_example.ipynb > 3_widget_example.ipynb.tmp && mv 3_widget_example.ipynb.tmp 3_widget_example.ipynb # Set explicit namespace as SDK need it (currently) to resolve local queues - sed -i "s/head_memory_limits=2,/head_memory_limits=2, namespace='default',/" 3_widget_example.ipynb + sed -i "s|head_memory_limits=2,|head_memory_limits=2, namespace='default', image='quay.io/modh/ray:2.35.0-py39-cu121',|" 3_widget_example.ipynb + sed -i "s|view_clusters()|view_clusters('default')|" 3_widget_example.ipynb working-directory: demo-notebooks/guided-demos - name: Run UI notebook tests diff --git a/demo-notebooks/guided-demos/3_widget_example.ipynb b/demo-notebooks/guided-demos/3_widget_example.ipynb index 4d3d6ea7..11521ec7 100644 --- a/demo-notebooks/guided-demos/3_widget_example.ipynb +++ b/demo-notebooks/guided-demos/3_widget_example.ipynb @@ -19,7 +19,7 @@ "outputs": [], "source": [ "# Import pieces from codeflare-sdk\n", - "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication" + "from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication, view_clusters" ] }, { @@ -61,7 +61,7 @@ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", - " name='raytest', \n", + " name='raytest',\n", " head_cpu_requests='500m',\n", " head_cpu_limits='500m',\n", " head_memory_requests=2,\n", @@ -73,12 +73,22 @@ " worker_cpu_limits=1,\n", " worker_memory_requests=2,\n", " worker_memory_limits=2,\n", - " # image=\"\", # Optional Field \n", - " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources \n", + " # image=\"\", # Optional Field\n", + " write_to_file=False, # When enabled Ray Cluster yaml files are written to /HOME/.codeflare/resources\n", " # local_queue=\"local-queue-name\" # Specify the local queue manually\n", "))" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "3de6403c", + "metadata": {}, + "outputs": [], + "source": [ + "view_clusters()" + ] + }, { "cell_type": "code", "execution_count": null, @@ -106,7 +116,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.19" + "version": "3.9.18" }, "vscode": { "interpreter": { diff --git a/src/codeflare_sdk.egg-info/SOURCES.txt b/src/codeflare_sdk.egg-info/SOURCES.txt index 42541f1d..63614a81 100644 --- a/src/codeflare_sdk.egg-info/SOURCES.txt +++ b/src/codeflare_sdk.egg-info/SOURCES.txt @@ -12,9 +12,11 @@ 
src/codeflare_sdk/cluster/awload.py src/codeflare_sdk/cluster/cluster.py src/codeflare_sdk/cluster/config.py src/codeflare_sdk/cluster/model.py +src/codeflare_sdk/cluster/widgets.py src/codeflare_sdk/job/__init__.py src/codeflare_sdk/job/ray_jobs.py src/codeflare_sdk/utils/__init__.py +src/codeflare_sdk/utils/demos.py src/codeflare_sdk/utils/generate_cert.py src/codeflare_sdk/utils/generate_yaml.py src/codeflare_sdk/utils/kube_api_helpers.py diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index 0390a3d2..29205a36 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -14,6 +14,7 @@ get_cluster, list_all_queued, list_all_clusters, + view_clusters, ) from .job import RayJobClient diff --git a/src/codeflare_sdk/cluster/__init__.py b/src/codeflare_sdk/cluster/__init__.py index 0b1849e5..6490a224 100644 --- a/src/codeflare_sdk/cluster/__init__.py +++ b/src/codeflare_sdk/cluster/__init__.py @@ -21,4 +21,8 @@ list_all_clusters, ) +from .widgets import ( + view_clusters, +) + from .awload import AWManager diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index 7c652a18..a32d5a4b 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -19,6 +19,7 @@ """ import re +import subprocess from time import sleep from typing import List, Optional, Tuple, Dict @@ -862,16 +863,19 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: name=rc["metadata"]["name"], status=status, # for now we are not using autoscaling so same replicas is fine - workers=rc["spec"]["workerGroupSpecs"][0]["replicas"], + num_workers=rc["spec"]["workerGroupSpecs"][0]["replicas"], worker_mem_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["memory"], worker_mem_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["requests"]["memory"], - worker_cpu=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ - 0 - ]["resources"]["limits"]["cpu"], + worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["cpu"], + worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["cpu"], worker_extended_resources=worker_extended_resources, namespace=rc["metadata"]["namespace"], head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ @@ -907,10 +911,11 @@ def _copy_to_ray(cluster: Cluster) -> RayCluster: ray = RayCluster( name=cluster.config.name, status=cluster.status(print_to_console=False)[0], - workers=cluster.config.num_workers, + num_workers=cluster.config.num_workers, worker_mem_requests=cluster.config.worker_memory_requests, worker_mem_limits=cluster.config.worker_memory_limits, - worker_cpu=cluster.config.worker_cpu_requests, + worker_cpu_requests=cluster.config.worker_cpu_requests, + worker_cpu_limits=cluster.config.worker_cpu_limits, worker_extended_resources=cluster.config.worker_extended_resource_requests, namespace=cluster.config.namespace, dashboard=cluster.cluster_dashboard_uri(), diff --git a/src/codeflare_sdk/cluster/model.py b/src/codeflare_sdk/cluster/model.py index ab7b30ed..44be5456 100644 --- a/src/codeflare_sdk/cluster/model.py +++ b/src/codeflare_sdk/cluster/model.py @@ -21,6 +21,7 @@ from dataclasses import dataclass, field from enum import Enum import typing +from typing import Union class RayClusterStatus(Enum): @@ -77,10 +78,11 @@ 
class RayCluster: head_cpu_limits: int head_mem_requests: str head_mem_limits: str - workers: int + num_workers: int worker_mem_requests: str worker_mem_limits: str - worker_cpu: int + worker_cpu_requests: Union[int, str] + worker_cpu_limits: Union[int, str] namespace: str dashboard: str worker_extended_resources: typing.Dict[str, int] = field(default_factory=dict) diff --git a/src/codeflare_sdk/cluster/widgets.py b/src/codeflare_sdk/cluster/widgets.py index 351640e0..53afa28b 100644 --- a/src/codeflare_sdk/cluster/widgets.py +++ b/src/codeflare_sdk/cluster/widgets.py @@ -15,10 +15,21 @@ """ The widgets sub-module contains the ui widgets created using the ipywidgets package. """ -import ipywidgets as widgets -from IPython.display import display +import contextlib +import io import os +import warnings +import time import codeflare_sdk +from kubernetes import client +from kubernetes.client.rest import ApiException +import ipywidgets as widgets +from IPython.display import display, HTML, Javascript +import pandas as pd +from .config import ClusterConfiguration +from .model import RayClusterStatus +from ..utils.kube_api_helpers import _kube_api_error_handling +from .auth import config_check, api_config_handler def cluster_up_down_buttons(cluster: "codeflare_sdk.cluster.Cluster") -> widgets.Button: @@ -89,3 +100,393 @@ def is_notebook() -> bool: return True else: return False + + +def view_clusters(namespace: str = None): + """ + view_clusters function will display existing clusters with their specs, and handle user interactions. + """ + if not is_notebook(): + warnings.warn( + "view_clusters can only be used in a Jupyter Notebook environment." + ) + return # Exit function if not in Jupyter Notebook + + from .cluster import get_current_namespace + + if not namespace: + namespace = get_current_namespace() + + user_output = widgets.Output() + raycluster_data_output = widgets.Output() + url_output = widgets.Output() + + ray_clusters_df = _fetch_cluster_data(namespace) + if ray_clusters_df.empty: + print(f"No clusters found in the {namespace} namespace.") + return + + classification_widget = widgets.ToggleButtons( + options=ray_clusters_df["Name"].tolist(), + value=ray_clusters_df["Name"].tolist()[0], + description="Select an existing cluster:", + ) + # Setting the initial value to trigger the event handler to display the cluster details. 
+ initial_value = classification_widget.value + _on_cluster_click( + {"new": initial_value}, raycluster_data_output, namespace, classification_widget + ) + classification_widget.observe( + lambda selection_change: _on_cluster_click( + selection_change, raycluster_data_output, namespace, classification_widget + ), + names="value", + ) + + # UI table buttons + delete_button = widgets.Button( + description="Delete Cluster", + icon="trash", + tooltip="Delete the selected cluster", + ) + delete_button.on_click( + lambda b: _on_delete_button_click( + b, + classification_widget, + ray_clusters_df, + raycluster_data_output, + user_output, + delete_button, + list_jobs_button, + ray_dashboard_button, + ) + ) + + list_jobs_button = widgets.Button( + description="View Jobs", icon="suitcase", tooltip="Open the Ray Job Dashboard" + ) + list_jobs_button.on_click( + lambda b: _on_list_jobs_button_click( + b, classification_widget, ray_clusters_df, user_output, url_output + ) + ) + + ray_dashboard_button = widgets.Button( + description="Open Ray Dashboard", + icon="dashboard", + tooltip="Open the Ray Dashboard in a new tab", + layout=widgets.Layout(width="auto"), + ) + ray_dashboard_button.on_click( + lambda b: _on_ray_dashboard_button_click( + b, classification_widget, ray_clusters_df, user_output, url_output + ) + ) + + display(widgets.VBox([classification_widget, raycluster_data_output])) + display( + widgets.HBox([delete_button, list_jobs_button, ray_dashboard_button]), + url_output, + user_output, + ) + + +def _on_cluster_click( + selection_change, + raycluster_data_output: widgets.Output, + namespace: str, + classification_widget: widgets.ToggleButtons, +): + """ + _on_cluster_click handles the event when a cluster is selected from the toggle buttons, updating the output with cluster details. + """ + new_value = selection_change["new"] + raycluster_data_output.clear_output() + ray_clusters_df = _fetch_cluster_data(namespace) + classification_widget.options = ray_clusters_df["Name"].tolist() + with raycluster_data_output: + display( + HTML( + ray_clusters_df[ray_clusters_df["Name"] == new_value][ + [ + "Name", + "Namespace", + "Num Workers", + "Head GPUs", + "Head CPU Req~Lim", + "Head Memory Req~Lim", + "Worker GPUs", + "Worker CPU Req~Lim", + "Worker Memory Req~Lim", + "status", + ] + ].to_html(escape=False, index=False, border=2) + ) + ) + + +def _on_delete_button_click( + b, + classification_widget: widgets.ToggleButtons, + ray_clusters_df: pd.DataFrame, + raycluster_data_output: widgets.Output, + user_output: widgets.Output, + delete_button: widgets.Button, + list_jobs_button: widgets.Button, + ray_dashboard_button: widgets.Button, +): + """ + _on_delete_button_click handles the event when the Delete Button is clicked, deleting the selected cluster. + """ + cluster_name = classification_widget.value + namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][ + "Namespace" + ].values[0] + + _delete_cluster(cluster_name, namespace) + + with user_output: + user_output.clear_output() + print( + f"Cluster {cluster_name} in the {namespace} namespace was deleted successfully." 
+ ) + + # Refresh the dataframe + new_df = _fetch_cluster_data(namespace) + if new_df.empty: + classification_widget.close() + delete_button.close() + list_jobs_button.close() + ray_dashboard_button.close() + with raycluster_data_output: + raycluster_data_output.clear_output() + print(f"No clusters found in the {namespace} namespace.") + else: + classification_widget.options = new_df["Name"].tolist() + + +def _on_ray_dashboard_button_click( + b, + classification_widget: widgets.ToggleButtons, + ray_clusters_df: pd.DataFrame, + user_output: widgets.Output, + url_output: widgets.Output, +): + """ + _on_ray_dashboard_button_click handles the event when the Open Ray Dashboard button is clicked, opening the Ray Dashboard in a new tab + """ + from codeflare_sdk.cluster import Cluster + + cluster_name = classification_widget.value + namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][ + "Namespace" + ].values[0] + + # Suppress from Cluster Object initialisation widgets and outputs + with widgets.Output(), contextlib.redirect_stdout( + io.StringIO() + ), contextlib.redirect_stderr(io.StringIO()): + cluster = Cluster(ClusterConfiguration(cluster_name, namespace)) + dashboard_url = cluster.cluster_dashboard_uri() + + with user_output: + user_output.clear_output() + print(f"Opening Ray Dashboard for {cluster_name} cluster:\n{dashboard_url}") + with url_output: + display(Javascript(f'window.open("{dashboard_url}", "_blank");')) + + +def _on_list_jobs_button_click( + b, + classification_widget: widgets.ToggleButtons, + ray_clusters_df: pd.DataFrame, + user_output: widgets.Output, + url_output: widgets.Output, +): + """ + _on_list_jobs_button_click handles the event when the View Jobs button is clicked, opening the Ray Jobs Dashboard in a new tab + """ + from codeflare_sdk.cluster import Cluster + + cluster_name = classification_widget.value + namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][ + "Namespace" + ].values[0] + + # Suppress from Cluster Object initialisation widgets and outputs + with widgets.Output(), contextlib.redirect_stdout( + io.StringIO() + ), contextlib.redirect_stderr(io.StringIO()): + cluster = Cluster(ClusterConfiguration(cluster_name, namespace)) + dashboard_url = cluster.cluster_dashboard_uri() + + with user_output: + user_output.clear_output() + print( + f"Opening Ray Jobs Dashboard for {cluster_name} cluster:\n{dashboard_url}/#/jobs" + ) + with url_output: + display(Javascript(f'window.open("{dashboard_url}/#/jobs", "_blank");')) + + +def _delete_cluster( + cluster_name: str, + namespace: str, + timeout: int = 5, + interval: int = 1, +): + """ + _delete_cluster function deletes the cluster with the given name and namespace. + It optionally waits for the cluster to be deleted. 
+ """ + from .cluster import _check_aw_exists + + try: + config_check() + api_instance = client.CustomObjectsApi(api_config_handler()) + + if _check_aw_exists(cluster_name, namespace): + api_instance.delete_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta2", + namespace=namespace, + plural="appwrappers", + name=cluster_name, + ) + group = "workload.codeflare.dev" + version = "v1beta2" + plural = "appwrappers" + else: + api_instance.delete_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=namespace, + plural="rayclusters", + name=cluster_name, + ) + group = "ray.io" + version = "v1" + plural = "rayclusters" + + # Wait for the resource to be deleted + while timeout > 0: + try: + api_instance.get_namespaced_custom_object( + group=group, + version=version, + namespace=namespace, + plural=plural, + name=cluster_name, + ) + # Retry if resource still exists + time.sleep(interval) + timeout -= interval + if timeout <= 0: + raise TimeoutError( + f"Timeout waiting for {cluster_name} to be deleted." + ) + except ApiException as e: + # Resource is deleted + if e.status == 404: + break + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + + +def _fetch_cluster_data(namespace): + """ + _fetch_cluster_data function fetches all clusters and their spec in a given namespace and returns a DataFrame. + """ + from .cluster import list_all_clusters + + rayclusters = list_all_clusters(namespace, False) + if not rayclusters: + return pd.DataFrame() + names = [item.name for item in rayclusters] + namespaces = [item.namespace for item in rayclusters] + num_workers = [item.num_workers for item in rayclusters] + head_extended_resources = [ + f"{list(item.head_extended_resources.keys())[0]}: {list(item.head_extended_resources.values())[0]}" + if item.head_extended_resources + else "0" + for item in rayclusters + ] + worker_extended_resources = [ + f"{list(item.worker_extended_resources.keys())[0]}: {list(item.worker_extended_resources.values())[0]}" + if item.worker_extended_resources + else "0" + for item in rayclusters + ] + head_cpu_requests = [ + item.head_cpu_requests if item.head_cpu_requests else 0 for item in rayclusters + ] + head_cpu_limits = [ + item.head_cpu_limits if item.head_cpu_limits else 0 for item in rayclusters + ] + head_cpu_rl = [ + f"{requests}~{limits}" + for requests, limits in zip(head_cpu_requests, head_cpu_limits) + ] + head_mem_requests = [ + item.head_mem_requests if item.head_mem_requests else 0 for item in rayclusters + ] + head_mem_limits = [ + item.head_mem_limits if item.head_mem_limits else 0 for item in rayclusters + ] + head_mem_rl = [ + f"{requests}~{limits}" + for requests, limits in zip(head_mem_requests, head_mem_limits) + ] + worker_cpu_requests = [ + item.worker_cpu_requests if item.worker_cpu_requests else 0 + for item in rayclusters + ] + worker_cpu_limits = [ + item.worker_cpu_limits if item.worker_cpu_limits else 0 for item in rayclusters + ] + worker_cpu_rl = [ + f"{requests}~{limits}" + for requests, limits in zip(worker_cpu_requests, worker_cpu_limits) + ] + worker_mem_requests = [ + item.worker_mem_requests if item.worker_mem_requests else 0 + for item in rayclusters + ] + worker_mem_limits = [ + item.worker_mem_limits if item.worker_mem_limits else 0 for item in rayclusters + ] + worker_mem_rl = [ + f"{requests}~{limits}" + for requests, limits in zip(worker_mem_requests, worker_mem_limits) + ] + status = [item.status.name for item in rayclusters] + + status = 
[_format_status(item.status) for item in rayclusters] + + data = { + "Name": names, + "Namespace": namespaces, + "Num Workers": num_workers, + "Head GPUs": head_extended_resources, + "Worker GPUs": worker_extended_resources, + "Head CPU Req~Lim": head_cpu_rl, + "Head Memory Req~Lim": head_mem_rl, + "Worker CPU Req~Lim": worker_cpu_rl, + "Worker Memory Req~Lim": worker_mem_rl, + "status": status, + } + return pd.DataFrame(data) + + +def _format_status(status): + """ + _format_status function formats the status enum. + """ + status_map = { + RayClusterStatus.READY: 'Ready ✓', + RayClusterStatus.SUSPENDED: 'Suspended ❄️', + RayClusterStatus.FAILED: 'Failed ✗', + RayClusterStatus.UNHEALTHY: 'Unhealthy', + RayClusterStatus.UNKNOWN: 'Unknown', + } + return status_map.get(status, status) diff --git a/src/codeflare_sdk/utils/pretty_print.py b/src/codeflare_sdk/utils/pretty_print.py index 4842c9cd..30331319 100644 --- a/src/codeflare_sdk/utils/pretty_print.py +++ b/src/codeflare_sdk/utils/pretty_print.py @@ -135,9 +135,9 @@ def print_clusters(clusters: List[RayCluster]): ) name = cluster.name dashboard = cluster.dashboard - workers = str(cluster.workers) + workers = str(cluster.num_workers) memory = f"{cluster.worker_mem_requests}~{cluster.worker_mem_limits}" - cpu = str(cluster.worker_cpu) + cpu = f"{cluster.worker_cpu_requests}~{cluster.worker_cpu_limits}" gpu = str(cluster.worker_extended_resources.get("nvidia.com/gpu", 0)) #'table0' to display the cluster name, status, url, and dashboard link diff --git a/tests/unit_test.py b/tests/unit_test.py index 388723c5..ae2af659 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -76,7 +76,9 @@ gen_names, is_openshift_cluster, ) -from codeflare_sdk.cluster.widgets import cluster_up_down_buttons + +import codeflare_sdk.cluster.widgets as cf_widgets +import pandas as pd import openshift from openshift.selector import Selector @@ -88,9 +90,6 @@ from ray.job_submission import JobSubmissionClient from codeflare_sdk.job.ray_jobs import RayJobClient -import ipywidgets as widgets -from IPython.display import display - # For mocking openshift client results fake_res = openshift.Result("fake") @@ -941,10 +940,11 @@ def test_ray_details(mocker, capsys): ray1 = RayCluster( name="raytest1", status=RayClusterStatus.READY, - workers=1, + num_workers=1, worker_mem_requests="2G", worker_mem_limits="2G", - worker_cpu=1, + worker_cpu_requests=1, + worker_cpu_limits=1, namespace="ns", dashboard="fake-uri", head_cpu_requests=2, @@ -979,10 +979,11 @@ def test_ray_details(mocker, capsys): assert details == ray2 assert ray2.name == "raytest2" assert ray1.namespace == ray2.namespace - assert ray1.workers == ray2.workers + assert ray1.num_workers == ray2.num_workers assert ray1.worker_mem_requests == ray2.worker_mem_requests assert ray1.worker_mem_limits == ray2.worker_mem_limits - assert ray1.worker_cpu == ray2.worker_cpu + assert ray1.worker_cpu_requests == ray2.worker_cpu_requests + assert ray1.worker_cpu_limits == ray2.worker_cpu_limits assert ray1.worker_extended_resources == ray2.worker_extended_resources try: print_clusters([ray1, ray2]) @@ -1006,7 +1007,7 @@ def test_ray_details(mocker, capsys): " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" " │ │ # Workers │ │ Memory CPU GPU │ │ \n" " │ │ │ │ │ │ \n" - " │ │ 1 │ │ 2G~2G 1 0 │ │ \n" + " │ │ 1 │ │ 2G~2G 1~1 0 │ │ \n" " │ │ │ │ │ │ \n" " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" " ╰───────────────────────────────────────────────────────────────╯ \n" @@ -1024,7 +1025,7 @@ 
def test_ray_details(mocker, capsys): " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" " │ │ # Workers │ │ Memory CPU GPU │ │ \n" " │ │ │ │ │ │ \n" - " │ │ 1 │ │ 2G~2G 1 0 │ │ \n" + " │ │ 1 │ │ 2G~2G 1~1 0 │ │ \n" " │ │ │ │ │ │ \n" " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" " ╰───────────────────────────────────────────────────────────────╯ \n" @@ -1040,7 +1041,7 @@ def test_ray_details(mocker, capsys): "│ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │\n" "│ │ # Workers │ │ Memory CPU GPU │ │\n" "│ │ │ │ │ │\n" - "│ │ 1 │ │ 2G~2G 1 0 │ │\n" + "│ │ 1 │ │ 2G~2G 1~1 0 │ │\n" "│ │ │ │ │ │\n" "│ ╰─────────────╯ ╰──────────────────────────────────────╯ │\n" "╰───────────────────────────────────────────────────────────────╯\n" @@ -2245,7 +2246,7 @@ def test_list_clusters(mocker, capsys): " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" " │ │ # Workers │ │ Memory CPU GPU │ │ \n" " │ │ │ │ │ │ \n" - " │ │ 1 │ │ 2G~2G 1 0 │ │ \n" + " │ │ 1 │ │ 2G~2G 1~1 0 │ │ \n" " │ │ │ │ │ │ \n" " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" " ╰───────────────────────────────────────────────────────────────╯ \n" @@ -2261,7 +2262,7 @@ def test_list_clusters(mocker, capsys): "│ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │\n" "│ │ # Workers │ │ Memory CPU GPU │ │\n" "│ │ │ │ │ │\n" - "│ │ 1 │ │ 2G~2G 1 0 │ │\n" + "│ │ 1 │ │ 2G~2G 1~1 0 │ │\n" "│ │ │ │ │ │\n" "│ ╰─────────────╯ ╰──────────────────────────────────────╯ │\n" "╰───────────────────────────────────────────────────────────────╯\n" @@ -2357,10 +2358,11 @@ def test_cluster_status(mocker): fake_ray = RayCluster( name="test", status=RayClusterStatus.UNKNOWN, - workers=1, + num_workers=1, worker_mem_requests=2, worker_mem_limits=2, - worker_cpu=1, + worker_cpu_requests=1, + worker_cpu_limits=1, namespace="ns", dashboard="fake-uri", head_cpu_requests=2, @@ -2922,7 +2924,7 @@ def test_cluster_up_down_buttons(mocker): MockButton.side_effect = [mock_up_button, mock_down_button] # Call the method under test - cluster_up_down_buttons(cluster) + cf_widgets.cluster_up_down_buttons(cluster) # Simulate checkbox being checked or unchecked mock_wait_ready_check_box.value = True # Simulate checkbox being checked @@ -2955,6 +2957,247 @@ def test_is_notebook_true(): assert is_notebook() is True +def test_view_clusters(mocker, capsys): + from kubernetes.client.rest import ApiException + + mocker.patch("codeflare_sdk.cluster.widgets.is_notebook", return_value=False) + with pytest.warns( + UserWarning, + match="view_clusters can only be used in a Jupyter Notebook environment.", + ): + result = cf_widgets.view_clusters(namespace="default") + # Assert the function returns None when not in a notebook environment + assert result is None + + mocker.patch("codeflare_sdk.cluster.widgets.is_notebook", return_value=True) + + # Mock Kubernetes API responses + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value={"items": []}, + ) + mocker.patch("codeflare_sdk.cluster.cluster._check_aw_exists", return_value=False) + + # Return empty dataframe when no clusters are found + mocker.patch("codeflare_sdk.cluster.cluster.list_all_clusters", return_value=[]) + mocker.patch( + "codeflare_sdk.cluster.cluster.get_current_namespace", + return_value="default", + ) + df = cf_widgets._fetch_cluster_data(namespace="default") + assert df.empty + + cf_widgets.view_clusters() + captured = 
capsys.readouterr() + assert f"No clusters found in the default namespace." in captured.out + + # Assert the function returns None + assert result is None + + test_df = pd.DataFrame( + { + "Name": ["test-cluster"], + "Namespace": ["default"], + "Num Workers": ["1"], + "Head GPUs": ["0"], + "Worker GPUs": ["0"], + "Head CPU Req~Lim": ["1~1"], + "Head Memory Req~Lim": ["1Gi~1Gi"], + "Worker CPU Req~Lim": ["1~1"], + "Worker Memory Req~Lim": ["1Gi~1Gi"], + "status": ['Ready ✓'], + } + ) + + # Mock the _fetch_cluster_data function to return a test DataFrame + mocker.patch( + "codeflare_sdk.cluster.widgets._fetch_cluster_data", return_value=test_df + ) + + # Mock the Cluster class and related methods + mocker.patch("codeflare_sdk.cluster.Cluster") + mocker.patch("codeflare_sdk.cluster.ClusterConfiguration") + + with patch("ipywidgets.ToggleButtons") as MockToggleButtons, patch( + "ipywidgets.Button" + ) as MockButton, patch("ipywidgets.Output") as MockOutput, patch( + "ipywidgets.HBox" + ), patch( + "ipywidgets.VBox" + ), patch( + "IPython.display.display" + ) as mock_display, patch( + "IPython.display.HTML" + ), patch( + "codeflare_sdk.cluster.widgets.Javascript" + ) as mock_javascript: + # Create mock widget instances + mock_toggle = MagicMock() + mock_delete_button = MagicMock() + mock_list_jobs_button = MagicMock() + mock_ray_dashboard_button = MagicMock() + mock_output = MagicMock() + + # Set the return values for the mocked widgets + MockToggleButtons.return_value = mock_toggle + MockButton.side_effect = [ + mock_delete_button, + mock_list_jobs_button, + mock_ray_dashboard_button, + ] + MockOutput.return_value = mock_output + + # Call the function under test + cf_widgets.view_clusters() + + # Simulate selecting a cluster + mock_toggle.value = "test-cluster" + selection_change = {"new": "test-cluster"} + cf_widgets._on_cluster_click( + selection_change, mock_output, "default", mock_toggle + ) + + # Assert that the toggle options are set correctly + mock_toggle.observe.assert_called() + + # Simulate clicking the list jobs button + cf_widgets._on_list_jobs_button_click( + None, mock_toggle, test_df, mock_output, mock_output + ) + mock_javascript.assert_called_once() + + # Simulate clicking the Ray dashboard button + cf_widgets._on_ray_dashboard_button_click( + None, mock_toggle, test_df, mock_output, mock_output + ) + mock_javascript.call_count = 2 + + mocker.patch( + "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object", + ) + mock_response = mocker.MagicMock() + mock_response.status = 404 + mock_exception = ApiException(http_resp=mock_response) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + side_effect=mock_exception, + ) + + # Simulate clicking the delete button + cf_widgets._on_delete_button_click( + None, + mock_toggle, + test_df, + mock_output, + mock_output, + mock_delete_button, + mock_list_jobs_button, + mock_ray_dashboard_button, + ) + MockButton.call_count = 3 + + +def test_fetch_cluster_data(mocker): + # Return empty dataframe when no clusters are found + mocker.patch("codeflare_sdk.cluster.cluster.list_all_clusters", return_value=[]) + df = cf_widgets._fetch_cluster_data(namespace="default") + assert df.empty + + # Create mock RayCluster objects + mock_raycluster1 = MagicMock(spec=RayCluster) + mock_raycluster1.name = "test-cluster-1" + mock_raycluster1.namespace = "default" + mock_raycluster1.num_workers = 1 + mock_raycluster1.head_extended_resources = {"nvidia.com/gpu": "1"} + mock_raycluster1.worker_extended_resources = 
{"nvidia.com/gpu": "2"} + mock_raycluster1.head_cpu_requests = "500m" + mock_raycluster1.head_cpu_limits = "1000m" + mock_raycluster1.head_mem_requests = "1Gi" + mock_raycluster1.head_mem_limits = "2Gi" + mock_raycluster1.worker_cpu_requests = "1000m" + mock_raycluster1.worker_cpu_limits = "2000m" + mock_raycluster1.worker_mem_requests = "2Gi" + mock_raycluster1.worker_mem_limits = "4Gi" + mock_raycluster1.status = MagicMock() + mock_raycluster1.status.name = "READY" + mock_raycluster1.status = RayClusterStatus.READY + + mock_raycluster2 = MagicMock(spec=RayCluster) + mock_raycluster2.name = "test-cluster-2" + mock_raycluster2.namespace = "default" + mock_raycluster2.num_workers = 2 + mock_raycluster2.head_extended_resources = {} + mock_raycluster2.worker_extended_resources = {} + mock_raycluster2.head_cpu_requests = None + mock_raycluster2.head_cpu_limits = None + mock_raycluster2.head_mem_requests = None + mock_raycluster2.head_mem_limits = None + mock_raycluster2.worker_cpu_requests = None + mock_raycluster2.worker_cpu_limits = None + mock_raycluster2.worker_mem_requests = None + mock_raycluster2.worker_mem_limits = None + mock_raycluster2.status = MagicMock() + mock_raycluster2.status.name = "SUSPENDED" + mock_raycluster2.status = RayClusterStatus.SUSPENDED + + with patch( + "codeflare_sdk.cluster.cluster.list_all_clusters", + return_value=[mock_raycluster1, mock_raycluster2], + ): + # Call the function under test + df = cf_widgets._fetch_cluster_data(namespace="default") + + # Expected DataFrame + expected_data = { + "Name": ["test-cluster-1", "test-cluster-2"], + "Namespace": ["default", "default"], + "Num Workers": [1, 2], + "Head GPUs": ["nvidia.com/gpu: 1", "0"], + "Worker GPUs": ["nvidia.com/gpu: 2", "0"], + "Head CPU Req~Lim": ["500m~1000m", "0~0"], + "Head Memory Req~Lim": ["1Gi~2Gi", "0~0"], + "Worker CPU Req~Lim": ["1000m~2000m", "0~0"], + "Worker Memory Req~Lim": ["2Gi~4Gi", "0~0"], + "status": [ + 'Ready ✓', + 'Suspended ❄️', + ], + } + + expected_df = pd.DataFrame(expected_data) + + # Assert that the DataFrame matches expected + pd.testing.assert_frame_equal( + df.reset_index(drop=True), expected_df.reset_index(drop=True) + ) + + +def test_format_status(): + # Test each possible status + test_cases = [ + (RayClusterStatus.READY, 'Ready ✓'), + ( + RayClusterStatus.SUSPENDED, + 'Suspended ❄️', + ), + (RayClusterStatus.FAILED, 'Failed ✗'), + (RayClusterStatus.UNHEALTHY, 'Unhealthy'), + (RayClusterStatus.UNKNOWN, 'Unknown'), + ] + + for status, expected_output in test_cases: + assert ( + cf_widgets._format_status(status) == expected_output + ), f"Failed for status: {status}" + + # Test an unrecognized status + unrecognized_status = "NotAStatus" + assert ( + cf_widgets._format_status(unrecognized_status) == "NotAStatus" + ), "Failed for unrecognized status" + + # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}unit-test-no-kueue.yaml") diff --git a/ui-tests/tests/widget_notebook_example.test.ts b/ui-tests/tests/widget_notebook_example.test.ts index 798c2eb6..823a73f4 100644 --- a/ui-tests/tests/widget_notebook_example.test.ts +++ b/ui-tests/tests/widget_notebook_example.test.ts @@ -30,11 +30,16 @@ test.describe("Visual Regression", () => { tmpPath, }) => { const notebook = "3_widget_example.ipynb"; + const namespace = 'default'; await page.notebook.openByPath(`${tmpPath}/${notebook}`); await page.notebook.activate(notebook); + // Hide the cell toolbar before capturing the screenshots + await page.addStyleTag({ content: '.jp-cell-toolbar { 
display: none !important; }' }); + const captures: (Buffer | null)[] = []; // Array to store cell screenshots const cellCount = await page.notebook.getCellCount(); + console.log(`Cell count: ${cellCount}`); // Run all cells and capture their screenshots await page.notebook.runCellByCell({ @@ -43,7 +48,6 @@ test.describe("Visual Regression", () => { if (cell && (await cell.isVisible())) { captures[cellIndex] = await cell.screenshot(); // Save the screenshot by cell index } - await page.addStyleTag({ content: '.jp-cell-toolbar { display: none !important; }' }); }, }); @@ -59,25 +63,27 @@ test.describe("Visual Regression", () => { } } - const widgetCellIndex = 3; + // At this point, all cells have been ran, and their screenshots have been captured. + // We now interact with the widgets in the notebook. + const upDownWidgetCellIndex = 3; // 4 on OpenShift - await waitForWidget(page, widgetCellIndex, 'input[type="checkbox"]'); - await waitForWidget(page, widgetCellIndex, 'button:has-text("Cluster Down")'); - await waitForWidget(page, widgetCellIndex, 'button:has-text("Cluster Up")'); + await waitForWidget(page, upDownWidgetCellIndex, 'input[type="checkbox"]'); + await waitForWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Down")'); + await waitForWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Up")'); - await interactWithWidget(page, widgetCellIndex, 'input[type="checkbox"]', async (checkbox) => { + await interactWithWidget(page, upDownWidgetCellIndex, 'input[type="checkbox"]', async (checkbox) => { await checkbox.click(); const isChecked = await checkbox.isChecked(); expect(isChecked).toBe(true); }); - await interactWithWidget(page, widgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { + await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { await button.click(); const clusterDownMessage = await page.waitForSelector('text=No instances found, nothing to be done.', { timeout: 5000 }); expect(clusterDownMessage).not.toBeNull(); }); - await interactWithWidget(page, widgetCellIndex, 'button:has-text("Cluster Up")', async (button) => { + await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Up")', async (button) => { await button.click(); const successMessage = await page.waitForSelector('text=Ray Cluster: \'raytest\' has successfully been created', { timeout: 10000 }); @@ -95,13 +101,51 @@ test.describe("Visual Regression", () => { await runPreviousCell(page, cellCount, '(, True)'); - await interactWithWidget(page, widgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { + await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { await button.click(); const clusterDownMessage = await page.waitForSelector('text=Ray Cluster: \'raytest\' has successfully been deleted', { timeout: 5000 }); expect(clusterDownMessage).not.toBeNull(); }); await runPreviousCell(page, cellCount, '(, False)'); + + // view_clusters table with buttons + await interactWithWidget(page, upDownWidgetCellIndex, 'input[type="checkbox"]', async (checkbox) => { + await checkbox.click(); + const isChecked = await checkbox.isChecked(); + expect(isChecked).toBe(false); + }); + + await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Up")', async (button) => { + await button.click(); + const successMessage = await page.waitForSelector('text=Ray Cluster: \'raytest\' has successfully been created', { timeout: 10000 }); + 
expect(successMessage).not.toBeNull(); + }); + + const viewClustersCellIndex = 4; // 5 on OpenShift + await page.notebook.runCell(cellCount - 2, true); + await interactWithWidget(page, viewClustersCellIndex, 'button:has-text("Open Ray Dashboard")', async (button) => { + await button.click(); + const successMessage = await page.waitForSelector('text=Opening Ray Dashboard for raytest cluster', { timeout: 5000 }); + expect(successMessage).not.toBeNull(); + }); + + await interactWithWidget(page, viewClustersCellIndex, 'button:has-text("View Jobs")', async (button) => { + await button.click(); + const successMessage = await page.waitForSelector('text=Opening Ray Jobs Dashboard for raytest cluster', { timeout: 5000 }); + expect(successMessage).not.toBeNull(); + }); + + await interactWithWidget(page, viewClustersCellIndex, 'button:has-text("Delete Cluster")', async (button) => { + await button.click(); + + const noClustersMessage = await page.waitForSelector(`text=No clusters found in the ${namespace} namespace.`, { timeout: 5000 }); + expect(noClustersMessage).not.toBeNull(); + const successMessage = await page.waitForSelector(`text=Cluster raytest in the ${namespace} namespace was deleted successfully.`, { timeout: 5000 }); + expect(successMessage).not.toBeNull(); + }); + + await runPreviousCell(page, cellCount, '(, False)'); }); }); diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-4-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-4-linux.png index 9d881da2855f221ac585377caeb0773a740a7df8..691e7124f118370dd40eb36b4c0e423830a99a33 100644 GIT binary patch literal 3461 zcmYjUc{tQv8@5bBVoYP~8eXJKLWqg#D6GcYhE zfH4m%6Zj2Q)YkzIMlWMsErzllAsU#ZuCJ|W;+wUc>5RFhE7*>PF`acRLSj(aT6Vhm zmPvZ7C&jfGoWx4S(vVD6pOOqHwBvjQ@jbU?tKhFK;F$CpOi3u_PU&PJ$LDTSSyJBl zDvQTV1mt7YS|R0M(LwWuQC$Sg9V`0EFXqbo7kUpX>s8QOBax47`Iy2qkWki{5W#hW z6QO^GkP78K+iQOY>p~2|Tk_m^?VOw(4iN={0G`S|!KWo9QQ)43icCM6}M zr|+0OaHBRIjJh`m%`p`i(;`ps<`7!&cs7;@g2_4FQlc?YOiXOpMJcn}>bmZ+W5=`z zeIp|(Dk^^F2BOdyf^9ixeN|OTi%piQ59gIv1qBWJEA_p7!GHh#x04g?qMSskL2PVn zE}q!jv$26QHa0djHTCkMhcO>NoSm2#OYXdl8lt`n&6kR3@m-(ktUR^(UP?yhl`)Y$ zGQYOgkH6dJ*Tmghw@yK!vhK7eCHx@C3ceEZb!cmAYe9MWLw|q&!pPqj?X*%FbE0OgD(&wAS1 z+iT9PO}FcaVV#|w{r&6o#9?^tTeoiIy>4m>ZjTpsQM!XdP@94T-{)p#)`KLH&!C@Z zi-|lto>vd9lQWpM%8`5)w8Rn!)<|_}eIN508-sKRW)@nJSC(B7DBT zcjR855)rY&U^Ma`+jM`4H92Rbug^%%S7)L>(Z6|ZE^7lc(!^-Fgfa^pPwIJlo1;3gTvYI0WKo= z`BfAZBlBNt5%>fIEO3FLJ8NqmSBgqYyIWcaH8rM113Yl}L**X`ho^{^Ete{rmUhu3vM}EE=HDaI#2cke-^Fny|2NL_`D(FD@;8f-GBY zu{=AQnv|4JajdJa*Ae4ha1>n`tDRn28vFkJvZ5l7px}+7fq;MjIgWjh_-^1KU~m2a z&C}EK_rU=c-gZY=E%3HX+T!Th^XFaN+=d=hgK9fGJj}?*_^_n=OeE~Gs?W-o-rjGQ zl2TF>%=*U^E1_T`?v0y6t*wwJ`L?#UHV_0XIVAobFf=`vYEanU+Nv!DhndO9$ZV~zC+Ug9+1tj)v)iW+R>@qx#MIQm!-M@W zBLGnXA3r~BVF8(_C&bx4HWtXw{?@vAWo~YcMssp-fb;VMiY_iL;xku$uc)f3LNq6Q zr{e%bqO9_=vUwOOJ3G6ladEYlWoc>O3kU%0yU%#ek5gyiB z$}&Oh0IUFMkZIkc#igaCz5RVQgm{##gZwVgNT4GZ8aH5RU2UqfYrZxfK$mK&s~_QT zP@MnTln!273=tdm^yyHEIrGhxw97`&m=^o0ex^h74hK&Y6O-UzwG}e>;&~M6waGc( zs+tLu#|wo9*8NZM@v97NhWI24!` zhjBwRU0hr=4gp?x<{C(eMK!xP$$@ddi}dj$lenRwVQ3=guYiA-sDYttsao-d{MEH3WH?$*{8KR-WfxPp=rpn!isfU1g0m>3MNu=ucNXvkD7 zv14}KPa@3hjasp3sR2me&~RyadA#yI8jIx~4!^)AEnWQS)AfW;vK-sg6G8cYj7Cw> z(P6!&L=y!8SE9l5<9O}8SVdP?S8Kmno`bw&azKvAWHPeUn7Far;UEuXxec$jXmqDG zJyY9TSbk5HOzu2`&T(;aVypy}!7E^EZ$ItlZq#%4;te}dIWNXWC`#|&Cg@dB5uHw- zo}SKp@q(m1*&Goa9Xw^bxN=o3dZIJGuz(5SVyQ3pLE6b#KFztf%r&tsU1^DT;y>j<;CpNnqp9RxLgn-z#eJ$&p(mWpuaSv;Q3N7va?G~i?MKkX|^K1GC@xq_^wH& z?8Cy!)z#ID_j^KRcUD&)Iys$P@tK!bR4k-80&mdaus%6CIaq9_sfO3KljWN%>L*8Y 
[... GIT binary patch data for widgets-cell-4-linux.png omitted (non-text PNG snapshot content) ...]
z$eM)($oY))bPi_b#Q1o11B1wU19})bIy$%+f<>SM$k2Yi<f2L~rUJ|1u}pZQ=K zpuz+W-IUZ+0to2tv%e{6@E# ze1p8Hx1UP$@}y*Bf@@bmCrnICEiElNIy%5OZEbC4W@b;aYHI}obG`zzg1}?k6S=y& z3dRb2*2>PVw5SNt->+9^MLkd~f)sO$RZQ9#!7ypM%sfj)Q2%z;Lf zl$2oW4A_&7j_%phfZp!@AvAH@sQGsQasakbR3ZYryp>s5ZPU}*nwpa!4FGyb_Et4t z5g2%NeI3kSACQ=$;@|-;V62>9|5|8JP!EW2?(qv6#af^D{t7j=mzQ&UcwA0RY0VVj z!=jUtri!(zva_>;W6sW80fv?sX=tEcULqL{fZr;ss?yWaHV6e!7Lt;k3nFTOYAGVA`n=#xj%o_nh#~HteB9JCcUILvvtN~XPf4aXkjO( zr-vwUw>`Zk6A+gH2m#LnS4)BT-AT`j2imJjO2R5CDw31QlRr#=Jv=1R>tusq*8q?w zut-N3xj)TxU1|qsBN>Z`HEmh$j-WLG?JKJ>mKiuXOj$}w3PAI&I3hUsi3;#8DbYXg zz-F9=+}1w1+sz%kww5H*JU$_z8)0fQ{g91b4>0=6I`Ir9N*SP60~Sr-PPO=~OU?xGQtkbw9Aea2n^?Q%GKh(Z~x zO{a%^!@zs|_%Ww;B|5E%xp_~nR3IMZ!o#N5SXfw4==}NqJwI->-CQGx=ExTs00cJYvnQ-_gUK&1r{0PEsVOd#SdAWnL^UnT$zQPp1@%Y$Scw{6s zYH?Zc1aK;d3r}c4Sf1Hub#Qb97*R}n&BMi2_TvXyNBN?dBLNjvL?+88CJ}!AQ=kO* z=UYL9{qeDoCOy#nLIAL5ZeAWidU;Nc@o27WwO36|4JH;A9=#!&ufsg2QQfn(2?jBD zQkTbm+Rt2zSq0y9+%9O^s_Evml7TsYABW35csagl;5W?ckW6lX8nC4T{;5^(K#PG9)Y|H&p+QnX z;`i&@9^}7{I$6La{r&y3v#CZo>gwtxB_+XlrXK_N5#Jo+-rigB6TGPH(HnX!& zH(neauKT^lfvY?XkYI}4e2_qT7Y>Ra) z$SL;9`a0M$YWBNt$oE_;ykMknu>rPWW7}F^p9ERg#o=AG&d%Q6#Ms!%!h(f`MZL>5$*zuyiV6=; z1)%#85?=4r;*t`8G~ioTS63k4T3TMVWDd^|_W^YR4q95CrWX$`zTa-NI!_?~PHF4% z?92hJzq`8|bj7(3!z8*pZ@p(^W{&MOAq;U|>n8{4NLLu?_U7gzbo2$Nq;{=MazH>= zr77a4LhAdxk9btVuzjzKgGJE0wvLY7Y^{Q(W@3TU{oO4uFK_G3Q5X>Wz>;%`PGpsp zl`#nkKb2WPB6y*{kiu^bG||hKF99wOL`2v4vBwL5G>wW<`d{y(#4yTRd{a?Z2Zad% zK|!GLL9!?H1V~u>>B_F&-c5kSyj#M--@ktY8{%MN15^RMOM;L8X7+@}?`Cm%*%)Mh ze|LOG$;tSw#sHT9kOJvCSq5!&Sd^GaH3OmX=hhPWbFu1SMn(nzD^MFn8YRzf z4(=BW_4M@m5Qu?+0k@qgBMS=|8z9Y0%*+Hlj#h9e1l&D6K~V(12oN0roh9>Iu=p(= z$7XJBZd6q10F2ZkuHk#uw;&SmnUCJ1(CiQi93X^(S9x+d(nQ=i4{aXc;SFsf^Fh`I zSmu0Z`glFlZ)8cp9|x`?T% zDJT_~52P@1a75sZTwh;fU|@i7IEo@C0wV&#HQapIh)m4$1ULt%paAT+?xPgWH_$>P z|9QsQnTDn&#Qk7j-XqXH_Sdhi5F+MC8i-<+^1sDB9|JESpdXsLp}i-&qV3_z<6&PN zFT73rPeM+9GFa7&d;a2f7om>H(`?HT=acWjNQV8~FA~TWC#Pdz?8>8ARy6-CN>O)i zkQf1Z?)vmTC?-1EVW|^y!p{Z+?d+%qygp?J)2??|TpLIQ(>QH@_+SC@^nbrp3!(adJ7mfV?zkL02j}g{I>_I@bUI literal 0 HcmV?d00001 From e9786a22c5cf695f10aba1a18a3ac4f15c85d4cb Mon Sep 17 00:00:00 2001 From: codeflare-machine-account <138894154+codeflare-machine-account@users.noreply.github.com> Date: Fri, 27 Sep 2024 11:46:55 +0100 Subject: [PATCH 348/496] Changes in docs for release: v0.21.0 (#687) Co-authored-by: codeflare-machine-account --- .../cluster/cluster.html | 221 +++-- .../cluster/config.html | 94 ++- .../detailed-documentation/cluster/index.html | 9 + .../detailed-documentation/cluster/model.html | 74 +- .../cluster/widgets.html | 758 ++++++++++++++++++ docs/detailed-documentation/index.html | 1 + docs/detailed-documentation/job/ray_jobs.html | 13 +- .../utils/generate_yaml.html | 50 +- .../utils/pretty_print.html | 12 +- 9 files changed, 1071 insertions(+), 161 deletions(-) create mode 100644 docs/detailed-documentation/cluster/widgets.html diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html index 5c8688eb..5276ee94 100644 --- a/docs/detailed-documentation/cluster/cluster.html +++ b/docs/detailed-documentation/cluster/cluster.html @@ -51,6 +51,7 @@

    Module codeflare_sdk.cluster.cluster

    """ import re +import subprocess from time import sleep from typing import List, Optional, Tuple, Dict @@ -74,6 +75,10 @@

    Module codeflare_sdk.cluster.cluster

 RayCluster,
 RayClusterStatus,
 )
+from .widgets import (
+ cluster_up_down_buttons,
+ is_notebook,
+)
 from kubernetes import client, config
 from kubernetes.utils import parse_quantity
 import yaml
@@ -103,6 +108,8 @@

    Module codeflare_sdk.cluster.cluster

 self.app_wrapper_yaml = self.create_app_wrapper()
 self._job_submission_client = None
 self.app_wrapper_name = self.config.name
+ if is_notebook():
+ cluster_up_down_buttons(self)
 @property
 def _client_headers(self):
@@ -188,8 +195,12 @@

    Module codeflare_sdk.cluster.cluster

    plural="appwrappers", body=aw, ) + print(f"AppWrapper: '{self.config.name}' has successfully been created") else: self._component_resources_up(namespace, api_instance) + print( + f"Ray Cluster: '{self.config.name}' has successfully been created" + ) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -230,8 +241,12 @@

    Module codeflare_sdk.cluster.cluster

    plural="appwrappers", name=self.app_wrapper_name, ) + print(f"AppWrapper: '{self.config.name}' has successfully been deleted") else: self._component_resources_down(namespace, api_instance) + print( + f"Ray Cluster: '{self.config.name}' has successfully been deleted" + ) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -480,6 +495,18 @@

    Module codeflare_sdk.cluster.cluster

    name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], machine_types=machine_types, + head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["cpu"], + head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["cpu"], + head_memory_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["memory"], + head_memory_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["memory"], num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" @@ -578,41 +605,25 @@

    Module codeflare_sdk.cluster.cluster

    def get_current_namespace(): # pragma: no cover - if api_config_handler() != None: - if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"): - try: - file = open( - "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" - ) - active_context = file.readline().strip("\n") - return active_context - except Exception as e: - print("Unable to find current namespace") - return None - else: + if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"): + try: + file = open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r") + active_context = file.readline().strip("\n") + return active_context + except Exception as e: print("Unable to find current namespace") - return None - else: - if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"): - try: - file = open( - "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r" - ) - active_context = file.readline().strip("\n") - return active_context - except Exception as e: - print( - "unable to gather namespace from /var/run/secrets/kubernetes.io/serviceaccount/namespace trying to gather from current context" - ) - else: - try: - _, active_context = config.list_kube_config_contexts(config_check()) - except Exception as e: - return _kube_api_error_handling(e) - try: - return active_context["context"]["namespace"] - except KeyError: - return None + + if api_config_handler() != None: + return None + print("trying to gather from current context") + try: + _, active_context = config.list_kube_config_contexts(config_check()) + except Exception as e: + return _kube_api_error_handling(e) + try: + return active_context["context"]["namespace"] + except KeyError: + return None def get_cluster( @@ -884,24 +895,33 @@

    Module codeflare_sdk.cluster.cluster

    name=rc["metadata"]["name"], status=status, # for now we are not using autoscaling so same replicas is fine - workers=rc["spec"]["workerGroupSpecs"][0]["replicas"], - worker_mem_max=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + num_workers=rc["spec"]["workerGroupSpecs"][0]["replicas"], + worker_mem_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["limits"]["memory"], - worker_mem_min=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + worker_mem_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" ][0]["resources"]["requests"]["memory"], - worker_cpu=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ - 0 - ]["resources"]["limits"]["cpu"], + worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["cpu"], + worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["cpu"], worker_extended_resources=worker_extended_resources, namespace=rc["metadata"]["namespace"], - head_cpus=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["cpu"], - head_mem=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0][ - "resources" - ]["limits"]["memory"], + head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["requests"]["cpu"], + head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["limits"]["cpu"], + head_mem_requests=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["requests"]["memory"], + head_mem_limits=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][ + 0 + ]["resources"]["limits"]["memory"], head_extended_resources=head_extended_resources, dashboard=dashboard_url, ) @@ -923,15 +943,18 @@

    Module codeflare_sdk.cluster.cluster

    ray = RayCluster( name=cluster.config.name, status=cluster.status(print_to_console=False)[0], - workers=cluster.config.num_workers, - worker_mem_min=cluster.config.worker_memory_requests, - worker_mem_max=cluster.config.worker_memory_limits, - worker_cpu=cluster.config.worker_cpu_requests, + num_workers=cluster.config.num_workers, + worker_mem_requests=cluster.config.worker_memory_requests, + worker_mem_limits=cluster.config.worker_memory_limits, + worker_cpu_requests=cluster.config.worker_cpu_requests, + worker_cpu_limits=cluster.config.worker_cpu_limits, worker_extended_resources=cluster.config.worker_extended_resource_requests, namespace=cluster.config.namespace, dashboard=cluster.cluster_dashboard_uri(), - head_cpus=cluster.config.head_cpus, - head_mem=cluster.config.head_memory, + head_mem_requests=cluster.config.head_memory_requests, + head_mem_limits=cluster.config.head_memory_limits, + head_cpu_requests=cluster.config.head_cpu_requests, + head_cpu_limits=cluster.config.head_cpu_limits, head_extended_resources=cluster.config.head_extended_resource_requests, ) if ray.status == CodeFlareClusterStatus.READY: @@ -997,41 +1020,25 @@

    Functions

    Expand source code
    def get_current_namespace():  # pragma: no cover
    -    if api_config_handler() != None:
    -        if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"):
    -            try:
    -                file = open(
    -                    "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r"
    -                )
    -                active_context = file.readline().strip("\n")
    -                return active_context
    -            except Exception as e:
    -                print("Unable to find current namespace")
    -                return None
    -        else:
    +    if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"):
    +        try:
    +            file = open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r")
    +            active_context = file.readline().strip("\n")
    +            return active_context
    +        except Exception as e:
                 print("Unable to find current namespace")
    -            return None
    -    else:
    -        if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"):
    -            try:
    -                file = open(
    -                    "/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r"
    -                )
    -                active_context = file.readline().strip("\n")
    -                return active_context
    -            except Exception as e:
    -                print(
    -                    "unable to gather namespace from /var/run/secrets/kubernetes.io/serviceaccount/namespace trying to gather from current context"
    -                )
    -        else:
    -            try:
    -                _, active_context = config.list_kube_config_contexts(config_check())
    -            except Exception as e:
    -                return _kube_api_error_handling(e)
    -            try:
    -                return active_context["context"]["namespace"]
    -            except KeyError:
    -                return None
    +
    +    if api_config_handler() != None:
    +        return None
    +    print("trying to gather from current context")
    +    try:
    +        _, active_context = config.list_kube_config_contexts(config_check())
    +    except Exception as e:
    +        return _kube_api_error_handling(e)
    +    try:
    +        return active_context["context"]["namespace"]
    +    except KeyError:
    +        return None
    @@ -1123,6 +1130,8 @@

    Classes

    self.app_wrapper_yaml = self.create_app_wrapper() self._job_submission_client = None self.app_wrapper_name = self.config.name + if is_notebook(): + cluster_up_down_buttons(self) @property def _client_headers(self): @@ -1208,8 +1217,12 @@

    Classes

    plural="appwrappers", body=aw, ) + print(f"AppWrapper: '{self.config.name}' has successfully been created") else: self._component_resources_up(namespace, api_instance) + print( + f"Ray Cluster: '{self.config.name}' has successfully been created" + ) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -1250,8 +1263,12 @@

    Classes

    plural="appwrappers", name=self.app_wrapper_name, ) + print(f"AppWrapper: '{self.config.name}' has successfully been deleted") else: self._component_resources_down(namespace, api_instance) + print( + f"Ray Cluster: '{self.config.name}' has successfully been deleted" + ) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -1500,6 +1517,18 @@

    Classes

    name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], machine_types=machine_types, + head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["cpu"], + head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["cpu"], + head_memory_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["memory"], + head_memory_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["memory"], num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" @@ -1742,8 +1771,12 @@

    Methods

    plural="appwrappers", name=self.app_wrapper_name, ) + print(f"AppWrapper: '{self.config.name}' has successfully been deleted") else: self._component_resources_down(namespace, api_instance) + print( + f"Ray Cluster: '{self.config.name}' has successfully been deleted" + ) except Exception as e: # pragma: no cover return _kube_api_error_handling(e)
    @@ -1779,6 +1812,18 @@

    Methods

    name=rc["metadata"]["name"], namespace=rc["metadata"]["namespace"], machine_types=machine_types, + head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["cpu"], + head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["cpu"], + head_memory_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["requests"]["memory"], + head_memory_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0]["resources"]["limits"]["memory"], num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ "containers" @@ -2022,8 +2067,12 @@

    Methods

    plural="appwrappers", body=aw, ) + print(f"AppWrapper: '{self.config.name}' has successfully been created") else: self._component_resources_up(namespace, api_instance) + print( + f"Ray Cluster: '{self.config.name}' has successfully been created" + ) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) diff --git a/docs/detailed-documentation/cluster/config.html b/docs/detailed-documentation/cluster/config.html index 87ad3b77..b329fb03 100644 --- a/docs/detailed-documentation/cluster/config.html +++ b/docs/detailed-documentation/cluster/config.html @@ -107,10 +107,16 @@

    Module codeflare_sdk.cluster.config

    name: str namespace: Optional[str] = None head_info: List[str] = field(default_factory=list) - head_cpus: Union[int, str] = 2 - head_memory: Union[int, str] = 8 + head_cpu_requests: Union[int, str] = 2 + head_cpu_limits: Union[int, str] = 2 + head_cpus: Optional[Union[int, str]] = None # Deprecating + head_memory_requests: Union[int, str] = 8 + head_memory_limits: Union[int, str] = 8 + head_memory: Optional[Union[int, str]] = None # Deprecating head_gpus: Optional[int] = None # Deprecating - head_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + head_extended_resource_requests: Dict[str, Union[str, int]] = field( + default_factory=dict + ) machine_types: List[str] = field( default_factory=list ) # ["m4.xlarge", "g4dn.xlarge"] @@ -132,7 +138,9 @@

    Module codeflare_sdk.cluster.config

    write_to_file: bool = False verify_tls: bool = True labels: Dict[str, str] = field(default_factory=dict) - worker_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + worker_extended_resource_requests: Dict[str, Union[str, int]] = field( + default_factory=dict + ) extended_resource_mapping: Dict[str, str] = field(default_factory=dict) overwrite_default_resource_mapping: bool = False local_queue: Optional[str] = None @@ -215,14 +223,21 @@

    Module codeflare_sdk.cluster.config

    self.worker_memory_limits = f"{self.worker_memory_limits}G" def _memory_to_string(self): - if isinstance(self.head_memory, int): - self.head_memory = f"{self.head_memory}G" + if isinstance(self.head_memory_requests, int): + self.head_memory_requests = f"{self.head_memory_requests}G" + if isinstance(self.head_memory_limits, int): + self.head_memory_limits = f"{self.head_memory_limits}G" if isinstance(self.worker_memory_requests, int): self.worker_memory_requests = f"{self.worker_memory_requests}G" if isinstance(self.worker_memory_limits, int): self.worker_memory_limits = f"{self.worker_memory_limits}G" def _cpu_to_resource(self): + if self.head_cpus: + warnings.warn( + "head_cpus is being deprecated, use head_cpu_requests and head_cpu_limits" + ) + self.head_cpu_requests = self.head_cpu_limits = self.head_cpus if self.min_cpus: warnings.warn("min_cpus is being deprecated, use worker_cpu_requests") self.worker_cpu_requests = self.min_cpus @@ -231,6 +246,11 @@

    Module codeflare_sdk.cluster.config

    self.worker_cpu_limits = self.max_cpus def _memory_to_resource(self): + if self.head_memory: + warnings.warn( + "head_memory is being deprecated, use head_memory_requests and head_memory_limits" + ) + self.head_memory_requests = self.head_memory_limits = self.head_memory if self.min_memory: warnings.warn("min_memory is being deprecated, use worker_memory_requests") self.worker_memory_requests = f"{self.min_memory}G" @@ -282,7 +302,7 @@

    Classes

    class ClusterConfiguration -(name: str, namespace: Optional[str] = None, head_info: List[str] = <factory>, head_cpus: Union[int, str] = 2, head_memory: Union[int, str] = 8, head_gpus: Optional[int] = None, head_extended_resource_requests: Dict[str, int] = <factory>, machine_types: List[str] = <factory>, worker_cpu_requests: Union[int, str] = 1, worker_cpu_limits: Union[int, str] = 1, min_cpus: Union[int, str, ForwardRef(None)] = None, max_cpus: Union[int, str, ForwardRef(None)] = None, num_workers: int = 1, worker_memory_requests: Union[int, str] = 2, worker_memory_limits: Union[int, str] = 2, min_memory: Union[int, str, ForwardRef(None)] = None, max_memory: Union[int, str, ForwardRef(None)] = None, num_gpus: Optional[int] = None, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', appwrapper: bool = False, envs: Dict[str, str] = <factory>, image: str = '', image_pull_secrets: List[str] = <factory>, write_to_file: bool = False, verify_tls: bool = True, labels: Dict[str, str] = <factory>, worker_extended_resource_requests: Dict[str, int] = <factory>, extended_resource_mapping: Dict[str, str] = <factory>, overwrite_default_resource_mapping: bool = False, local_queue: Optional[str] = None) +(name: str, namespace: Optional[str] = None, head_info: List[str] = <factory>, head_cpu_requests: Union[int, str] = 2, head_cpu_limits: Union[int, str] = 2, head_cpus: Union[int, str, ForwardRef(None)] = None, head_memory_requests: Union[int, str] = 8, head_memory_limits: Union[int, str] = 8, head_memory: Union[int, str, ForwardRef(None)] = None, head_gpus: Optional[int] = None, head_extended_resource_requests: Dict[str, Union[str, int]] = <factory>, machine_types: List[str] = <factory>, worker_cpu_requests: Union[int, str] = 1, worker_cpu_limits: Union[int, str] = 1, min_cpus: Union[int, str, ForwardRef(None)] = None, max_cpus: Union[int, str, ForwardRef(None)] = None, num_workers: int = 1, worker_memory_requests: Union[int, str] = 2, worker_memory_limits: Union[int, str] = 2, min_memory: Union[int, str, ForwardRef(None)] = None, max_memory: Union[int, str, ForwardRef(None)] = None, num_gpus: Optional[int] = None, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', appwrapper: bool = False, envs: Dict[str, str] = <factory>, image: str = '', image_pull_secrets: List[str] = <factory>, write_to_file: bool = False, verify_tls: bool = True, labels: Dict[str, str] = <factory>, worker_extended_resource_requests: Dict[str, Union[str, int]] = <factory>, extended_resource_mapping: Dict[str, str] = <factory>, overwrite_default_resource_mapping: bool = False, local_queue: Optional[str] = None)

    This dataclass is used to specify resource requirements and other details, and @@ -354,10 +374,16 @@

    Classes

    name: str namespace: Optional[str] = None head_info: List[str] = field(default_factory=list) - head_cpus: Union[int, str] = 2 - head_memory: Union[int, str] = 8 + head_cpu_requests: Union[int, str] = 2 + head_cpu_limits: Union[int, str] = 2 + head_cpus: Optional[Union[int, str]] = None # Deprecating + head_memory_requests: Union[int, str] = 8 + head_memory_limits: Union[int, str] = 8 + head_memory: Optional[Union[int, str]] = None # Deprecating head_gpus: Optional[int] = None # Deprecating - head_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + head_extended_resource_requests: Dict[str, Union[str, int]] = field( + default_factory=dict + ) machine_types: List[str] = field( default_factory=list ) # ["m4.xlarge", "g4dn.xlarge"] @@ -379,7 +405,9 @@

    Classes

    write_to_file: bool = False verify_tls: bool = True labels: Dict[str, str] = field(default_factory=dict) - worker_extended_resource_requests: Dict[str, int] = field(default_factory=dict) + worker_extended_resource_requests: Dict[str, Union[str, int]] = field( + default_factory=dict + ) extended_resource_mapping: Dict[str, str] = field(default_factory=dict) overwrite_default_resource_mapping: bool = False local_queue: Optional[str] = None @@ -462,14 +490,21 @@

    Classes

    self.worker_memory_limits = f"{self.worker_memory_limits}G" def _memory_to_string(self): - if isinstance(self.head_memory, int): - self.head_memory = f"{self.head_memory}G" + if isinstance(self.head_memory_requests, int): + self.head_memory_requests = f"{self.head_memory_requests}G" + if isinstance(self.head_memory_limits, int): + self.head_memory_limits = f"{self.head_memory_limits}G" if isinstance(self.worker_memory_requests, int): self.worker_memory_requests = f"{self.worker_memory_requests}G" if isinstance(self.worker_memory_limits, int): self.worker_memory_limits = f"{self.worker_memory_limits}G" def _cpu_to_resource(self): + if self.head_cpus: + warnings.warn( + "head_cpus is being deprecated, use head_cpu_requests and head_cpu_limits" + ) + self.head_cpu_requests = self.head_cpu_limits = self.head_cpus if self.min_cpus: warnings.warn("min_cpus is being deprecated, use worker_cpu_requests") self.worker_cpu_requests = self.min_cpus @@ -478,6 +513,11 @@

    Classes

    self.worker_cpu_limits = self.max_cpus def _memory_to_resource(self): + if self.head_memory: + warnings.warn( + "head_memory is being deprecated, use head_memory_requests and head_memory_limits" + ) + self.head_memory_requests = self.head_memory_limits = self.head_memory if self.min_memory: warnings.warn("min_memory is being deprecated, use worker_memory_requests") self.worker_memory_requests = f"{self.min_memory}G" @@ -531,11 +571,19 @@

    Class variables

    -
    var head_cpus : Union[int, str]
    +
    var head_cpu_limits : Union[int, str]
    +
    +
    +
    +
    var head_cpu_requests : Union[int, str]
    +
    +
    +
    +
    var head_cpus : Union[int, str, ForwardRef(None)]
    -
    var head_extended_resource_requests : Dict[str, int]
    +
    var head_extended_resource_requests : Dict[str, Union[str, int]]
    @@ -547,7 +595,15 @@

    Class variables

    -
    var head_memory : Union[int, str]
    +
    var head_memory : Union[int, str, ForwardRef(None)]
    +
    +
    +
    +
    var head_memory_limits : Union[int, str]
    +
    +
    +
    +
    var head_memory_requests : Union[int, str]
    @@ -623,7 +679,7 @@

    Class variables

    -
    var worker_extended_resource_requests : Dict[str, int]
    +
    var worker_extended_resource_requests : Dict[str, Union[str, int]]
    @@ -663,11 +719,15 @@

    appwrapper
  • envs
  • extended_resource_mapping
  • +
  • head_cpu_limits
  • +
  • head_cpu_requests
  • head_cpus
  • head_extended_resource_requests
  • head_gpus
  • head_info
  • head_memory
  • +
  • head_memory_limits
  • +
  • head_memory_requests
  • image
  • image_pull_secrets
  • labels
diff --git a/docs/detailed-documentation/cluster/index.html b/docs/detailed-documentation/cluster/index.html
index a7967885..f8c04fa2 100644
--- a/docs/detailed-documentation/cluster/index.html
+++ b/docs/detailed-documentation/cluster/index.html
@@ -49,6 +49,10 @@

    Module codeflare_sdk.cluster

    list_all_clusters,
)
+from .widgets import (
+    view_clusters,
+)
+
from .awload import AWManager
    @@ -81,6 +85,10 @@

    Sub-modules

    states and AppWrapper states, and CodeFlare cluster states, as well as …

    +
    codeflare_sdk.cluster.widgets
    +
    +

    The widgets sub-module contains the ui widgets created using the ipywidgets package.

    +
    @@ -108,6 +116,7 @@

    Index

  • codeflare_sdk.cluster.cluster
  • codeflare_sdk.cluster.config
  • codeflare_sdk.cluster.model
  • +
  • codeflare_sdk.cluster.widgets
diff --git a/docs/detailed-documentation/cluster/model.html b/docs/detailed-documentation/cluster/model.html
index a07027da..7d87e34f 100644
--- a/docs/detailed-documentation/cluster/model.html
+++ b/docs/detailed-documentation/cluster/model.html
@@ -54,6 +54,7 @@

    Module codeflare_sdk.cluster.model

from dataclasses import dataclass, field
from enum import Enum
import typing
+from typing import Union


class RayClusterStatus(Enum):
@@ -106,12 +107,15 @@

    Module codeflare_sdk.cluster.model

    name: str
    status: RayClusterStatus
-    head_cpus: int
-    head_mem: str
-    workers: int
-    worker_mem_min: str
-    worker_mem_max: str
-    worker_cpu: int
+    head_cpu_requests: int
+    head_cpu_limits: int
+    head_mem_requests: str
+    head_mem_limits: str
+    num_workers: int
+    worker_mem_requests: str
+    worker_mem_limits: str
+    worker_cpu_requests: Union[int, str]
+    worker_cpu_limits: Union[int, str]
    namespace: str
    dashboard: str
    worker_extended_resources: typing.Dict[str, int] = field(default_factory=dict)
@@ -293,7 +297,7 @@

    Class variables

class RayCluster
-(name: str, status: RayClusterStatus, head_cpus: int, head_mem: str, workers: int, worker_mem_min: str, worker_mem_max: str, worker_cpu: int, namespace: str, dashboard: str, worker_extended_resources: Dict[str, int] = <factory>, head_extended_resources: Dict[str, int] = <factory>)
+(name: str, status: RayClusterStatus, head_cpu_requests: int, head_cpu_limits: int, head_mem_requests: str, head_mem_limits: str, num_workers: int, worker_mem_requests: str, worker_mem_limits: str, worker_cpu_requests: Union[int, str], worker_cpu_limits: Union[int, str], namespace: str, dashboard: str, worker_extended_resources: Dict[str, int] = <factory>, head_extended_resources: Dict[str, int] = <factory>)

    For storing information about a Ray cluster.

    @@ -309,12 +313,15 @@

    Class variables

    name: str
    status: RayClusterStatus
-    head_cpus: int
-    head_mem: str
-    workers: int
-    worker_mem_min: str
-    worker_mem_max: str
-    worker_cpu: int
+    head_cpu_requests: int
+    head_cpu_limits: int
+    head_mem_requests: str
+    head_mem_limits: str
+    num_workers: int
+    worker_mem_requests: str
+    worker_mem_limits: str
+    worker_cpu_requests: Union[int, str]
+    worker_cpu_limits: Union[int, str]
    namespace: str
    dashboard: str
    worker_extended_resources: typing.Dict[str, int] = field(default_factory=dict)
@@ -326,7 +333,11 @@

    Class variables

    -
    var head_cpus : int
    +
    var head_cpu_limits : int
    +
    +
    +
    +
    var head_cpu_requests : int
    @@ -334,7 +345,11 @@

    Class variables

    -
    var head_mem : str
    +
    var head_mem_limits : str
    +
    +
    +
    +
    var head_mem_requests : str
    @@ -346,27 +361,31 @@

    Class variables

    +
    var num_workers : int
    +
    +
    +
    var statusRayClusterStatus
    -
    var worker_cpu : int
    +
    var worker_cpu_limits : Union[int, str]
    -
    var worker_extended_resources : Dict[str, int]
    +
    var worker_cpu_requests : Union[int, str]
    -
    var worker_mem_max : str
    +
    var worker_extended_resources : Dict[str, int]
    -
    var worker_mem_min : str
    +
    var worker_mem_limits : str
    -
    var workers : int
    +
    var worker_mem_requests : str
    @@ -474,17 +493,20 @@

    RayCluster
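For reference, a short sketch constructing the dataclass with the renamed request/limit fields from the signature above (all values are illustrative; in practice the SDK builds this object itself):

    from codeflare_sdk.cluster.model import RayCluster, RayClusterStatus

    # Illustrative values only
    rc = RayCluster(
        name="raytest",
        status=RayClusterStatus.READY,
        head_cpu_requests=2,
        head_cpu_limits=2,
        head_mem_requests="8G",
        head_mem_limits="8G",
        num_workers=1,
        worker_mem_requests="8G",
        worker_mem_limits="8G",
        worker_cpu_requests=1,
        worker_cpu_limits=1,
        namespace="default",
        dashboard="https://ray-dashboard-example.apps.example.com",
    )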

diff --git a/docs/detailed-documentation/cluster/widgets.html b/docs/detailed-documentation/cluster/widgets.html
new file mode 100644
index 00000000..e07fa2ea
--- /dev/null
+++ b/docs/detailed-documentation/cluster/widgets.html
@@ -0,0 +1,758 @@
+codeflare_sdk.cluster.widgets API documentation
    +
    +
    +

    Module codeflare_sdk.cluster.widgets

    +
    +
    +

    The widgets sub-module contains the ui widgets created using the ipywidgets package.

    +
    + +Expand source code + +
    # Copyright 2024 IBM, Red Hat
    +#
    +# Licensed under the Apache License, Version 2.0 (the "License");
    +# you may not use this file except in compliance with the License.
    +# You may obtain a copy of the License at
    +#
    +#      http://www.apache.org/licenses/LICENSE-2.0
    +#
    +# Unless required by applicable law or agreed to in writing, software
    +# distributed under the License is distributed on an "AS IS" BASIS,
    +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    +# See the License for the specific language governing permissions and
    +# limitations under the License.
    +
    +"""
    +The widgets sub-module contains the ui widgets created using the ipywidgets package.
    +"""
    +import contextlib
    +import io
    +import os
    +import warnings
    +import time
    +import codeflare_sdk
    +from kubernetes import client
    +from kubernetes.client.rest import ApiException
    +import ipywidgets as widgets
    +from IPython.display import display, HTML, Javascript
    +import pandas as pd
    +from .config import ClusterConfiguration
    +from .model import RayClusterStatus
    +from ..utils.kube_api_helpers import _kube_api_error_handling
    +from .auth import config_check, api_config_handler
    +
    +
    +def cluster_up_down_buttons(cluster: "codeflare_sdk.cluster.Cluster") -> widgets.Button:
    +    """
    +    The cluster_up_down_buttons function returns two button widgets for a create and delete button.
    +    The function uses the appwrapper bool to distinguish between resource type for the tool tip.
    +    """
    +    resource = "Ray Cluster"
    +    if cluster.config.appwrapper:
    +        resource = "AppWrapper"
    +
    +    up_button = widgets.Button(
    +        description="Cluster Up",
    +        tooltip=f"Create the {resource}",
    +        icon="play",
    +    )
    +
    +    delete_button = widgets.Button(
    +        description="Cluster Down",
    +        tooltip=f"Delete the {resource}",
    +        icon="trash",
    +    )
    +
    +    wait_ready_check = wait_ready_check_box()
    +    output = widgets.Output()
    +
    +    # Display the buttons in an HBox wrapped in a VBox which includes the wait_ready Checkbox
    +    button_display = widgets.HBox([up_button, delete_button])
    +    display(widgets.VBox([button_display, wait_ready_check]), output)
    +
    +    def on_up_button_clicked(b):  # Handle the up button click event
    +        with output:
    +            output.clear_output()
    +            cluster.up()
    +
    +            # If the wait_ready Checkbox is clicked(value == True) trigger the wait_ready function
    +            if wait_ready_check.value:
    +                cluster.wait_ready()
    +
    +    def on_down_button_clicked(b):  # Handle the down button click event
    +        with output:
    +            output.clear_output()
    +            cluster.down()
    +
    +    up_button.on_click(on_up_button_clicked)
    +    delete_button.on_click(on_down_button_clicked)
    +
    +
    +def wait_ready_check_box():
    +    """
    +    The wait_ready_check_box function will return a checkbox widget used for waiting for the resource to be in the state READY.
    +    """
    +    wait_ready_check_box = widgets.Checkbox(
    +        False,
    +        description="Wait for Cluster?",
    +    )
    +    return wait_ready_check_box
    +
    +
    +def is_notebook() -> bool:
    +    """
+    The is_notebook function checks if Jupyter Notebook environment variables exist in the given environment and returns True/False based on that.
    +    """
    +    if (
    +        "PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING" in os.environ
    +        or "JPY_SESSION_NAME" in os.environ
    +    ):  # If running Jupyter NBs in VsCode or RHOAI/ODH display UI buttons
    +        return True
    +    else:
    +        return False
    +
    +
    +def view_clusters(namespace: str = None):
    +    """
    +    view_clusters function will display existing clusters with their specs, and handle user interactions.
    +    """
    +    if not is_notebook():
    +        warnings.warn(
    +            "view_clusters can only be used in a Jupyter Notebook environment."
    +        )
    +        return  # Exit function if not in Jupyter Notebook
    +
    +    from .cluster import get_current_namespace
    +
    +    if not namespace:
    +        namespace = get_current_namespace()
    +
    +    user_output = widgets.Output()
    +    raycluster_data_output = widgets.Output()
    +    url_output = widgets.Output()
    +
    +    ray_clusters_df = _fetch_cluster_data(namespace)
    +    if ray_clusters_df.empty:
    +        print(f"No clusters found in the {namespace} namespace.")
    +        return
    +
    +    classification_widget = widgets.ToggleButtons(
    +        options=ray_clusters_df["Name"].tolist(),
    +        value=ray_clusters_df["Name"].tolist()[0],
    +        description="Select an existing cluster:",
    +    )
    +    # Setting the initial value to trigger the event handler to display the cluster details.
    +    initial_value = classification_widget.value
    +    _on_cluster_click(
    +        {"new": initial_value}, raycluster_data_output, namespace, classification_widget
    +    )
    +    classification_widget.observe(
    +        lambda selection_change: _on_cluster_click(
    +            selection_change, raycluster_data_output, namespace, classification_widget
    +        ),
    +        names="value",
    +    )
    +
    +    # UI table buttons
    +    delete_button = widgets.Button(
    +        description="Delete Cluster",
    +        icon="trash",
    +        tooltip="Delete the selected cluster",
    +    )
    +    delete_button.on_click(
    +        lambda b: _on_delete_button_click(
    +            b,
    +            classification_widget,
    +            ray_clusters_df,
    +            raycluster_data_output,
    +            user_output,
    +            delete_button,
    +            list_jobs_button,
    +            ray_dashboard_button,
    +        )
    +    )
    +
    +    list_jobs_button = widgets.Button(
    +        description="View Jobs", icon="suitcase", tooltip="Open the Ray Job Dashboard"
    +    )
    +    list_jobs_button.on_click(
    +        lambda b: _on_list_jobs_button_click(
    +            b, classification_widget, ray_clusters_df, user_output, url_output
    +        )
    +    )
    +
    +    ray_dashboard_button = widgets.Button(
    +        description="Open Ray Dashboard",
    +        icon="dashboard",
    +        tooltip="Open the Ray Dashboard in a new tab",
    +        layout=widgets.Layout(width="auto"),
    +    )
    +    ray_dashboard_button.on_click(
    +        lambda b: _on_ray_dashboard_button_click(
    +            b, classification_widget, ray_clusters_df, user_output, url_output
    +        )
    +    )
    +
    +    display(widgets.VBox([classification_widget, raycluster_data_output]))
    +    display(
    +        widgets.HBox([delete_button, list_jobs_button, ray_dashboard_button]),
    +        url_output,
    +        user_output,
    +    )
    +
    +
    +def _on_cluster_click(
    +    selection_change,
    +    raycluster_data_output: widgets.Output,
    +    namespace: str,
    +    classification_widget: widgets.ToggleButtons,
    +):
    +    """
    +    _on_cluster_click handles the event when a cluster is selected from the toggle buttons, updating the output with cluster details.
    +    """
    +    new_value = selection_change["new"]
    +    raycluster_data_output.clear_output()
    +    ray_clusters_df = _fetch_cluster_data(namespace)
    +    classification_widget.options = ray_clusters_df["Name"].tolist()
    +    with raycluster_data_output:
    +        display(
    +            HTML(
    +                ray_clusters_df[ray_clusters_df["Name"] == new_value][
    +                    [
    +                        "Name",
    +                        "Namespace",
    +                        "Num Workers",
    +                        "Head GPUs",
    +                        "Head CPU Req~Lim",
    +                        "Head Memory Req~Lim",
    +                        "Worker GPUs",
    +                        "Worker CPU Req~Lim",
    +                        "Worker Memory Req~Lim",
    +                        "status",
    +                    ]
    +                ].to_html(escape=False, index=False, border=2)
    +            )
    +        )
    +
    +
    +def _on_delete_button_click(
    +    b,
    +    classification_widget: widgets.ToggleButtons,
    +    ray_clusters_df: pd.DataFrame,
    +    raycluster_data_output: widgets.Output,
    +    user_output: widgets.Output,
    +    delete_button: widgets.Button,
    +    list_jobs_button: widgets.Button,
    +    ray_dashboard_button: widgets.Button,
    +):
    +    """
    +    _on_delete_button_click handles the event when the Delete Button is clicked, deleting the selected cluster.
    +    """
    +    cluster_name = classification_widget.value
    +    namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][
    +        "Namespace"
    +    ].values[0]
    +
    +    _delete_cluster(cluster_name, namespace)
    +
    +    with user_output:
    +        user_output.clear_output()
    +        print(
    +            f"Cluster {cluster_name} in the {namespace} namespace was deleted successfully."
    +        )
    +
    +    # Refresh the dataframe
    +    new_df = _fetch_cluster_data(namespace)
    +    if new_df.empty:
    +        classification_widget.close()
    +        delete_button.close()
    +        list_jobs_button.close()
    +        ray_dashboard_button.close()
    +        with raycluster_data_output:
    +            raycluster_data_output.clear_output()
    +            print(f"No clusters found in the {namespace} namespace.")
    +    else:
    +        classification_widget.options = new_df["Name"].tolist()
    +
    +
    +def _on_ray_dashboard_button_click(
    +    b,
    +    classification_widget: widgets.ToggleButtons,
    +    ray_clusters_df: pd.DataFrame,
    +    user_output: widgets.Output,
    +    url_output: widgets.Output,
    +):
    +    """
    +    _on_ray_dashboard_button_click handles the event when the Open Ray Dashboard button is clicked, opening the Ray Dashboard in a new tab
    +    """
    +    from codeflare_sdk.cluster import Cluster
    +
    +    cluster_name = classification_widget.value
    +    namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][
    +        "Namespace"
    +    ].values[0]
    +
    +    # Suppress from Cluster Object initialisation widgets and outputs
    +    with widgets.Output(), contextlib.redirect_stdout(
    +        io.StringIO()
    +    ), contextlib.redirect_stderr(io.StringIO()):
    +        cluster = Cluster(ClusterConfiguration(cluster_name, namespace))
    +    dashboard_url = cluster.cluster_dashboard_uri()
    +
    +    with user_output:
    +        user_output.clear_output()
    +        print(f"Opening Ray Dashboard for {cluster_name} cluster:\n{dashboard_url}")
    +    with url_output:
    +        display(Javascript(f'window.open("{dashboard_url}", "_blank");'))
    +
    +
    +def _on_list_jobs_button_click(
    +    b,
    +    classification_widget: widgets.ToggleButtons,
    +    ray_clusters_df: pd.DataFrame,
    +    user_output: widgets.Output,
    +    url_output: widgets.Output,
    +):
    +    """
    +    _on_list_jobs_button_click handles the event when the View Jobs button is clicked, opening the Ray Jobs Dashboard in a new tab
    +    """
    +    from codeflare_sdk.cluster import Cluster
    +
    +    cluster_name = classification_widget.value
    +    namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][
    +        "Namespace"
    +    ].values[0]
    +
    +    # Suppress from Cluster Object initialisation widgets and outputs
    +    with widgets.Output(), contextlib.redirect_stdout(
    +        io.StringIO()
    +    ), contextlib.redirect_stderr(io.StringIO()):
    +        cluster = Cluster(ClusterConfiguration(cluster_name, namespace))
    +    dashboard_url = cluster.cluster_dashboard_uri()
    +
    +    with user_output:
    +        user_output.clear_output()
    +        print(
    +            f"Opening Ray Jobs Dashboard for {cluster_name} cluster:\n{dashboard_url}/#/jobs"
    +        )
    +    with url_output:
    +        display(Javascript(f'window.open("{dashboard_url}/#/jobs", "_blank");'))
    +
    +
    +def _delete_cluster(
    +    cluster_name: str,
    +    namespace: str,
    +    timeout: int = 5,
    +    interval: int = 1,
    +):
    +    """
    +    _delete_cluster function deletes the cluster with the given name and namespace.
    +    It optionally waits for the cluster to be deleted.
    +    """
    +    from .cluster import _check_aw_exists
    +
    +    try:
    +        config_check()
    +        api_instance = client.CustomObjectsApi(api_config_handler())
    +
    +        if _check_aw_exists(cluster_name, namespace):
    +            api_instance.delete_namespaced_custom_object(
    +                group="workload.codeflare.dev",
    +                version="v1beta2",
    +                namespace=namespace,
    +                plural="appwrappers",
    +                name=cluster_name,
    +            )
    +            group = "workload.codeflare.dev"
    +            version = "v1beta2"
    +            plural = "appwrappers"
    +        else:
    +            api_instance.delete_namespaced_custom_object(
    +                group="ray.io",
    +                version="v1",
    +                namespace=namespace,
    +                plural="rayclusters",
    +                name=cluster_name,
    +            )
    +            group = "ray.io"
    +            version = "v1"
    +            plural = "rayclusters"
    +
    +        # Wait for the resource to be deleted
    +        while timeout > 0:
    +            try:
    +                api_instance.get_namespaced_custom_object(
    +                    group=group,
    +                    version=version,
    +                    namespace=namespace,
    +                    plural=plural,
    +                    name=cluster_name,
    +                )
    +                # Retry if resource still exists
    +                time.sleep(interval)
    +                timeout -= interval
    +                if timeout <= 0:
    +                    raise TimeoutError(
    +                        f"Timeout waiting for {cluster_name} to be deleted."
    +                    )
    +            except ApiException as e:
    +                # Resource is deleted
    +                if e.status == 404:
    +                    break
    +    except Exception as e:  # pragma: no cover
    +        return _kube_api_error_handling(e)
    +
    +
    +def _fetch_cluster_data(namespace):
    +    """
    +    _fetch_cluster_data function fetches all clusters and their spec in a given namespace and returns a DataFrame.
    +    """
    +    from .cluster import list_all_clusters
    +
    +    rayclusters = list_all_clusters(namespace, False)
    +    if not rayclusters:
    +        return pd.DataFrame()
    +    names = [item.name for item in rayclusters]
    +    namespaces = [item.namespace for item in rayclusters]
    +    num_workers = [item.num_workers for item in rayclusters]
    +    head_extended_resources = [
    +        f"{list(item.head_extended_resources.keys())[0]}: {list(item.head_extended_resources.values())[0]}"
    +        if item.head_extended_resources
    +        else "0"
    +        for item in rayclusters
    +    ]
    +    worker_extended_resources = [
    +        f"{list(item.worker_extended_resources.keys())[0]}: {list(item.worker_extended_resources.values())[0]}"
    +        if item.worker_extended_resources
    +        else "0"
    +        for item in rayclusters
    +    ]
    +    head_cpu_requests = [
    +        item.head_cpu_requests if item.head_cpu_requests else 0 for item in rayclusters
    +    ]
    +    head_cpu_limits = [
    +        item.head_cpu_limits if item.head_cpu_limits else 0 for item in rayclusters
    +    ]
    +    head_cpu_rl = [
    +        f"{requests}~{limits}"
    +        for requests, limits in zip(head_cpu_requests, head_cpu_limits)
    +    ]
    +    head_mem_requests = [
    +        item.head_mem_requests if item.head_mem_requests else 0 for item in rayclusters
    +    ]
    +    head_mem_limits = [
    +        item.head_mem_limits if item.head_mem_limits else 0 for item in rayclusters
    +    ]
    +    head_mem_rl = [
    +        f"{requests}~{limits}"
    +        for requests, limits in zip(head_mem_requests, head_mem_limits)
    +    ]
    +    worker_cpu_requests = [
    +        item.worker_cpu_requests if item.worker_cpu_requests else 0
    +        for item in rayclusters
    +    ]
    +    worker_cpu_limits = [
    +        item.worker_cpu_limits if item.worker_cpu_limits else 0 for item in rayclusters
    +    ]
    +    worker_cpu_rl = [
    +        f"{requests}~{limits}"
    +        for requests, limits in zip(worker_cpu_requests, worker_cpu_limits)
    +    ]
    +    worker_mem_requests = [
    +        item.worker_mem_requests if item.worker_mem_requests else 0
    +        for item in rayclusters
    +    ]
    +    worker_mem_limits = [
    +        item.worker_mem_limits if item.worker_mem_limits else 0 for item in rayclusters
    +    ]
    +    worker_mem_rl = [
    +        f"{requests}~{limits}"
    +        for requests, limits in zip(worker_mem_requests, worker_mem_limits)
    +    ]
+    # Render each cluster's status enum as formatted HTML for the table
+    status = [_format_status(item.status) for item in rayclusters]
    +
    +    data = {
    +        "Name": names,
    +        "Namespace": namespaces,
    +        "Num Workers": num_workers,
    +        "Head GPUs": head_extended_resources,
    +        "Worker GPUs": worker_extended_resources,
    +        "Head CPU Req~Lim": head_cpu_rl,
    +        "Head Memory Req~Lim": head_mem_rl,
    +        "Worker CPU Req~Lim": worker_cpu_rl,
    +        "Worker Memory Req~Lim": worker_mem_rl,
    +        "status": status,
    +    }
    +    return pd.DataFrame(data)
    +
    +
    +def _format_status(status):
    +    """
    +    _format_status function formats the status enum.
    +    """
    +    status_map = {
    +        RayClusterStatus.READY: '<span style="color: green;">Ready ✓</span>',
    +        RayClusterStatus.SUSPENDED: '<span style="color: #007BFF;">Suspended ❄️</span>',
    +        RayClusterStatus.FAILED: '<span style="color: red;">Failed ✗</span>',
    +        RayClusterStatus.UNHEALTHY: '<span style="color: purple;">Unhealthy</span>',
    +        RayClusterStatus.UNKNOWN: '<span style="color: purple;">Unknown</span>',
    +    }
    +    return status_map.get(status, status)
    +
    +
    +
    +
    +
    +
    +
    +

    Functions

    +
    +
    +def cluster_up_down_buttons(cluster: codeflare_sdk.cluster.Cluster) ‑> ipywidgets.widgets.widget_button.Button +
    +
    +

    The cluster_up_down_buttons function returns two button widgets for a create and delete button. +The function uses the appwrapper bool to distinguish between resource type for the tool tip.

    +
    + +Expand source code + +
    def cluster_up_down_buttons(cluster: "codeflare_sdk.cluster.Cluster") -> widgets.Button:
    +    """
    +    The cluster_up_down_buttons function returns two button widgets for a create and delete button.
    +    The function uses the appwrapper bool to distinguish between resource type for the tool tip.
    +    """
    +    resource = "Ray Cluster"
    +    if cluster.config.appwrapper:
    +        resource = "AppWrapper"
    +
    +    up_button = widgets.Button(
    +        description="Cluster Up",
    +        tooltip=f"Create the {resource}",
    +        icon="play",
    +    )
    +
    +    delete_button = widgets.Button(
    +        description="Cluster Down",
    +        tooltip=f"Delete the {resource}",
    +        icon="trash",
    +    )
    +
    +    wait_ready_check = wait_ready_check_box()
    +    output = widgets.Output()
    +
    +    # Display the buttons in an HBox wrapped in a VBox which includes the wait_ready Checkbox
    +    button_display = widgets.HBox([up_button, delete_button])
    +    display(widgets.VBox([button_display, wait_ready_check]), output)
    +
    +    def on_up_button_clicked(b):  # Handle the up button click event
    +        with output:
    +            output.clear_output()
    +            cluster.up()
    +
    +            # If the wait_ready Checkbox is clicked(value == True) trigger the wait_ready function
    +            if wait_ready_check.value:
    +                cluster.wait_ready()
    +
    +    def on_down_button_clicked(b):  # Handle the down button click event
    +        with output:
    +            output.clear_output()
    +            cluster.down()
    +
    +    up_button.on_click(on_up_button_clicked)
    +    delete_button.on_click(on_down_button_clicked)
    +
    +
    +
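A minimal notebook usage sketch for the listing above; the cluster name and namespace are placeholders and the Cluster construction arguments are illustrative:

    from codeflare_sdk.cluster import Cluster
    from codeflare_sdk.cluster.config import ClusterConfiguration
    from codeflare_sdk.cluster.widgets import cluster_up_down_buttons

    # Renders the "Cluster Up" / "Cluster Down" buttons plus the wait_ready checkbox
    cluster = Cluster(ClusterConfiguration("raytest", "default"))
    cluster_up_down_buttons(cluster)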
    +def is_notebook() ‑> bool +
    +
    +

The is_notebook function checks if Jupyter Notebook environment variables exist in the given environment and returns True/False based on that.

    +
    + +Expand source code + +
    def is_notebook() -> bool:
    +    """
+    The is_notebook function checks if Jupyter Notebook environment variables exist in the given environment and returns True/False based on that.
    +    """
    +    if (
    +        "PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING" in os.environ
    +        or "JPY_SESSION_NAME" in os.environ
    +    ):  # If running Jupyter NBs in VsCode or RHOAI/ODH display UI buttons
    +        return True
    +    else:
    +        return False
    +
    +
    +
    +def view_clusters(namespace: str = None) +
    +
    +

    view_clusters function will display existing clusters with their specs, and handle user interactions.

    +
    + +Expand source code + +
    def view_clusters(namespace: str = None):
    +    """
    +    view_clusters function will display existing clusters with their specs, and handle user interactions.
    +    """
    +    if not is_notebook():
    +        warnings.warn(
    +            "view_clusters can only be used in a Jupyter Notebook environment."
    +        )
    +        return  # Exit function if not in Jupyter Notebook
    +
    +    from .cluster import get_current_namespace
    +
    +    if not namespace:
    +        namespace = get_current_namespace()
    +
    +    user_output = widgets.Output()
    +    raycluster_data_output = widgets.Output()
    +    url_output = widgets.Output()
    +
    +    ray_clusters_df = _fetch_cluster_data(namespace)
    +    if ray_clusters_df.empty:
    +        print(f"No clusters found in the {namespace} namespace.")
    +        return
    +
    +    classification_widget = widgets.ToggleButtons(
    +        options=ray_clusters_df["Name"].tolist(),
    +        value=ray_clusters_df["Name"].tolist()[0],
    +        description="Select an existing cluster:",
    +    )
    +    # Setting the initial value to trigger the event handler to display the cluster details.
    +    initial_value = classification_widget.value
    +    _on_cluster_click(
    +        {"new": initial_value}, raycluster_data_output, namespace, classification_widget
    +    )
    +    classification_widget.observe(
    +        lambda selection_change: _on_cluster_click(
    +            selection_change, raycluster_data_output, namespace, classification_widget
    +        ),
    +        names="value",
    +    )
    +
    +    # UI table buttons
    +    delete_button = widgets.Button(
    +        description="Delete Cluster",
    +        icon="trash",
    +        tooltip="Delete the selected cluster",
    +    )
    +    delete_button.on_click(
    +        lambda b: _on_delete_button_click(
    +            b,
    +            classification_widget,
    +            ray_clusters_df,
    +            raycluster_data_output,
    +            user_output,
    +            delete_button,
    +            list_jobs_button,
    +            ray_dashboard_button,
    +        )
    +    )
    +
    +    list_jobs_button = widgets.Button(
    +        description="View Jobs", icon="suitcase", tooltip="Open the Ray Job Dashboard"
    +    )
    +    list_jobs_button.on_click(
    +        lambda b: _on_list_jobs_button_click(
    +            b, classification_widget, ray_clusters_df, user_output, url_output
    +        )
    +    )
    +
    +    ray_dashboard_button = widgets.Button(
    +        description="Open Ray Dashboard",
    +        icon="dashboard",
    +        tooltip="Open the Ray Dashboard in a new tab",
    +        layout=widgets.Layout(width="auto"),
    +    )
    +    ray_dashboard_button.on_click(
    +        lambda b: _on_ray_dashboard_button_click(
    +            b, classification_widget, ray_clusters_df, user_output, url_output
    +        )
    +    )
    +
    +    display(widgets.VBox([classification_widget, raycluster_data_output]))
    +    display(
    +        widgets.HBox([delete_button, list_jobs_button, ray_dashboard_button]),
    +        url_output,
    +        user_output,
    +    )
    +
    +
    +
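A short usage sketch for view_clusters following the listing above; the namespace is a placeholder and, as shown in the source, it falls back to the current namespace when omitted and only renders inside a Jupyter environment:

    from codeflare_sdk.cluster.widgets import view_clusters

    # Display all Ray clusters in the namespace with delete/jobs/dashboard buttons
    view_clusters("default")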
    +def wait_ready_check_box() +
    +
    +

    The wait_ready_check_box function will return a checkbox widget used for waiting for the resource to be in the state READY.

    +
    + +Expand source code + +
    def wait_ready_check_box():
    +    """
    +    The wait_ready_check_box function will return a checkbox widget used for waiting for the resource to be in the state READY.
    +    """
    +    wait_ready_check_box = widgets.Checkbox(
    +        False,
    +        description="Wait for Cluster?",
    +    )
    +    return wait_ready_check_box
    +
    +
    +
    +
    +
    +
    +
    + +
+
diff --git a/docs/detailed-documentation/index.html b/docs/detailed-documentation/index.html
index fd74344f..45000719 100644
--- a/docs/detailed-documentation/index.html
+++ b/docs/detailed-documentation/index.html
@@ -42,6 +42,7 @@

    Package codeflare_sdk

    get_cluster,
    list_all_queued,
    list_all_clusters,
+    view_clusters,
)

from .job import RayJobClient

diff --git a/docs/detailed-documentation/job/ray_jobs.html b/docs/detailed-documentation/job/ray_jobs.html
index 01fa5fb5..20002e27 100644
--- a/docs/detailed-documentation/job/ray_jobs.html
+++ b/docs/detailed-documentation/job/ray_jobs.html
@@ -47,6 +47,7 @@

    Module codeflare_sdk.job.ray_jobs

    The ray_jobs sub-module contains methods needed to submit jobs and connect to Ray Clusters that were not created by CodeFlare. The SDK acts as a wrapper for the Ray Job Submission Client. """ + from ray.job_submission import JobSubmissionClient from ray.dashboard.modules.job.pydantic_models import JobDetails from typing import Iterator, Optional, Dict, Any, Union, List @@ -93,6 +94,7 @@

    Module codeflare_sdk.job.ray_jobs

    submission_id: Optional[str] = None, entrypoint_num_cpus: Optional[Union[int, float]] = None, entrypoint_num_gpus: Optional[Union[int, float]] = None, + entrypoint_memory: Optional[int] = None, entrypoint_resources: Optional[Dict[str, float]] = None, ) -> str: """ @@ -106,6 +108,7 @@

    Module codeflare_sdk.job.ray_jobs

        job_id -- DEPRECATED. This has been renamed to submission_id
        entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
        entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
+        entrypoint_memory -- The quantity of memory to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
        entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.
        """
        return self.rayJobClient.submit_job(
@@ -116,6 +119,7 @@

    Module codeflare_sdk.job.ray_jobs

    submission_id=submission_id, entrypoint_num_cpus=entrypoint_num_cpus, entrypoint_num_gpus=entrypoint_num_gpus, + entrypoint_memory=entrypoint_memory, entrypoint_resources=entrypoint_resources, ) @@ -248,6 +252,7 @@

    Classes

    submission_id: Optional[str] = None, entrypoint_num_cpus: Optional[Union[int, float]] = None, entrypoint_num_gpus: Optional[Union[int, float]] = None, + entrypoint_memory: Optional[int] = None, entrypoint_resources: Optional[Dict[str, float]] = None, ) -> str: """ @@ -261,6 +266,7 @@

    Classes

        job_id -- DEPRECATED. This has been renamed to submission_id
        entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
        entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
+        entrypoint_memory -- The quantity of memory to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
        entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.
        """
        return self.rayJobClient.submit_job(
@@ -271,6 +277,7 @@

    Classes

    submission_id=submission_id, entrypoint_num_cpus=entrypoint_num_cpus, entrypoint_num_gpus=entrypoint_num_gpus, + entrypoint_memory=entrypoint_memory, entrypoint_resources=entrypoint_resources, ) @@ -461,7 +468,7 @@

    Methods

-def submit_job(self, entrypoint: str, job_id: Optional[str] = None, runtime_env: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, str]] = None, submission_id: Optional[str] = None, entrypoint_num_cpus: Union[int, float, ForwardRef(None)] = None, entrypoint_num_gpus: Union[int, float, ForwardRef(None)] = None, entrypoint_resources: Optional[Dict[str, float]] = None) ‑> str
+def submit_job(self, entrypoint: str, job_id: Optional[str] = None, runtime_env: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, str]] = None, submission_id: Optional[str] = None, entrypoint_num_cpus: Union[int, float, ForwardRef(None)] = None, entrypoint_num_gpus: Union[int, float, ForwardRef(None)] = None, entrypoint_memory: Optional[int] = None, entrypoint_resources: Optional[Dict[str, float]] = None) ‑> str

    Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.

    @@ -473,6 +480,7 @@

    Methods

job_id – DEPRECATED. This has been renamed to submission_id
entrypoint_num_cpus – The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
entrypoint_num_gpus – The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
+entrypoint_memory – The quantity of memory to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
entrypoint_resources – The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.

    @@ -487,6 +495,7 @@

    Methods

    submission_id: Optional[str] = None, entrypoint_num_cpus: Optional[Union[int, float]] = None, entrypoint_num_gpus: Optional[Union[int, float]] = None, + entrypoint_memory: Optional[int] = None, entrypoint_resources: Optional[Dict[str, float]] = None, ) -> str: """ @@ -500,6 +509,7 @@

    Methods

        job_id -- DEPRECATED. This has been renamed to submission_id
        entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
        entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
+        entrypoint_memory -- The quantity of memory to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
        entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.
        """
        return self.rayJobClient.submit_job(
@@ -510,6 +520,7 @@

    Methods

    submission_id=submission_id, entrypoint_num_cpus=entrypoint_num_cpus, entrypoint_num_gpus=entrypoint_num_gpus, + entrypoint_memory=entrypoint_memory, entrypoint_resources=entrypoint_resources, )
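A short usage sketch of the new entrypoint_memory option; the dashboard address and entrypoint are placeholders, and the client is assumed to be pointed at a Ray dashboard endpoint as with the Ray Job Submission Client it wraps:

    from codeflare_sdk import RayJobClient

    # Connect to an existing Ray cluster's dashboard endpoint (placeholder address)
    client = RayJobClient(address="http://ray-dashboard-example:8265")

    # Reserve 1 CPU and some memory for the entrypoint command itself
    submission_id = client.submit_job(
        entrypoint="python train.py",
        entrypoint_num_cpus=1,
        entrypoint_memory=1024 * 1024 * 1024,  # assumed to be bytes, as with Ray memory resources
    )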
diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html
index 7b41e3c9..60ce89df 100644
--- a/docs/detailed-documentation/utils/generate_yaml.html
+++ b/docs/detailed-documentation/utils/generate_yaml.html
@@ -146,22 +146,22 @@

    Module codeflare_sdk.utils.generate_yaml

def update_resources(
    spec,
-    worker_cpu_requests,
-    worker_cpu_limits,
-    worker_memory_requests,
-    worker_memory_limits,
+    cpu_requests,
+    cpu_limits,
+    memory_requests,
+    memory_limits,
    custom_resources,
):
    container = spec.get("containers")
    for resource in container:
        requests = resource.get("resources").get("requests")
        if requests is not None:
-            requests["cpu"] = worker_cpu_requests
-            requests["memory"] = worker_memory_requests
+            requests["cpu"] = cpu_requests
+            requests["memory"] = memory_requests
        limits = resource.get("resources").get("limits")
        if limits is not None:
-            limits["cpu"] = worker_cpu_limits
-            limits["memory"] = worker_memory_limits
+            limits["cpu"] = cpu_limits
+            limits["memory"] = memory_limits
        for k in custom_resources.keys():
            limits[k] = custom_resources[k]
            requests[k] = custom_resources[k]
@@ -241,10 +241,10 @@

    Module codeflare_sdk.utils.generate_yaml

        # TODO: Eventually add head node configuration outside of template
        update_resources(
            spec,
-            cluster.config.head_cpus,
-            cluster.config.head_cpus,
-            cluster.config.head_memory,
-            cluster.config.head_memory,
+            cluster.config.head_cpu_requests,
+            cluster.config.head_cpu_limits,
+            cluster.config.head_memory_requests,
+            cluster.config.head_memory_limits,
            cluster.config.head_extended_resource_requests,
        )
    else:
@@ -812,10 +812,10 @@

    Functions

        # TODO: Eventually add head node configuration outside of template
        update_resources(
            spec,
-            cluster.config.head_cpus,
-            cluster.config.head_cpus,
-            cluster.config.head_memory,
-            cluster.config.head_memory,
+            cluster.config.head_cpu_requests,
+            cluster.config.head_cpu_limits,
+            cluster.config.head_memory_requests,
+            cluster.config.head_memory_limits,
            cluster.config.head_extended_resource_requests,
        )
    else:
@@ -830,7 +830,7 @@

    Functions

    -def update_resources(spec, worker_cpu_requests, worker_cpu_limits, worker_memory_requests, worker_memory_limits, custom_resources) +def update_resources(spec, cpu_requests, cpu_limits, memory_requests, memory_limits, custom_resources)
    @@ -840,22 +840,22 @@

    Functions

    def update_resources(
         spec,
    -    worker_cpu_requests,
    -    worker_cpu_limits,
    -    worker_memory_requests,
    -    worker_memory_limits,
    +    cpu_requests,
    +    cpu_limits,
    +    memory_requests,
    +    memory_limits,
         custom_resources,
     ):
         container = spec.get("containers")
         for resource in container:
             requests = resource.get("resources").get("requests")
             if requests is not None:
    -            requests["cpu"] = worker_cpu_requests
    -            requests["memory"] = worker_memory_requests
    +            requests["cpu"] = cpu_requests
    +            requests["memory"] = memory_requests
             limits = resource.get("resources").get("limits")
             if limits is not None:
    -            limits["cpu"] = worker_cpu_limits
    -            limits["memory"] = worker_memory_limits
    +            limits["cpu"] = cpu_limits
    +            limits["memory"] = memory_limits
             for k in custom_resources.keys():
                 limits[k] = custom_resources[k]
                 requests[k] = custom_resources[k]
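A small sketch of calling the reshaped helper against a pod-spec-like dict, assuming update_resources is imported from the module shown above (the values are illustrative):

    from codeflare_sdk.utils.generate_yaml import update_resources

    # Minimal fragment with the "containers" / "resources" shape the function expects
    spec = {
        "containers": [
            {"resources": {"requests": {"cpu": 1, "memory": "2G"},
                           "limits": {"cpu": 1, "memory": "2G"}}}
        ]
    }

    # Apply requests/limits plus an extended resource to every container in the spec
    update_resources(spec, 2, 2, "8G", "8G", {"nvidia.com/gpu": 1})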
diff --git a/docs/detailed-documentation/utils/pretty_print.html b/docs/detailed-documentation/utils/pretty_print.html
index cbffd122..f2a8d7db 100644
--- a/docs/detailed-documentation/utils/pretty_print.html
+++ b/docs/detailed-documentation/utils/pretty_print.html
@@ -166,9 +166,9 @@

    Module codeflare_sdk.utils.pretty_print

    )
    name = cluster.name
    dashboard = cluster.dashboard
-    workers = str(cluster.workers)
-    memory = f"{cluster.worker_mem_min}~{cluster.worker_mem_max}"
-    cpu = str(cluster.worker_cpu)
+    workers = str(cluster.num_workers)
+    memory = f"{cluster.worker_mem_requests}~{cluster.worker_mem_limits}"
+    cpu = f"{cluster.worker_cpu_requests}~{cluster.worker_cpu_limits}"
    gpu = str(cluster.worker_extended_resources.get("nvidia.com/gpu", 0))

    #'table0' to display the cluster name, status, url, and dashboard link
@@ -344,9 +344,9 @@

    Functions

    ) name = cluster.name dashboard = cluster.dashboard - workers = str(cluster.workers) - memory = f"{cluster.worker_mem_min}~{cluster.worker_mem_max}" - cpu = str(cluster.worker_cpu) + workers = str(cluster.num_workers) + memory = f"{cluster.worker_mem_requests}~{cluster.worker_mem_limits}" + cpu = f"{cluster.worker_cpu_requests}~{cluster.worker_cpu_limits}" gpu = str(cluster.worker_extended_resources.get("nvidia.com/gpu", 0)) #'table0' to display the cluster name, status, url, and dashboard link From d14902d2107485c34ce499d1982b4e856bb0dc8c Mon Sep 17 00:00:00 2001 From: Christian Zaccaria <73656840+ChristianZaccaria@users.noreply.github.com> Date: Fri, 27 Sep 2024 16:45:03 +0100 Subject: [PATCH 349/496] Upgrade poetry version to latest 1.8.3 (#688) * Upgrade poetry version to latest 1.8.3 * Update poetry.lock file --- .github/workflows/nightly-image-build.yaml | 2 +- .github/workflows/release.yaml | 2 +- poetry.lock | 1919 ++++++++++---------- pyproject.toml | 4 - 4 files changed, 1009 insertions(+), 918 deletions(-) diff --git a/.github/workflows/nightly-image-build.yaml b/.github/workflows/nightly-image-build.yaml index 4532c4a7..4b033c70 100644 --- a/.github/workflows/nightly-image-build.yaml +++ b/.github/workflows/nightly-image-build.yaml @@ -8,7 +8,7 @@ on: env: PYTHON_VERSION: 3.8 - POETRY_VERSION: 1.5.1 + POETRY_VERSION: 1.8.3 QUAY_ORGANIZATION: 'project-codeflare' SDK_FILE: 'codeflare_sdk-0.0.0.dev0.tar.gz' IMAGE_TAG: 'dev' diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index aae3e324..0b9d6bbc 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -21,7 +21,7 @@ on: required: true poetry_version: type: string - default: "1.5.1" + default: "1.8.3" required: true codeflare-repository-organization: type: string diff --git a/poetry.lock b/poetry.lock index a56bb3f0..3d68e4a8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,100 +1,127 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] -name = "aiohttp" -version = "3.9.5" -description = "Async http client/server framework (asyncio)" +name = "aiohappyeyeballs" +version = "2.4.2" +description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = 
"aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, - {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, - {file = 
"aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, - {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, - {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = 
"aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, + {file = "aiohappyeyeballs-2.4.2-py3-none-any.whl", hash = "sha256:8522691d9a154ba1145b157d6d5c15e5c692527ce6a53c5e5f9876977f6dab2f"}, + {file = "aiohappyeyeballs-2.4.2.tar.gz", hash = "sha256:4ca893e6c5c1f5bf3888b04cb5a3bee24995398efef6e0b9f747b5e89d84fd74"}, ] -[package.dependencies] +[[package]] +name = "aiohttp" +version = "3.10.6" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.10.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:682836fc672972cc3101cc9e30d49c5f7e8f1d010478d46119fe725a4545acfd"}, + {file = "aiohttp-3.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:289fa8a20018d0d5aa9e4b35d899bd51bcb80f0d5f365d9a23e30dac3b79159b"}, + {file = "aiohttp-3.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8617c96a20dd57e7e9d398ff9d04f3d11c4d28b1767273a5b1a018ada5a654d3"}, + {file = "aiohttp-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdbeff1b062751c2a2a55b171f7050fb7073633c699299d042e962aacdbe1a07"}, + {file = "aiohttp-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ea35d849cdd4a9268f910bff4497baebbc1aa3f2f625fd8ccd9ac99c860c621"}, + {file = "aiohttp-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:473961b3252f3b949bb84873d6e268fb6d8aa0ccc6eb7404fa58c76a326bb8e1"}, + {file = "aiohttp-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d2665c5df629eb2f981dab244c01bfa6cdc185f4ffa026639286c4d56fafb54"}, + {file = "aiohttp-3.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25d92f794f1332f656e3765841fc2b7ad5c26c3f3d01e8949eeb3495691cf9f4"}, + {file = "aiohttp-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9bd6b2033993d5ae80883bb29b83fb2b432270bbe067c2f53cc73bb57c46065f"}, + {file = "aiohttp-3.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d7f408c43f5e75ea1edc152fb375e8f46ef916f545fb66d4aebcbcfad05e2796"}, + {file = "aiohttp-3.10.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:cf8b8560aa965f87bf9c13bf9fed7025993a155ca0ce8422da74bf46d18c2f5f"}, + {file = 
"aiohttp-3.10.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14477c4e52e2f17437b99893fd220ffe7d7ee41df5ebf931a92b8ca82e6fd094"}, + {file = "aiohttp-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb138fbf9f53928e779650f5ed26d0ea1ed8b2cab67f0ea5d63afa09fdc07593"}, + {file = "aiohttp-3.10.6-cp310-cp310-win32.whl", hash = "sha256:9843d683b8756971797be171ead21511d2215a2d6e3c899c6e3107fbbe826791"}, + {file = "aiohttp-3.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:f8b8e49fe02f744d38352daca1dbef462c3874900bd8166516f6ea8e82b5aacf"}, + {file = "aiohttp-3.10.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f52e54fd776ad0da1006708762213b079b154644db54bcfc62f06eaa5b896402"}, + {file = "aiohttp-3.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:995ab1a238fd0d19dc65f2d222e5eb064e409665c6426a3e51d5101c1979ee84"}, + {file = "aiohttp-3.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0749c4d5a08a802dd66ecdf59b2df4d76b900004017468a7bb736c3b5a3dd902"}, + {file = "aiohttp-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e05b39158f2af0e2438cc2075cfc271f4ace0c3cc4a81ec95b27a0432e161951"}, + {file = "aiohttp-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a9f196c970db2dcde4f24317e06615363349dc357cf4d7a3b0716c20ac6d7bcd"}, + {file = "aiohttp-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47647c8af04a70e07a2462931b0eba63146a13affa697afb4ecbab9d03a480ce"}, + {file = "aiohttp-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669c0efe7e99f6d94d63274c06344bd0e9c8daf184ce5602a29bc39e00a18720"}, + {file = "aiohttp-3.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9721cdd83a994225352ca84cd537760d41a9da3c0eacb3ff534747ab8fba6d0"}, + {file = "aiohttp-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0b82c8ebed66ce182893e7c0b6b60ba2ace45b1df104feb52380edae266a4850"}, + {file = "aiohttp-3.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b169f8e755e541b72e714b89a831b315bbe70db44e33fead28516c9e13d5f931"}, + {file = "aiohttp-3.10.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0be3115753baf8b4153e64f9aa7bf6c0c64af57979aa900c31f496301b374570"}, + {file = "aiohttp-3.10.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e1f80cd17d81a404b6e70ef22bfe1870bafc511728397634ad5f5efc8698df56"}, + {file = "aiohttp-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6419728b08fb6380c66a470d2319cafcec554c81780e2114b7e150329b9a9a7f"}, + {file = "aiohttp-3.10.6-cp311-cp311-win32.whl", hash = "sha256:bd294dcdc1afdc510bb51d35444003f14e327572877d016d576ac3b9a5888a27"}, + {file = "aiohttp-3.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:bf861da9a43d282d6dd9dcd64c23a0fccf2c5aa5cd7c32024513c8c79fb69de3"}, + {file = "aiohttp-3.10.6-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2708baccdc62f4b1251e59c2aac725936a900081f079b88843dabcab0feeeb27"}, + {file = "aiohttp-3.10.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7475da7a5e2ccf1a1c86c8fee241e277f4874c96564d06f726d8df8e77683ef7"}, + {file = "aiohttp-3.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02108326574ff60267b7b35b17ac5c0bbd0008ccb942ce4c48b657bb90f0b8aa"}, + {file = "aiohttp-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:029a019627b37fa9eac5c75cc54a6bb722c4ebbf5a54d8c8c0fb4dd8facf2702"}, + {file = 
"aiohttp-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a637d387db6fdad95e293fab5433b775fd104ae6348d2388beaaa60d08b38c4"}, + {file = "aiohttp-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1a16f3fc1944c61290d33c88dc3f09ba62d159b284c38c5331868425aca426"}, + {file = "aiohttp-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b292f37969f9cc54f4643f0be7dacabf3612b3b4a65413661cf6c350226787"}, + {file = "aiohttp-3.10.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0754690a3a26e819173a34093798c155bafb21c3c640bff13be1afa1e9d421f9"}, + {file = "aiohttp-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:164ecd32e65467d86843dbb121a6666c3deb23b460e3f8aefdcaacae79eb718a"}, + {file = "aiohttp-3.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:438c5863feb761f7ca3270d48c292c334814459f61cc12bab5ba5b702d7c9e56"}, + {file = "aiohttp-3.10.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ba18573bb1de1063d222f41de64a0d3741223982dcea863b3f74646faf618ec7"}, + {file = "aiohttp-3.10.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c82a94ddec996413a905f622f3da02c4359952aab8d817c01cf9915419525e95"}, + {file = "aiohttp-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92351aa5363fc3c1f872ca763f86730ced32b01607f0c9662b1fa711087968d0"}, + {file = "aiohttp-3.10.6-cp312-cp312-win32.whl", hash = "sha256:3e15e33bfc73fa97c228f72e05e8795e163a693fd5323549f49367c76a6e5883"}, + {file = "aiohttp-3.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:fe517113fe4d35d9072b826c3e147d63c5f808ca8167d450b4f96c520c8a1d8d"}, + {file = "aiohttp-3.10.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:482f74057ea13d387a7549d7a7ecb60e45146d15f3e58a2d93a0ad2d5a8457cd"}, + {file = "aiohttp-3.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:03fa40d1450ee5196e843315ddf74a51afc7e83d489dbfc380eecefea74158b1"}, + {file = "aiohttp-3.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e52e59ed5f4cc3a3acfe2a610f8891f216f486de54d95d6600a2c9ba1581f4d"}, + {file = "aiohttp-3.10.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b3935a22c9e41a8000d90588bed96cf395ef572dbb409be44c6219c61d900d"}, + {file = "aiohttp-3.10.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bef1480ee50f75abcfcb4b11c12de1005968ca9d0172aec4a5057ba9f2b644f"}, + {file = "aiohttp-3.10.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:671745ea7db19693ce867359d503772177f0b20fa8f6ee1e74e00449f4c4151d"}, + {file = "aiohttp-3.10.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b50b367308ca8c12e0b50cba5773bc9abe64c428d3fd2bbf5cd25aab37c77bf"}, + {file = "aiohttp-3.10.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a504d7cdb431a777d05a124fd0b21efb94498efa743103ea01b1e3136d2e4fb"}, + {file = "aiohttp-3.10.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66bc81361131763660b969132a22edce2c4d184978ba39614e8f8f95db5c95f8"}, + {file = "aiohttp-3.10.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:27cf19a38506e2e9f12fc17e55f118f04897b0a78537055d93a9de4bf3022e3d"}, + {file = "aiohttp-3.10.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3468b39f977a11271517c6925b226720e148311039a380cc9117b1e2258a721f"}, + {file = "aiohttp-3.10.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:9d26da22a793dfd424be1050712a70c0afd96345245c29aced1e35dbace03413"}, + {file = "aiohttp-3.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:844d48ff9173d0b941abed8b2ea6a412f82b56d9ab1edb918c74000c15839362"}, + {file = "aiohttp-3.10.6-cp313-cp313-win32.whl", hash = "sha256:2dd56e3c43660ed3bea67fd4c5025f1ac1f9ecf6f0b991a6e5efe2e678c490c5"}, + {file = "aiohttp-3.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:c91781d969fbced1993537f45efe1213bd6fccb4b37bfae2a026e20d6fbed206"}, + {file = "aiohttp-3.10.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4407a80bca3e694f2d2a523058e20e1f9f98a416619e04f6dc09dc910352ac8b"}, + {file = "aiohttp-3.10.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1cb045ec5961f51af3e2c08cd6fe523f07cc6e345033adee711c49b7b91bb954"}, + {file = "aiohttp-3.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fabdcdc781a36b8fd7b2ca9dea8172f29a99e11d00ca0f83ffeb50958da84a1"}, + {file = "aiohttp-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a9f42efcc2681790595ab3d03c0e52d01edc23a0973ea09f0dc8d295e12b8e"}, + {file = "aiohttp-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cca776a440795db437d82c07455761c85bbcf3956221c3c23b8c93176c278ce7"}, + {file = "aiohttp-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5582de171f0898139cf51dd9fcdc79b848e28d9abd68e837f0803fc9f30807b1"}, + {file = "aiohttp-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:370e2d47575c53c817ee42a18acc34aad8da4dbdaac0a6c836d58878955f1477"}, + {file = "aiohttp-3.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:444d1704e2af6b30766debed9be8a795958029e552fe77551355badb1944012c"}, + {file = "aiohttp-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40271a2a375812967401c9ca8077de9368e09a43a964f4dce0ff603301ec9358"}, + {file = "aiohttp-3.10.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f3af26f86863fad12e25395805bb0babbd49d512806af91ec9708a272b696248"}, + {file = "aiohttp-3.10.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4752df44df48fd42b80f51d6a97553b482cda1274d9dc5df214a3a1aa5d8f018"}, + {file = "aiohttp-3.10.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2cd5290ab66cfca2f90045db2cc6434c1f4f9fbf97c9f1c316e785033782e7d2"}, + {file = "aiohttp-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3427031064b0d5c95647e6369c4aa3c556402f324a3e18107cb09517abe5f962"}, + {file = "aiohttp-3.10.6-cp38-cp38-win32.whl", hash = "sha256:614fc21e86adc28e4165a6391f851a6da6e9cbd7bb232d0df7718b453a89ee98"}, + {file = "aiohttp-3.10.6-cp38-cp38-win_amd64.whl", hash = "sha256:58c5d7318a136a3874c78717dd6de57519bc64f6363c5827c2b1cb775bea71dd"}, + {file = "aiohttp-3.10.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5db26bbca8e7968c4c977a0c640e0b9ce7224e1f4dcafa57870dc6ee28e27de6"}, + {file = "aiohttp-3.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3fb4216e3ec0dbc01db5ba802f02ed78ad8f07121be54eb9e918448cc3f61b7c"}, + {file = "aiohttp-3.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a976ef488f26e224079deb3d424f29144c6d5ba4ded313198169a8af8f47fb82"}, + {file = "aiohttp-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a86610174de8a85a920e956e2d4f9945e7da89f29a00e95ac62a4a414c4ef4e"}, + {file = "aiohttp-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:217791c6a399cc4f2e6577bb44344cba1f5714a2aebf6a0bea04cfa956658284"}, + {file = "aiohttp-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba3662d41abe2eab0eeec7ee56f33ef4e0b34858f38abf24377687f9e1fb00a5"}, + {file = "aiohttp-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4dfa5ad4bce9ca30a76117fbaa1c1decf41ebb6c18a4e098df44298941566f9"}, + {file = "aiohttp-3.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0009258e97502936d3bd5bf2ced15769629097d0abb81e6495fba1047824fe0"}, + {file = "aiohttp-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0a75d5c9fb4f06c41d029ae70ad943c3a844c40c0a769d12be4b99b04f473d3d"}, + {file = "aiohttp-3.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8198b7c002aae2b40b2d16bfe724b9a90bcbc9b78b2566fc96131ef4e382574d"}, + {file = "aiohttp-3.10.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4611db8c907f90fe86be112efdc2398cd7b4c8eeded5a4f0314b70fdea8feab0"}, + {file = "aiohttp-3.10.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ff99ae06eef85c7a565854826114ced72765832ee16c7e3e766c5e4c5b98d20e"}, + {file = "aiohttp-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7641920bdcc7cd2d3ddfb8bb9133a6c9536b09dbd49490b79e125180b2d25b93"}, + {file = "aiohttp-3.10.6-cp39-cp39-win32.whl", hash = "sha256:e2e7d5591ea868d5ec82b90bbeb366a198715672841d46281b623e23079593db"}, + {file = "aiohttp-3.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:b504c08c45623bf5c7ca41be380156d925f00199b3970efd758aef4a77645feb"}, + {file = "aiohttp-3.10.6.tar.gz", hash = "sha256:d2578ef941be0c2ba58f6f421a703527d08427237ed45ecb091fed6f83305336"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" +yarl = ">=1.12.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] [[package]] name = "aiohttp-cors" @@ -126,13 +153,13 @@ frozenlist = ">=1.1.0" [[package]] name = "anyio" -version = "4.5.0" +version = "4.6.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.5.0-py3-none-any.whl", hash = "sha256:fdeb095b7cc5a5563175eedd926ec4ae55413bb4be5770c424af0ba46ccb4a78"}, - {file = "anyio-4.5.0.tar.gz", hash = "sha256:c5a275fe5ca0afd788001f58fca1e69e29ce706d746e317d660e21f70c530ef9"}, + {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, + {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, ] [package.dependencies] @@ -278,22 +305,22 @@ files = [ [[package]] name = "attrs" -version = "23.2.0" +version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, + {file = "attrs-24.2.0.tar.gz", hash = 
"sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "babel" @@ -311,38 +338,38 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "bcrypt" -version = "4.1.3" +version = "4.2.0" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" files = [ - {file = "bcrypt-4.1.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:48429c83292b57bf4af6ab75809f8f4daf52aa5d480632e53707805cc1ce9b74"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8bea4c152b91fd8319fef4c6a790da5c07840421c2b785084989bf8bbb7455"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d3b317050a9a711a5c7214bf04e28333cf528e0ed0ec9a4e55ba628d0f07c1a"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:094fd31e08c2b102a14880ee5b3d09913ecf334cd604af27e1013c76831f7b05"}, - {file = "bcrypt-4.1.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4fb253d65da30d9269e0a6f4b0de32bd657a0208a6f4e43d3e645774fb5457f3"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:193bb49eeeb9c1e2db9ba65d09dc6384edd5608d9d672b4125e9320af9153a15"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8cbb119267068c2581ae38790e0d1fbae65d0725247a930fc9900c285d95725d"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6cac78a8d42f9d120b3987f82252bdbeb7e6e900a5e1ba37f6be6fe4e3848286"}, - {file = "bcrypt-4.1.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01746eb2c4299dd0ae1670234bf77704f581dd72cc180f444bfe74eb80495b64"}, - {file = "bcrypt-4.1.3-cp37-abi3-win32.whl", hash = "sha256:037c5bf7c196a63dcce75545c8874610c600809d5d82c305dd327cd4969995bf"}, - {file = "bcrypt-4.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:8a893d192dfb7c8e883c4576813bf18bb9d59e2cfd88b68b725990f033f1b978"}, - {file = "bcrypt-4.1.3-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d4cf6ef1525f79255ef048b3489602868c47aea61f375377f0d00514fe4a78c"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5698ce5292a4e4b9e5861f7e53b1d89242ad39d54c3da451a93cac17b61921a"}, 
- {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec3c2e1ca3e5c4b9edb94290b356d082b721f3f50758bce7cce11d8a7c89ce84"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3a5be252fef513363fe281bafc596c31b552cf81d04c5085bc5dac29670faa08"}, - {file = "bcrypt-4.1.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5f7cd3399fbc4ec290378b541b0cf3d4398e4737a65d0f938c7c0f9d5e686611"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:c4c8d9b3e97209dd7111bf726e79f638ad9224b4691d1c7cfefa571a09b1b2d6"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:31adb9cbb8737a581a843e13df22ffb7c84638342de3708a98d5c986770f2834"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:551b320396e1d05e49cc18dd77d970accd52b322441628aca04801bbd1d52a73"}, - {file = "bcrypt-4.1.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6717543d2c110a155e6821ce5670c1f512f602eabb77dba95717ca76af79867d"}, - {file = "bcrypt-4.1.3-cp39-abi3-win32.whl", hash = "sha256:6004f5229b50f8493c49232b8e75726b568535fd300e5039e255d919fc3a07f2"}, - {file = "bcrypt-4.1.3-cp39-abi3-win_amd64.whl", hash = "sha256:2505b54afb074627111b5a8dc9b6ae69d0f01fea65c2fcaea403448c503d3991"}, - {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:cb9c707c10bddaf9e5ba7cdb769f3e889e60b7d4fea22834b261f51ca2b89fed"}, - {file = "bcrypt-4.1.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9f8ea645eb94fb6e7bea0cf4ba121c07a3a182ac52876493870033141aa687bc"}, - {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f44a97780677e7ac0ca393bd7982b19dbbd8d7228c1afe10b128fd9550eef5f1"}, - {file = "bcrypt-4.1.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d84702adb8f2798d813b17d8187d27076cca3cd52fe3686bb07a9083930ce650"}, - {file = "bcrypt-4.1.3.tar.gz", hash = "sha256:2ee15dd749f5952fe3f0430d0ff6b74082e159c50332a1413d51b5689cf06623"}, + {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, + {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, + {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, + {file = "bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, + {file = 
"bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, + {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, + {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, + {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, + {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, + {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, + {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, + {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, + {file = "bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, ] [package.extras] @@ -390,85 +417,100 @@ css = ["tinycss2 (>=1.1.0,<1.3)"] [[package]] name = "cachetools" -version = "5.3.3" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = 
"cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -758,33 +800,33 @@ tox = ["tox"] [[package]] name = "debugpy" -version = "1.8.5" +version = "1.8.6" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" files = [ - {file = "debugpy-1.8.5-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7e4d594367d6407a120b76bdaa03886e9eb652c05ba7f87e37418426ad2079f7"}, - {file = "debugpy-1.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4413b7a3ede757dc33a273a17d685ea2b0c09dbd312cc03f5534a0fd4d40750a"}, - {file = "debugpy-1.8.5-cp310-cp310-win32.whl", hash = "sha256:dd3811bd63632bb25eda6bd73bea8e0521794cda02be41fa3160eb26fc29e7ed"}, - {file = "debugpy-1.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:b78c1250441ce893cb5035dd6f5fc12db968cc07f91cc06996b2087f7cefdd8e"}, - {file = "debugpy-1.8.5-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:606bccba19f7188b6ea9579c8a4f5a5364ecd0bf5a0659c8a5d0e10dcee3032a"}, - {file = "debugpy-1.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db9fb642938a7a609a6c865c32ecd0d795d56c1aaa7a7a5722d77855d5e77f2b"}, - {file = "debugpy-1.8.5-cp311-cp311-win32.whl", hash = "sha256:4fbb3b39ae1aa3e5ad578f37a48a7a303dad9a3d018d369bc9ec629c1cfa7408"}, - {file = "debugpy-1.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:345d6a0206e81eb68b1493ce2fbffd57c3088e2ce4b46592077a943d2b968ca3"}, - {file = "debugpy-1.8.5-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:5b5c770977c8ec6c40c60d6f58cacc7f7fe5a45960363d6974ddb9b62dbee156"}, - {file = "debugpy-1.8.5-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a65b00b7cdd2ee0c2cf4c7335fef31e15f1b7056c7fdbce9e90193e1a8c8cb"}, - {file = "debugpy-1.8.5-cp312-cp312-win32.whl", hash = "sha256:c9f7c15ea1da18d2fcc2709e9f3d6de98b69a5b0fff1807fb80bc55f906691f7"}, - {file = "debugpy-1.8.5-cp312-cp312-win_amd64.whl", hash = 
"sha256:28ced650c974aaf179231668a293ecd5c63c0a671ae6d56b8795ecc5d2f48d3c"}, - {file = "debugpy-1.8.5-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:3df6692351172a42af7558daa5019651f898fc67450bf091335aa8a18fbf6f3a"}, - {file = "debugpy-1.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd04a73eb2769eb0bfe43f5bfde1215c5923d6924b9b90f94d15f207a402226"}, - {file = "debugpy-1.8.5-cp38-cp38-win32.whl", hash = "sha256:8f913ee8e9fcf9d38a751f56e6de12a297ae7832749d35de26d960f14280750a"}, - {file = "debugpy-1.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:a697beca97dad3780b89a7fb525d5e79f33821a8bc0c06faf1f1289e549743cf"}, - {file = "debugpy-1.8.5-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:0a1029a2869d01cb777216af8c53cda0476875ef02a2b6ff8b2f2c9a4b04176c"}, - {file = "debugpy-1.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84c276489e141ed0b93b0af648eef891546143d6a48f610945416453a8ad406"}, - {file = "debugpy-1.8.5-cp39-cp39-win32.whl", hash = "sha256:ad84b7cde7fd96cf6eea34ff6c4a1b7887e0fe2ea46e099e53234856f9d99a34"}, - {file = "debugpy-1.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:7b0fe36ed9d26cb6836b0a51453653f8f2e347ba7348f2bbfe76bfeb670bfb1c"}, - {file = "debugpy-1.8.5-py2.py3-none-any.whl", hash = "sha256:55919dce65b471eff25901acf82d328bbd5b833526b6c1364bd5133754777a44"}, - {file = "debugpy-1.8.5.zip", hash = "sha256:b2112cfeb34b4507399d298fe7023a16656fc553ed5246536060ca7bd0e668d0"}, + {file = "debugpy-1.8.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:30f467c5345d9dfdcc0afdb10e018e47f092e383447500f125b4e013236bf14b"}, + {file = "debugpy-1.8.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d73d8c52614432f4215d0fe79a7e595d0dd162b5c15233762565be2f014803b"}, + {file = "debugpy-1.8.6-cp310-cp310-win32.whl", hash = "sha256:e3e182cd98eac20ee23a00653503315085b29ab44ed66269482349d307b08df9"}, + {file = "debugpy-1.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:e3a82da039cfe717b6fb1886cbbe5c4a3f15d7df4765af857f4307585121c2dd"}, + {file = "debugpy-1.8.6-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:67479a94cf5fd2c2d88f9615e087fcb4fec169ec780464a3f2ba4a9a2bb79955"}, + {file = "debugpy-1.8.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fb8653f6cbf1dd0a305ac1aa66ec246002145074ea57933978346ea5afdf70b"}, + {file = "debugpy-1.8.6-cp311-cp311-win32.whl", hash = "sha256:cdaf0b9691879da2d13fa39b61c01887c34558d1ff6e5c30e2eb698f5384cd43"}, + {file = "debugpy-1.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:43996632bee7435583952155c06881074b9a742a86cee74e701d87ca532fe833"}, + {file = "debugpy-1.8.6-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:db891b141fc6ee4b5fc6d1cc8035ec329cabc64bdd2ae672b4550c87d4ecb128"}, + {file = "debugpy-1.8.6-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:567419081ff67da766c898ccf21e79f1adad0e321381b0dfc7a9c8f7a9347972"}, + {file = "debugpy-1.8.6-cp312-cp312-win32.whl", hash = "sha256:c9834dfd701a1f6bf0f7f0b8b1573970ae99ebbeee68314116e0ccc5c78eea3c"}, + {file = "debugpy-1.8.6-cp312-cp312-win_amd64.whl", hash = "sha256:e4ce0570aa4aca87137890d23b86faeadf184924ad892d20c54237bcaab75d8f"}, + {file = "debugpy-1.8.6-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:df5dc9eb4ca050273b8e374a4cd967c43be1327eeb42bfe2f58b3cdfe7c68dcb"}, + {file = 
"debugpy-1.8.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a85707c6a84b0c5b3db92a2df685b5230dd8fb8c108298ba4f11dba157a615a"}, + {file = "debugpy-1.8.6-cp38-cp38-win32.whl", hash = "sha256:538c6cdcdcdad310bbefd96d7850be1cd46e703079cc9e67d42a9ca776cdc8a8"}, + {file = "debugpy-1.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:22140bc02c66cda6053b6eb56dfe01bbe22a4447846581ba1dd6df2c9f97982d"}, + {file = "debugpy-1.8.6-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:c1cef65cffbc96e7b392d9178dbfd524ab0750da6c0023c027ddcac968fd1caa"}, + {file = "debugpy-1.8.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e60bd06bb3cc5c0e957df748d1fab501e01416c43a7bdc756d2a992ea1b881"}, + {file = "debugpy-1.8.6-cp39-cp39-win32.whl", hash = "sha256:f7158252803d0752ed5398d291dee4c553bb12d14547c0e1843ab74ee9c31123"}, + {file = "debugpy-1.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3358aa619a073b620cd0d51d8a6176590af24abcc3fe2e479929a154bf591b51"}, + {file = "debugpy-1.8.6-py2.py3-none-any.whl", hash = "sha256:b48892df4d810eff21d3ef37274f4c60d32cdcafc462ad5647239036b0f0649f"}, + {file = "debugpy-1.8.6.zip", hash = "sha256:c931a9371a86784cee25dec8d65bc2dc7a21f3f1552e3833d9ef8f919d22280a"}, ] [[package]] @@ -822,13 +864,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] @@ -864,19 +906,19 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "filelock" -version = "3.15.4" +version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "fqdn" @@ -977,13 +1019,13 @@ files = [ [[package]] name = "fsspec" -version = "2024.6.1" +version = "2024.9.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"}, - {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"}, + {file = "fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b"}, + {file = "fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8"}, ] [package.extras] @@ -1016,13 +1058,13 @@ tqdm = ["tqdm"] [[package]] name = "google-api-core" -version = "2.19.1" +version = "2.20.0" description = "Google API client core library" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-core-2.19.1.tar.gz", hash = "sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd"}, - {file = "google_api_core-2.19.1-py3-none-any.whl", hash = "sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125"}, + {file = "google_api_core-2.20.0-py3-none-any.whl", hash = "sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a"}, + {file = "google_api_core-2.20.0.tar.gz", hash = "sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f"}, ] [package.dependencies] @@ -1039,13 +1081,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.30.0" +version = "2.35.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.30.0.tar.gz", hash = "sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688"}, - {file = "google_auth-2.30.0-py2.py3-none-any.whl", hash = "sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5"}, + {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, + {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, ] 
[package.dependencies] @@ -1055,20 +1097,20 @@ rsa = ">=3.1.4,<5" [package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] [[package]] name = "googleapis-common-protos" -version = "1.63.2" +version = "1.65.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" files = [ - {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, - {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, + {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, + {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, ] [package.dependencies] @@ -1079,61 +1121,61 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpcio" -version = "1.64.1" +version = "1.66.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"}, - {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"}, - {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"}, - {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"}, - {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"}, - {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"}, - {file = 
"grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"}, - {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"}, - {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"}, - {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"}, - {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"}, - {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"}, - {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"}, - {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"}, - {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"}, - {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"}, - {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"}, - {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"}, - {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = 
"sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"}, - {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"}, - {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"}, - {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.64.1)"] + {file = "grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492"}, + {file = "grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac"}, + {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60"}, + {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f"}, + {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083"}, + {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a"}, + {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d"}, + {file = "grpcio-1.66.1-cp310-cp310-win32.whl", hash = "sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c"}, + {file = "grpcio-1.66.1-cp310-cp310-win_amd64.whl", hash = "sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858"}, + {file = "grpcio-1.66.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a"}, + {file = "grpcio-1.66.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26"}, + {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df"}, + {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22"}, + {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1"}, + {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e"}, + {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd"}, + {file = 
"grpcio-1.66.1-cp311-cp311-win32.whl", hash = "sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791"}, + {file = "grpcio-1.66.1-cp311-cp311-win_amd64.whl", hash = "sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb"}, + {file = "grpcio-1.66.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a"}, + {file = "grpcio-1.66.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9"}, + {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d"}, + {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0"}, + {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761"}, + {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815"}, + {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524"}, + {file = "grpcio-1.66.1-cp312-cp312-win32.whl", hash = "sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759"}, + {file = "grpcio-1.66.1-cp312-cp312-win_amd64.whl", hash = "sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734"}, + {file = "grpcio-1.66.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2"}, + {file = "grpcio-1.66.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce"}, + {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c"}, + {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b"}, + {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef"}, + {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb"}, + {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d"}, + {file = "grpcio-1.66.1-cp38-cp38-win32.whl", hash = "sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3"}, + {file = "grpcio-1.66.1-cp38-cp38-win_amd64.whl", hash = "sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce"}, + {file = "grpcio-1.66.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503"}, + {file = "grpcio-1.66.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e"}, + {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0"}, + {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44"}, + {file = 
"grpcio-1.66.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e"}, + {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb"}, + {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c"}, + {file = "grpcio-1.66.1-cp39-cp39-win32.whl", hash = "sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45"}, + {file = "grpcio-1.66.1-cp39-cp39-win_amd64.whl", hash = "sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8"}, + {file = "grpcio-1.66.1.tar.gz", hash = "sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.66.1)"] [[package]] name = "h11" @@ -1194,33 +1236,40 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "importlib-metadata" -version = "8.0.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, - {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" @@ -1398,13 +1447,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.22.0" +version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = 
"jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, - {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, ] [package.dependencies] @@ -1419,11 +1468,11 @@ rfc3339-validator = {version = "*", optional = true, markers = "extra == \"forma rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} rpds-py = ">=0.7.1" uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} -webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] [[package]] name = "jsonschema-specifications" @@ -1651,13 +1700,13 @@ test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-v [[package]] name = "jupyterlab-widgets" -version = "3.0.11" +version = "3.0.13" description = "Jupyter interactive widgets for JupyterLab" optional = false python-versions = ">=3.7" files = [ - {file = "jupyterlab_widgets-3.0.11-py3-none-any.whl", hash = "sha256:78287fd86d20744ace330a61625024cf5521e1c012a352ddc0a3cdc2348becd0"}, - {file = "jupyterlab_widgets-3.0.11.tar.gz", hash = "sha256:dd5ac679593c969af29c9bed054c24f26842baa51352114736756bc035deee27"}, + {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"}, + {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"}, ] [[package]] @@ -1707,13 +1756,13 @@ testing = ["pytest"] [[package]] name = "markdown" -version = "3.6" +version = "3.7" description = "Python implementation of John Gruber's Markdown." 
optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, - {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, + {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, + {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, ] [package.dependencies] @@ -1874,167 +1923,180 @@ files = [ [[package]] name = "msgpack" -version = "1.0.8" +version = "1.1.0" description = "MessagePack serializer" optional = false python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, - {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, - {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, - {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, - {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, - {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, - {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, - {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, - {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, - {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, - {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, + {file = 
"msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, + {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, + {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, + {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, + {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, + {file = 
"msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, + {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, + {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, + {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, + {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, + {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, + {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, + {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, + {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, ] [[package]] name = "multidict" -version = "6.0.5" +version = "6.1.0" description = "multidict implementation" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = 
"multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - 
{file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = 
"sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = 
"multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, -] + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = 
"multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = 
"multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = 
"multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} [[package]] name = "nbclient" @@ -2147,56 +2209,56 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" [[package]] name = "numpy" -version = "2.0.0" +version = "2.0.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-2.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:04494f6ec467ccb5369d1808570ae55f6ed9b5809d7f035059000a37b8d7e86f"}, - {file = "numpy-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2635dbd200c2d6faf2ef9a0d04f0ecc6b13b3cad54f7c67c61155138835515d2"}, - {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:0a43f0974d501842866cc83471bdb0116ba0dffdbaac33ec05e6afed5b615238"}, - {file = "numpy-2.0.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:8d83bb187fb647643bd56e1ae43f273c7f4dbcdf94550d7938cfc32566756514"}, - {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e843d186c8fb1b102bef3e2bc35ef81160ffef3194646a7fdd6a73c6b97196"}, - {file = "numpy-2.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7696c615765091cc5093f76fd1fa069870304beaccfd58b5dcc69e55ef49c1"}, - {file = "numpy-2.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b4c76e3d4c56f145d41b7b6751255feefae92edbc9a61e1758a98204200f30fc"}, - {file = "numpy-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd3a644e4807e73b4e1867b769fbf1ce8c5d80e7caaef0d90dcdc640dfc9787"}, - {file = "numpy-2.0.0-cp310-cp310-win32.whl", hash = "sha256:cee6cc0584f71adefe2c908856ccc98702baf95ff80092e4ca46061538a2ba98"}, - {file = "numpy-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:ed08d2703b5972ec736451b818c2eb9da80d66c3e84aed1deeb0c345fefe461b"}, - {file = 
"numpy-2.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad0c86f3455fbd0de6c31a3056eb822fc939f81b1618f10ff3406971893b62a5"}, - {file = "numpy-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7f387600d424f91576af20518334df3d97bc76a300a755f9a8d6e4f5cadd289"}, - {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:34f003cb88b1ba38cb9a9a4a3161c1604973d7f9d5552c38bc2f04f829536609"}, - {file = "numpy-2.0.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:b6f6a8f45d0313db07d6d1d37bd0b112f887e1369758a5419c0370ba915b3871"}, - {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f64641b42b2429f56ee08b4f427a4d2daf916ec59686061de751a55aafa22e4"}, - {file = "numpy-2.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7039a136017eaa92c1848152827e1424701532ca8e8967fe480fe1569dae581"}, - {file = "numpy-2.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46e161722e0f619749d1cd892167039015b2c2817296104487cd03ed4a955995"}, - {file = "numpy-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0e50842b2295ba8414c8c1d9d957083d5dfe9e16828b37de883f51fc53c4016f"}, - {file = "numpy-2.0.0-cp311-cp311-win32.whl", hash = "sha256:2ce46fd0b8a0c947ae047d222f7136fc4d55538741373107574271bc00e20e8f"}, - {file = "numpy-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd6acc766814ea6443628f4e6751d0da6593dae29c08c0b2606164db026970c"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:354f373279768fa5a584bac997de6a6c9bc535c482592d7a813bb0c09be6c76f"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4d2f62e55a4cd9c58c1d9a1c9edaedcd857a73cb6fda875bf79093f9d9086f85"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:1e72728e7501a450288fc8e1f9ebc73d90cfd4671ebbd631f3e7857c39bd16f2"}, - {file = "numpy-2.0.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:84554fc53daa8f6abf8e8a66e076aff6ece62de68523d9f665f32d2fc50fd66e"}, - {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73aafd1afca80afecb22718f8700b40ac7cab927b8abab3c3e337d70e10e5a2"}, - {file = "numpy-2.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d9f7d256fbc804391a7f72d4a617302b1afac1112fac19b6c6cec63fe7fe8a"}, - {file = "numpy-2.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0ec84b9ba0654f3b962802edc91424331f423dcf5d5f926676e0150789cb3d95"}, - {file = "numpy-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:feff59f27338135776f6d4e2ec7aeeac5d5f7a08a83e80869121ef8164b74af9"}, - {file = "numpy-2.0.0-cp312-cp312-win32.whl", hash = "sha256:c5a59996dc61835133b56a32ebe4ef3740ea5bc19b3983ac60cc32be5a665d54"}, - {file = "numpy-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a356364941fb0593bb899a1076b92dfa2029f6f5b8ba88a14fd0984aaf76d0df"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e61155fae27570692ad1d327e81c6cf27d535a5d7ef97648a17d922224b216de"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4554eb96f0fd263041baf16cf0881b3f5dafae7a59b1049acb9540c4d57bc8cb"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:903703372d46bce88b6920a0cd86c3ad82dae2dbef157b5fc01b70ea1cfc430f"}, - {file = "numpy-2.0.0-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:3e8e01233d57639b2e30966c63d36fcea099d17c53bf424d77f088b0f4babd86"}, - {file = 
"numpy-2.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cde1753efe513705a0c6d28f5884e22bdc30438bf0085c5c486cdaff40cd67a"}, - {file = "numpy-2.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821eedb7165ead9eebdb569986968b541f9908979c2da8a4967ecac4439bae3d"}, - {file = "numpy-2.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a1712c015831da583b21c5bfe15e8684137097969c6d22e8316ba66b5baabe4"}, - {file = "numpy-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9c27f0946a3536403efb0e1c28def1ae6730a72cd0d5878db38824855e3afc44"}, - {file = "numpy-2.0.0-cp39-cp39-win32.whl", hash = "sha256:63b92c512d9dbcc37f9d81b123dec99fdb318ba38c8059afc78086fe73820275"}, - {file = "numpy-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f6bed7f840d44c08ebdb73b1825282b801799e325bcbdfa6bc5c370e5aecc65"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9416a5c2e92ace094e9f0082c5fd473502c91651fb896bc17690d6fc475128d6"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:17067d097ed036636fa79f6a869ac26df7db1ba22039d962422506640314933a"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ecb5b0582cd125f67a629072fed6f83562d9dd04d7e03256c9829bdec027ad"}, - {file = "numpy-2.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cef04d068f5fb0518a77857953193b6bb94809a806bd0a14983a8f12ada060c9"}, - {file = "numpy-2.0.0.tar.gz", hash = "sha256:cf5d1c9e6837f8af9f92b6bd3e86d513cdc11f60fd62185cc49ec7d1aba34864"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, + {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, + {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = 
"sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, + {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, + {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, + {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, + {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, + {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, + {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, + {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, ] [[package]] @@ -2282,40 +2344,53 @@ files = [ [[package]] name = "pandas" -version = "2.2.2" +version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, - {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, - {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, - {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, - {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, - {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, - {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, - {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, - {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, - {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, - {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, - {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, - {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, - {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, - {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, - {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, - {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, - {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = 
"pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = 
"pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, ] [package.dependencies] @@ -2366,13 +2441,13 @@ files = [ [[package]] name = "paramiko" -version = "3.4.0" +version = "3.5.0" description = "SSH2 protocol library" optional = false python-versions = ">=3.6" files = [ - {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, - {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, + {file = "paramiko-3.5.0-py3-none-any.whl", hash = "sha256:1fedf06b085359051cd7d0d270cebe19e755a8a921cc2ddbfa647fb0cd7d68f9"}, + {file = "paramiko-3.5.0.tar.gz", hash = "sha256:ad11e540da4f55cedda52931f1a3f812a8238a7af7f62a60de538cd80bb28124"}, ] [package.dependencies] @@ -2431,19 +2506,19 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -2462,13 +2537,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "prometheus-client" -version = "0.20.0" +version = "0.21.0" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.8" files = [ - {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, - {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, + {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"}, + {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"}, ] [package.extras] @@ -2476,13 +2551,13 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.47" +version = "3.0.48" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, + {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, + {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, ] [package.dependencies] @@ -2507,22 +2582,22 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "5.27.2" +version = "5.28.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"}, - {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"}, - {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"}, - {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"}, - {file = 
"protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"}, - {file = "protobuf-5.27.2-cp38-cp38-win32.whl", hash = "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863"}, - {file = "protobuf-5.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6"}, - {file = "protobuf-5.27.2-cp39-cp39-win32.whl", hash = "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca"}, - {file = "protobuf-5.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce"}, - {file = "protobuf-5.27.2-py3-none-any.whl", hash = "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"}, - {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"}, + {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"}, + {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"}, + {file = "protobuf-5.28.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7"}, + {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f"}, + {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f"}, + {file = "protobuf-5.28.2-cp38-cp38-win32.whl", hash = "sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0"}, + {file = "protobuf-5.28.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3"}, + {file = "protobuf-5.28.2-cp39-cp39-win32.whl", hash = "sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36"}, + {file = "protobuf-5.28.2-cp39-cp39-win_amd64.whl", hash = "sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276"}, + {file = "protobuf-5.28.2-py3-none-any.whl", hash = "sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece"}, + {file = "protobuf-5.28.2.tar.gz", hash = "sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0"}, ] [[package]] @@ -2567,13 +2642,13 @@ files = [ [[package]] name = "pure-eval" -version = "0.2.2" +version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" files = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, ] [package.extras] @@ -2597,72 +2672,75 @@ files = [ [[package]] name = "pyarrow" -version = "16.1.0" +version = "17.0.0" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" files = [ - {file = "pyarrow-16.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:17e23b9a65a70cc733d8b738baa6ad3722298fa0c81d88f63ff94bf25eaa77b9"}, - {file = 
"pyarrow-16.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4740cc41e2ba5d641071d0ab5e9ef9b5e6e8c7611351a5cb7c1d175eaf43674a"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98100e0268d04e0eec47b73f20b39c45b4006f3c4233719c3848aa27a03c1aef"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68f409e7b283c085f2da014f9ef81e885d90dcd733bd648cfba3ef265961848"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:a8914cd176f448e09746037b0c6b3a9d7688cef451ec5735094055116857580c"}, - {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:48be160782c0556156d91adbdd5a4a7e719f8d407cb46ae3bb4eaee09b3111bd"}, - {file = "pyarrow-16.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9cf389d444b0f41d9fe1444b70650fea31e9d52cfcb5f818b7888b91b586efff"}, - {file = "pyarrow-16.1.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:d0ebea336b535b37eee9eee31761813086d33ed06de9ab6fc6aaa0bace7b250c"}, - {file = "pyarrow-16.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e73cfc4a99e796727919c5541c65bb88b973377501e39b9842ea71401ca6c1c"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf9251264247ecfe93e5f5a0cd43b8ae834f1e61d1abca22da55b20c788417f6"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddf5aace92d520d3d2a20031d8b0ec27b4395cab9f74e07cc95edf42a5cc0147"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:25233642583bf658f629eb230b9bb79d9af4d9f9229890b3c878699c82f7d11e"}, - {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a33a64576fddfbec0a44112eaf844c20853647ca833e9a647bfae0582b2ff94b"}, - {file = "pyarrow-16.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:185d121b50836379fe012753cf15c4ba9638bda9645183ab36246923875f8d1b"}, - {file = "pyarrow-16.1.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:2e51ca1d6ed7f2e9d5c3c83decf27b0d17bb207a7dea986e8dc3e24f80ff7d6f"}, - {file = "pyarrow-16.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06ebccb6f8cb7357de85f60d5da50e83507954af617d7b05f48af1621d331c9a"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04707f1979815f5e49824ce52d1dceb46e2f12909a48a6a753fe7cafbc44a0c"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d32000693deff8dc5df444b032b5985a48592c0697cb6e3071a5d59888714e2"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8785bb10d5d6fd5e15d718ee1d1f914fe768bf8b4d1e5e9bf253de8a26cb1628"}, - {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e1369af39587b794873b8a307cc6623a3b1194e69399af0efd05bb202195a5a7"}, - {file = "pyarrow-16.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:febde33305f1498f6df85e8020bca496d0e9ebf2093bab9e0f65e2b4ae2b3444"}, - {file = "pyarrow-16.1.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b5f5705ab977947a43ac83b52ade3b881eb6e95fcc02d76f501d549a210ba77f"}, - {file = "pyarrow-16.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d27bf89dfc2576f6206e9cd6cf7a107c9c06dc13d53bbc25b0bd4556f19cf5f"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d07de3ee730647a600037bc1d7b7994067ed64d0eba797ac74b2bc77384f4c2"}, - {file = 
"pyarrow-16.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbef391b63f708e103df99fbaa3acf9f671d77a183a07546ba2f2c297b361e83"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19741c4dbbbc986d38856ee7ddfdd6a00fc3b0fc2d928795b95410d38bb97d15"}, - {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f2c5fb249caa17b94e2b9278b36a05ce03d3180e6da0c4c3b3ce5b2788f30eed"}, - {file = "pyarrow-16.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:e6b6d3cd35fbb93b70ade1336022cc1147b95ec6af7d36906ca7fe432eb09710"}, - {file = "pyarrow-16.1.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:18da9b76a36a954665ccca8aa6bd9f46c1145f79c0bb8f4f244f5f8e799bca55"}, - {file = "pyarrow-16.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99f7549779b6e434467d2aa43ab2b7224dd9e41bdde486020bae198978c9e05e"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f07fdffe4fd5b15f5ec15c8b64584868d063bc22b86b46c9695624ca3505b7b4"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfe389a08ea374972bd4065d5f25d14e36b43ebc22fc75f7b951f24378bf0b5"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3b20bd67c94b3a2ea0a749d2a5712fc845a69cb5d52e78e6449bbd295611f3aa"}, - {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ba8ac20693c0bb0bf4b238751d4409e62852004a8cf031c73b0e0962b03e45e3"}, - {file = "pyarrow-16.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:31a1851751433d89a986616015841977e0a188662fcffd1a5677453f1df2de0a"}, - {file = "pyarrow-16.1.0.tar.gz", hash = "sha256:15fbb22ea96d11f0b5768504a3f961edab25eaf4197c341720c4a387f6c60315"}, + {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, + {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1e060b3876faa11cee287839f9cc7cdc00649f475714b8680a05fd9071d545"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c06d4624c0ad6674364bb46ef38c3132768139ddec1c56582dbac54f2663e2"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fa3c246cc58cb5a4a5cb407a18f193354ea47dd0648194e6265bd24177982fe8"}, + {file = "pyarrow-17.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:f7ae2de664e0b158d1607699a16a488de3d008ba99b3a7aa5de1cbc13574d047"}, + {file = "pyarrow-17.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:5984f416552eea15fd9cee03da53542bf4cddaef5afecefb9aa8d1010c335087"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:1c8856e2ef09eb87ecf937104aacfa0708f22dfeb039c363ec99735190ffb977"}, + {file = "pyarrow-17.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e19f569567efcbbd42084e87f948778eb371d308e137a0f97afe19bb860ccb3"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b244dc8e08a23b3e352899a006a26ae7b4d0da7bb636872fa8f5884e70acf15"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b72e87fe3e1db343995562f7fff8aee354b55ee83d13afba65400c178ab2597"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:dc5c31c37409dfbc5d014047817cb4ccd8c1ea25d19576acf1a001fe07f5b420"}, + {file = "pyarrow-17.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e3343cb1e88bc2ea605986d4b94948716edc7a8d14afd4e2c097232f729758b4"}, + {file = "pyarrow-17.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:a27532c38f3de9eb3e90ecab63dfda948a8ca859a66e3a47f5f42d1e403c4d03"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9b8a823cea605221e61f34859dcc03207e52e409ccf6354634143e23af7c8d22"}, + {file = "pyarrow-17.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1e70de6cb5790a50b01d2b686d54aaf73da01266850b05e3af2a1bc89e16053"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0071ce35788c6f9077ff9ecba4858108eebe2ea5a3f7cf2cf55ebc1dbc6ee24a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:757074882f844411fcca735e39aae74248a1531367a7c80799b4266390ae51cc"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9ba11c4f16976e89146781a83833df7f82077cdab7dc6232c897789343f7891a"}, + {file = "pyarrow-17.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b0c6ac301093b42d34410b187bba560b17c0330f64907bfa4f7f7f2444b0cf9b"}, + {file = "pyarrow-17.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:392bc9feabc647338e6c89267635e111d71edad5fcffba204425a7c8d13610d7"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:af5ff82a04b2171415f1410cff7ebb79861afc5dae50be73ce06d6e870615204"}, + {file = "pyarrow-17.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:edca18eaca89cd6382dfbcff3dd2d87633433043650c07375d095cd3517561d8"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7916bff914ac5d4a8fe25b7a25e432ff921e72f6f2b7547d1e325c1ad9d155"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f553ca691b9e94b202ff741bdd40f6ccb70cdd5fbf65c187af132f1317de6145"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cdb0e627c86c373205a2f94a510ac4376fdc523f8bb36beab2e7f204416163c"}, + {file = "pyarrow-17.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:d7d192305d9d8bc9082d10f361fc70a73590a4c65cf31c3e6926cd72b76bc35c"}, + {file = "pyarrow-17.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:02dae06ce212d8b3244dd3e7d12d9c4d3046945a5933d28026598e9dbbda1fca"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:13d7a460b412f31e4c0efa1148e1d29bdf18ad1411eb6757d38f8fbdcc8645fb"}, + {file = "pyarrow-17.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b564a51fbccfab5a04a80453e5ac6c9954a9c5ef2890d1bcf63741909c3f8df"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32503827abbc5aadedfa235f5ece8c4f8f8b0a3cf01066bc8d29de7539532687"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a155acc7f154b9ffcc85497509bcd0d43efb80d6f733b0dc3bb14e281f131c8b"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:dec8d129254d0188a49f8a1fc99e0560dc1b85f60af729f47de4046015f9b0a5"}, + {file = "pyarrow-17.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a48ddf5c3c6a6c505904545c25a4ae13646ae1f8ba703c4df4a1bfe4f4006bda"}, + {file = "pyarrow-17.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:42bf93249a083aca230ba7e2786c5f673507fa97bbd9725a1e2754715151a204"}, + {file = 
"pyarrow-17.0.0.tar.gz", hash = "sha256:4beca9521ed2c0921c1023e68d097d0299b62c362639ea315572a58f3f50fd28"}, ] [package.dependencies] numpy = ">=1.16.6" +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] + [[package]] name = "pyasn1" -version = "0.6.0" +version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] name = "pyasn1-modules" -version = "0.4.0" +version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] [package.dependencies] @@ -2681,54 +2759,54 @@ files = [ [[package]] name = "pydantic" -version = "1.10.17" +version = "1.10.18" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"}, - {file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"}, - {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e"}, - {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7"}, - {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6"}, - {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681"}, - {file = "pydantic-1.10.17-cp310-cp310-win_amd64.whl", hash = "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3"}, - {file = "pydantic-1.10.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a"}, - {file = "pydantic-1.10.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b"}, - {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63"}, - {file = 
"pydantic-1.10.17-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741"}, - {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c"}, - {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d"}, - {file = "pydantic-1.10.17-cp311-cp311-win_amd64.whl", hash = "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b"}, - {file = "pydantic-1.10.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb"}, - {file = "pydantic-1.10.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815"}, - {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab"}, - {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc"}, - {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f"}, - {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f"}, - {file = "pydantic-1.10.17-cp312-cp312-win_amd64.whl", hash = "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad"}, - {file = "pydantic-1.10.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655"}, - {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b"}, - {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7"}, - {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3"}, - {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076"}, - {file = "pydantic-1.10.17-cp37-cp37m-win_amd64.whl", hash = "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f"}, - {file = "pydantic-1.10.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33"}, - {file = "pydantic-1.10.17-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e"}, - {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768"}, - {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7"}, - {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7"}, - {file = 
"pydantic-1.10.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75"}, - {file = "pydantic-1.10.17-cp38-cp38-win_amd64.whl", hash = "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0"}, - {file = "pydantic-1.10.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54"}, - {file = "pydantic-1.10.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773"}, - {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe"}, - {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab"}, - {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d"}, - {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd"}, - {file = "pydantic-1.10.17-cp39-cp39-win_amd64.whl", hash = "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227"}, - {file = "pydantic-1.10.17-py3-none-any.whl", hash = "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688"}, - {file = "pydantic-1.10.17.tar.gz", hash = "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991"}, + {file = "pydantic-1.10.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e405ffcc1254d76bb0e760db101ee8916b620893e6edfbfee563b3c6f7a67c02"}, + {file = "pydantic-1.10.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e306e280ebebc65040034bff1a0a81fd86b2f4f05daac0131f29541cafd80b80"}, + {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11d9d9b87b50338b1b7de4ebf34fd29fdb0d219dc07ade29effc74d3d2609c62"}, + {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b661ce52c7b5e5f600c0c3c5839e71918346af2ef20062705ae76b5c16914cab"}, + {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c20f682defc9ef81cd7eaa485879ab29a86a0ba58acf669a78ed868e72bb89e0"}, + {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5ae6b7c8483b1e0bf59e5f1843e4fd8fd405e11df7de217ee65b98eb5462861"}, + {file = "pydantic-1.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:74fe19dda960b193b0eb82c1f4d2c8e5e26918d9cda858cbf3f41dd28549cb70"}, + {file = "pydantic-1.10.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72fa46abace0a7743cc697dbb830a41ee84c9db8456e8d77a46d79b537efd7ec"}, + {file = "pydantic-1.10.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef0fe7ad7cbdb5f372463d42e6ed4ca9c443a52ce544472d8842a0576d830da5"}, + {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00e63104346145389b8e8f500bc6a241e729feaf0559b88b8aa513dd2065481"}, + {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae6fa2008e1443c46b7b3a5eb03800121868d5ab6bc7cda20b5df3e133cde8b3"}, + {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9f463abafdc92635da4b38807f5b9972276be7c8c5121989768549fceb8d2588"}, + {file = 
"pydantic-1.10.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3445426da503c7e40baccefb2b2989a0c5ce6b163679dd75f55493b460f05a8f"}, + {file = "pydantic-1.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:467a14ee2183bc9c902579bb2f04c3d3dac00eff52e252850509a562255b2a33"}, + {file = "pydantic-1.10.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:efbc8a7f9cb5fe26122acba1852d8dcd1e125e723727c59dcd244da7bdaa54f2"}, + {file = "pydantic-1.10.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24a4a159d0f7a8e26bf6463b0d3d60871d6a52eac5bb6a07a7df85c806f4c048"}, + {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b74be007703547dc52e3c37344d130a7bfacca7df112a9e5ceeb840a9ce195c7"}, + {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcb20d4cb355195c75000a49bb4a31d75e4295200df620f454bbc6bdf60ca890"}, + {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46f379b8cb8a3585e3f61bf9ae7d606c70d133943f339d38b76e041ec234953f"}, + {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbfbca662ed3729204090c4d09ee4beeecc1a7ecba5a159a94b5a4eb24e3759a"}, + {file = "pydantic-1.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:c6d0a9f9eccaf7f438671a64acf654ef0d045466e63f9f68a579e2383b63f357"}, + {file = "pydantic-1.10.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d5492dbf953d7d849751917e3b2433fb26010d977aa7a0765c37425a4026ff1"}, + {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe734914977eed33033b70bfc097e1baaffb589517863955430bf2e0846ac30f"}, + {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15fdbe568beaca9aacfccd5ceadfb5f1a235087a127e8af5e48df9d8a45ae85c"}, + {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c3e742f62198c9eb9201781fbebe64533a3bbf6a76a91b8d438d62b813079dbc"}, + {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19a3bd00b9dafc2cd7250d94d5b578edf7a0bd7daf102617153ff9a8fa37871c"}, + {file = "pydantic-1.10.18-cp37-cp37m-win_amd64.whl", hash = "sha256:2ce3fcf75b2bae99aa31bd4968de0474ebe8c8258a0110903478bd83dfee4e3b"}, + {file = "pydantic-1.10.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:335a32d72c51a313b33fa3a9b0fe283503272ef6467910338e123f90925f0f03"}, + {file = "pydantic-1.10.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34a3613c7edb8c6fa578e58e9abe3c0f5e7430e0fc34a65a415a1683b9c32d9a"}, + {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9ee4e6ca1d9616797fa2e9c0bfb8815912c7d67aca96f77428e316741082a1b"}, + {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23e8ec1ce4e57b4f441fc91e3c12adba023fedd06868445a5b5f1d48f0ab3682"}, + {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:44ae8a3e35a54d2e8fa88ed65e1b08967a9ef8c320819a969bfa09ce5528fafe"}, + {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5389eb3b48a72da28c6e061a247ab224381435256eb541e175798483368fdd3"}, + {file = "pydantic-1.10.18-cp38-cp38-win_amd64.whl", hash = "sha256:069b9c9fc645474d5ea3653788b544a9e0ccd3dca3ad8c900c4c6eac844b4620"}, + {file = "pydantic-1.10.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:80b982d42515632eb51f60fa1d217dfe0729f008e81a82d1544cc392e0a50ddf"}, + {file = "pydantic-1.10.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aad8771ec8dbf9139b01b56f66386537c6fe4e76c8f7a47c10261b69ad25c2c9"}, + {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941a2eb0a1509bd7f31e355912eb33b698eb0051730b2eaf9e70e2e1589cae1d"}, + {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f7361a09b07915a98efd17fdec23103307a54db2000bb92095457ca758d485"}, + {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6951f3f47cb5ca4da536ab161ac0163cab31417d20c54c6de5ddcab8bc813c3f"}, + {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a4c5eec138a9b52c67f664c7d51d4c7234c5ad65dd8aacd919fb47445a62c86"}, + {file = "pydantic-1.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:49e26c51ca854286bffc22b69787a8d4063a62bf7d83dc21d44d2ff426108518"}, + {file = "pydantic-1.10.18-py3-none-any.whl", hash = "sha256:06a189b81ffc52746ec9c8c007f16e5167c8b0a696e1a726369327e3db7b2a82"}, + {file = "pydantic-1.10.18.tar.gz", hash = "sha256:baebdff1907d1d96a139c25136a9bb7d17e118f133a76a2ef3b845e831e3403a"}, ] [package.dependencies] @@ -2858,13 +2936,13 @@ files = [ [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] @@ -2907,62 +2985,64 @@ files = [ [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = 
"PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] @@ -3260,110 +3340,114 @@ jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] name = "rpds-py" -version = "0.18.1" +version = "0.20.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" files = [ - {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"}, - {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"}, - {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"}, - {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"}, - {file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"}, - {file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"}, - {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"}, - {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"}, - {file = 
"rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"}, - {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"}, - {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"}, - {file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"}, - {file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"}, - {file = "rpds_py-0.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3dd3cd86e1db5aadd334e011eba4e29d37a104b403e8ca24dcd6703c68ca55b3"}, - {file = "rpds_py-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:05f3d615099bd9b13ecf2fc9cf2d839ad3f20239c678f461c753e93755d629ee"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35b2b771b13eee8729a5049c976197ff58a27a3829c018a04341bcf1ae409b2b"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ee17cd26b97d537af8f33635ef38be873073d516fd425e80559f4585a7b90c43"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b646bf655b135ccf4522ed43d6902af37d3f5dbcf0da66c769a2b3938b9d8184"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19ba472b9606c36716062c023afa2484d1e4220548751bda14f725a7de17b4f6"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e30ac5e329098903262dc5bdd7e2086e0256aa762cc8b744f9e7bf2a427d3f8"}, - {file = "rpds_py-0.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d58ad6317d188c43750cb76e9deacf6051d0f884d87dc6518e0280438648a9ac"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e1735502458621921cee039c47318cb90b51d532c2766593be6207eec53e5c4c"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f5bab211605d91db0e2995a17b5c6ee5edec1270e46223e513eaa20da20076ac"}, - {file = "rpds_py-0.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2fc24a329a717f9e2448f8cd1f960f9dac4e45b6224d60734edeb67499bab03a"}, - {file = "rpds_py-0.18.1-cp312-none-win32.whl", hash = "sha256:1805d5901779662d599d0e2e4159d8a82c0b05faa86ef9222bf974572286b2b6"}, - {file = "rpds_py-0.18.1-cp312-none-win_amd64.whl", hash = "sha256:720edcb916df872d80f80a1cc5ea9058300b97721efda8651efcd938a9c70a72"}, - {file = "rpds_py-0.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:c827576e2fa017a081346dce87d532a5310241648eb3700af9a571a6e9fc7e74"}, - {file = "rpds_py-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa3679e751408d75a0b4d8d26d6647b6d9326f5e35c00a7ccd82b78ef64f65f8"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0abeee75434e2ee2d142d650d1e54ac1f8b01e6e6abdde8ffd6eeac6e9c38e20"}, - {file = 
"rpds_py-0.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed402d6153c5d519a0faf1bb69898e97fb31613b49da27a84a13935ea9164dfc"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:338dee44b0cef8b70fd2ef54b4e09bb1b97fc6c3a58fea5db6cc083fd9fc2724"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7750569d9526199c5b97e5a9f8d96a13300950d910cf04a861d96f4273d5b104"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:607345bd5912aacc0c5a63d45a1f73fef29e697884f7e861094e443187c02be5"}, - {file = "rpds_py-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:207c82978115baa1fd8d706d720b4a4d2b0913df1c78c85ba73fe6c5804505f0"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6d1e42d2735d437e7e80bab4d78eb2e459af48c0a46e686ea35f690b93db792d"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5463c47c08630007dc0fe99fb480ea4f34a89712410592380425a9b4e1611d8e"}, - {file = "rpds_py-0.18.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:06d218939e1bf2ca50e6b0ec700ffe755e5216a8230ab3e87c059ebb4ea06afc"}, - {file = "rpds_py-0.18.1-cp38-none-win32.whl", hash = "sha256:312fe69b4fe1ffbe76520a7676b1e5ac06ddf7826d764cc10265c3b53f96dbe9"}, - {file = "rpds_py-0.18.1-cp38-none-win_amd64.whl", hash = "sha256:9437ca26784120a279f3137ee080b0e717012c42921eb07861b412340f85bae2"}, - {file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"}, - {file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"}, - {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"}, - {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"}, - {file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"}, - {file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"}, - {file = 
"rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"}, - {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"}, - {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = 
"sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"}, - {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"}, - {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, + {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, + {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, + {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, + {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, + {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, + {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, + {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, + {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, + {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, + {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, + {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, + {file = 
"rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, + {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, + {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, + {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, + {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, + {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, + {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, + {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, + {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, + {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, + {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, + {file = 
"rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, + {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, + {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, + {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, + {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, + {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, + {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, + {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, + {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, + {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, + {file = 
"rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, + {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, + {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, + {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, ] [[package]] @@ -3398,18 +3482,23 @@ win32 = ["pywin32"] [[package]] name = "setuptools" -version = "70.1.0" +version = "75.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-70.1.0-py3-none-any.whl", hash = "sha256:d9b8b771455a97c8a9f3ab3448ebe0b29b5e105f1228bba41028be116985a267"}, - {file = "setuptools-70.1.0.tar.gz", hash = "sha256:01a1e793faa5bd89abc851fa15d0a0db26f160890c7102cd8dce643e886b47f5"}, + {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, + {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" @@ -3597,13 +3686,13 @@ files = [ [[package]] name = "tzdata" 
-version = "2024.1" +version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] @@ -3622,13 +3711,13 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -3639,13 +3728,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.3" +version = "20.26.5" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, + {file = "virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6"}, + {file = "virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4"}, ] [package.dependencies] @@ -3712,13 +3801,13 @@ test = ["websockets"] [[package]] name = "widgetsnbextension" -version = "4.0.11" +version = "4.0.13" description = "Jupyter interactive widgets for Jupyter Notebook" optional = false python-versions = ">=3.7" files = [ - {file = "widgetsnbextension-4.0.11-py3-none-any.whl", hash = "sha256:55d4d6949d100e0d08b94948a42efc3ed6dfdc0e9468b2c4b128c9a2ce3a7a36"}, - {file = "widgetsnbextension-4.0.11.tar.gz", hash = "sha256:8b22a8f1910bfd188e596fe7fc05dcbd87e810c8a4ba010bdb3da86637398474"}, + {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"}, + {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, ] [[package]] @@ -3802,101 +3891,103 @@ files = [ [[package]] name = "yarl" -version = "1.9.4" +version = "1.13.0" description = "Yet another URL library" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = 
"yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, 
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = 
"yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, + {file = "yarl-1.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:66c028066be36d54e7a0a38e832302b23222e75db7e65ed862dc94effc8ef062"}, + {file = "yarl-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:517f9d90ca0224bb7002266eba6e70d8fcc8b1d0c9321de2407e41344413ed46"}, + {file = "yarl-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5378cb60f4209505f6aa60423c174336bd7b22e0d8beb87a2a99ad50787f1341"}, + {file = "yarl-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0675a9cf65176e11692b20a516d5f744849251aa24024f422582d2d1bf7c8c82"}, + {file = "yarl-1.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419c22b419034b4ee3ba1c27cbbfef01ca8d646f9292f614f008093143334cdc"}, + {file = "yarl-1.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf10e525e461f43831d82149d904f35929d89f3ccd65beaf7422aecd500dd39"}, + {file = 
"yarl-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d78ebad57152d301284761b03a708aeac99c946a64ba967d47cbcc040e36688b"}, + {file = "yarl-1.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e480a12cec58009eeaeee7f48728dc8f629f8e0f280d84957d42c361969d84da"}, + {file = "yarl-1.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e5462756fb34c884ca9d4875b6d2ec80957a767123151c467c97a9b423617048"}, + {file = "yarl-1.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bff0d468664cdf7b2a6bfd5e17d4a7025edb52df12e0e6e17223387b421d425c"}, + {file = "yarl-1.13.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ffd8a9758b5df7401a49d50e76491f4c582cf7350365439563062cdff45bf16"}, + {file = "yarl-1.13.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ca71238af0d247d07747cb7202a9359e6e1d6d9e277041e1ad2d9f36b3a111a6"}, + {file = "yarl-1.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fda4404bbb6f91e327827f4483d52fe24f02f92de91217954cf51b1cb9ee9c41"}, + {file = "yarl-1.13.0-cp310-cp310-win32.whl", hash = "sha256:e557e2681b47a0ecfdfbea44743b3184d94d31d5ce0e4b13ff64ce227a40f86e"}, + {file = "yarl-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:3590ed9c7477059aea067a58ec87b433bbd47a2ceb67703b1098cca1ba075f0d"}, + {file = "yarl-1.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8986fa2be78193dc8b8c27bd0d3667fe612f7232844872714c4200499d5225ca"}, + {file = "yarl-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0db15ce35dfd100bc9ab40173f143fbea26c84d7458d63206934fe5548fae25d"}, + {file = "yarl-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49bee8c99586482a238a7b2ec0ef94e5f186bfdbb8204d14a3dd31867b3875ce"}, + {file = "yarl-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c73e0f8375b75806b8771890580566a2e6135e6785250840c4f6c45b69eb72d"}, + {file = "yarl-1.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ab16c9e94726fdfcbf5b37a641c9d9d0b35cc31f286a2c3b9cad6451cb53b2b"}, + {file = "yarl-1.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:784d6e50ea96b3bbb078eb7b40d8c0e3674c2f12da4f0061f889b2cfdbab8f37"}, + {file = "yarl-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:580fdb2ea48a40bcaa709ee0dc71f64e7a8f23b44356cc18cd9ce55dc3bc3212"}, + {file = "yarl-1.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d2845f1a37438a8e11e4fcbbf6ffd64bc94dc9cb8c815f72d0eb6f6c622deb0"}, + {file = "yarl-1.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bcb374db7a609484941c01380c1450728ec84d9c3e68cd9a5feaecb52626c4be"}, + {file = "yarl-1.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:561a5f6c054927cf5a993dd7b032aeebc10644419e65db7dd6bdc0b848806e65"}, + {file = "yarl-1.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b536c2ac042add7f276d4e5857b08364fc32f28e02add153f6f214de50f12d07"}, + {file = "yarl-1.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:52b7bb09bb48f7855d574480e2efc0c30d31cab4e6ffc6203e2f7ffbf2e4496a"}, + {file = "yarl-1.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e4dddf99a853b3f60f3ce6363fb1ad94606113743cf653f116a38edd839a4461"}, + {file = "yarl-1.13.0-cp311-cp311-win32.whl", hash = "sha256:0b489858642e4e92203941a8fdeeb6373c0535aa986200b22f84d4b39cd602ba"}, + {file = "yarl-1.13.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:31748bee7078db26008bf94d39693c682a26b5c3a80a67194a4c9c8fe3b5cf47"}, + {file = "yarl-1.13.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3a9b2650425b2ab9cc68865978963601b3c2414e1d94ef04f193dd5865e1bd79"}, + {file = "yarl-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:73777f145cd591e1377bf8d8a97e5f8e39c9742ad0f100c898bba1f963aef662"}, + {file = "yarl-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:144b9e9164f21da81731c970dbda52245b343c0f67f3609d71013dd4d0db9ebf"}, + {file = "yarl-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3628e4e572b1db95285a72c4be102356f2dfc6214d9f126de975fd51b517ae55"}, + {file = "yarl-1.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bd3caf554a52da78ec08415ebedeb6b9636436ca2afda9b5b9ff4a533478940"}, + {file = "yarl-1.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d7a44ae252efb0fcd79ac0997416721a44345f53e5aec4a24f489d983aa00e3"}, + {file = "yarl-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b78a1f57780eeeb17f5e1be851ab9fa951b98811e1bb4b5a53f74eec3e2666"}, + {file = "yarl-1.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79de5f8432b53d1261d92761f71dfab5fc7e1c75faa12a3535c27e681dacfa9d"}, + {file = "yarl-1.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f603216d62e9680bfac7fb168ef9673fd98abbb50c43e73d97615dfa1afebf57"}, + {file = "yarl-1.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:acf27399c94270103d68f86118a183008d601e4c2c3a7e98dcde0e3b0163132f"}, + {file = "yarl-1.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:08037790f973367431b9406a7b9d940e872cca12e081bce3b7cea068daf81f0a"}, + {file = "yarl-1.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33e2f5ef965e69a1f2a1b0071a70c4616157da5a5478f3c2f6e185e06c56a322"}, + {file = "yarl-1.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:38a3b742c923fe2cab7d2e2c67220d17da8d0433e8bfe038356167e697ef5524"}, + {file = "yarl-1.13.0-cp312-cp312-win32.whl", hash = "sha256:ab3ee57b25ce15f79ade27b7dfb5e678af26e4b93be5a4e22655acd9d40b81ba"}, + {file = "yarl-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:26214b0a9b8f4b7b04e67eee94a82c9b4e5c721f4d1ce7e8c87c78f0809b7684"}, + {file = "yarl-1.13.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:91251614cca1ba4ab0507f1ba5f5a44e17a5e9a4c7f0308ea441a994bdac3fc7"}, + {file = "yarl-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fe6946c3cbcfbed67c5e50dae49baff82ad054aaa10ff7a4db8dfac646b7b479"}, + {file = "yarl-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:de97ee57e00a82ebb8c378fc73c5d9a773e4c2cec8079ff34ebfef61c8ba5b11"}, + {file = "yarl-1.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1129737da2291c9952a93c015e73583dd66054f3ae991c8674f6e39c46d95dd3"}, + {file = "yarl-1.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:37049eb26d637a5b2f00562f65aad679f5d231c4c044edcd88320542ad66a2d9"}, + {file = "yarl-1.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d15aff3477fecb7a469d1fdf5939a686fbc5a16858022897d3e9fc99301f19"}, + {file = "yarl-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa187a8599e0425f26b25987d884a8b67deb5565f1c450c3a6e8d3de2cdc8715"}, + {file = 
"yarl-1.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d95fcc9508390db73a0f1c7e78d9a1b1a3532a3f34ceff97c0b3b04140fbe6e4"}, + {file = "yarl-1.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d04ea92a3643a9bb28aa6954fff718342caab2cc3d25d0160fe16e26c4a9acb7"}, + {file = "yarl-1.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2842a89b697d8ca3dda6a25b4e4d835d14afe25a315c8a79dbdf5f70edfd0960"}, + {file = "yarl-1.13.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db463fce425f935eee04a9182c74fdf9ed90d3bd2079d4a17f8fb7a2d7c11009"}, + {file = "yarl-1.13.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3ff602aa84420b301c083ae7f07df858ae8e371bf3be294397bda3e0b27c6290"}, + {file = "yarl-1.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a9a1a600e8449f3a24bc7dca513be8d69db173fe842e8332a7318b5b8757a6af"}, + {file = "yarl-1.13.0-cp313-cp313-win32.whl", hash = "sha256:5540b4896b244a6539f22b613b32b5d1b737e08011aa4ed56644cb0519d687df"}, + {file = "yarl-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:08a3b0b8d10092dade46424fe775f2c9bc32e5a985fdd6afe410fe28598db6b2"}, + {file = "yarl-1.13.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:be828e92ae67a21d6a252aecd65668dddbf3bb5d5278660be607647335001119"}, + {file = "yarl-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e3b4293f02129cc2f5068f3687ef294846a79c9d19fabaa9bfdfeeebae11c001"}, + {file = "yarl-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2cec7b52903dcf9008311167036775346dcb093bb15ed7ec876debc3095e7dab"}, + {file = "yarl-1.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612bd8d2267558bea36347e4e6e3a96f436bdc5c011f1437824be4f2e3abc5e1"}, + {file = "yarl-1.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a26956d268ad52bd2329c2c674890fe9e8669b41d83ed136e7037b1a29808e"}, + {file = "yarl-1.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01953b5686e5868fd0d8eaea4e484482c158597b8ddb9d9d4d048303fa3334c7"}, + {file = "yarl-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01d3941d416e71ce65f33393beb50e93c1c9e8e516971b6653c96df6eb599a2c"}, + {file = "yarl-1.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:801fb5dfc05910cd5ef4806726e2129d8c9a16cdfa26a8166697da0861e59dfc"}, + {file = "yarl-1.13.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:cdcdd49136d423ee5234c9360eae7063d3120a429ee984d7d9da821c012da4d7"}, + {file = "yarl-1.13.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:6072ff51eeb7938ecac35bf24fc465be00e75217eaa1ffad3cc7620accc0f6f4"}, + {file = "yarl-1.13.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:d42227711a4180d0c22cec30fd81d263d7bb378389d8e70b5f4c597e8abae202"}, + {file = "yarl-1.13.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:ebb2236f8098205f59774a28e25a84601a4beb3e974157d418ee6c470d73e0dc"}, + {file = "yarl-1.13.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f997004ff530b5381290e82b212a93bd420fefe5a605872dc16fc7e4a7f4251e"}, + {file = "yarl-1.13.0-cp38-cp38-win32.whl", hash = "sha256:b9648e5ae280babcac867b16e845ce51ed21f8c43bced2ca40cff7eee983d6d4"}, + {file = "yarl-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:f3ef76df654f3547dcb76ba550f9ca59826588eecc6bd7df16937c620df32060"}, + {file = "yarl-1.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:92abbe37e3fb08935e0e95ac5f83f7b286a6f2575f542225ec7afde405ed1fa1"}, + {file = "yarl-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1932c7bfa537f89ad5ca3d1e7e05de3388bb9e893230a384159fb974f6e9f90c"}, + {file = "yarl-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4483680e129b2a4250be20947b554cd5f7140fa9e5a1e4f1f42717cf91f8676a"}, + {file = "yarl-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f6f4a352d0beea5dd39315ab16fc26f0122d13457a7e65ad4f06c7961dcf87a"}, + {file = "yarl-1.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a67f20e97462dee8a89e9b997a78932959d2ed991e8f709514cb4160143e7b1"}, + {file = "yarl-1.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf4f3a87bd52f8f33b0155cd0f6f22bdf2092d88c6c6acbb1aee3bc206ecbe35"}, + {file = "yarl-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deb70c006be548076d628630aad9a3ef3a1b2c28aaa14b395cf0939b9124252e"}, + {file = "yarl-1.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf7a9b31729b97985d4a796808859dfd0e37b55f1ca948d46a568e56e51dd8fb"}, + {file = "yarl-1.13.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d807417ceebafb7ce18085a1205d28e8fcb1435a43197d7aa3fab98f5bfec5ef"}, + {file = "yarl-1.13.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9671d0d65f86e0a0eee59c5b05e381c44e3d15c36c2a67da247d5d82875b4e4e"}, + {file = "yarl-1.13.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:13a9cd39e47ca4dc25139d3c63fe0dc6acf1b24f9d94d3b5197ac578fbfd84bf"}, + {file = "yarl-1.13.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:acf8c219a59df22609cfaff4a7158a0946f273e3b03a5385f1fdd502496f0cff"}, + {file = "yarl-1.13.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:12c92576633027f297c26e52aba89f6363b460f483d85cf7c14216eb55d06d02"}, + {file = "yarl-1.13.0-cp39-cp39-win32.whl", hash = "sha256:c2518660bd8166e770b76ce92514b491b8720ae7e7f5f975cd888b1592894d2c"}, + {file = "yarl-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:db90702060b1cdb7c7609d04df5f68a12fd5581d013ad379e58e0c2e651d92b8"}, + {file = "yarl-1.13.0-py3-none-any.whl", hash = "sha256:c7d35ff2a5a51bc6d40112cdb4ca3fd9636482ce8c6ceeeee2301e34f7ed7556"}, + {file = "yarl-1.13.0.tar.gz", hash = "sha256:02f117a63d11c8c2ada229029f8bb444a811e62e5041da962de548f26ac2c40f"}, ] [package.dependencies] @@ -3905,20 +3996,24 @@ multidict = ">=4.0" [[package]] name = "zipp" -version = "3.19.2" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", 
"pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "6f7b98a5dfe61a527f7a5398ebccc955ba6e7233d8c77789dbcf55f3c9f8169f" +content-hash = "a54e3ebe29255d397651cea6d849ada39f03565a1a7bf13084092be3600a77f0" diff --git a/pyproject.toml b/pyproject.toml index a1f02ccc..7417116d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,10 +29,6 @@ cryptography = "40.0.2" executing = "1.2.0" pydantic = "< 2" ipywidgets = "8.1.2" -# If updating setuptools version, please verify the odh-notebooks-sync workflow continues to work. -# https://github.com/project-codeflare/codeflare-sdk/actions/workflows/odh-notebooks-sync.yml -# Reference: https://github.com/target/strelka/pull/469 -setuptools = "<= 73.0.0" [tool.poetry.group.docs] optional = true From 1a95a150ac066d1740d17282877edb23acb35ab2 Mon Sep 17 00:00:00 2001 From: Kevin Date: Mon, 16 Sep 2024 10:47:24 -0400 Subject: [PATCH 350/496] wrap api client to add defaults Signed-off-by: Kevin --- src/codeflare_sdk/cluster/auth.py | 61 ++++++++++++------------ src/codeflare_sdk/cluster/awload.py | 6 +-- src/codeflare_sdk/cluster/cluster.py | 39 +++++++-------- src/codeflare_sdk/cluster/widgets.py | 4 +- src/codeflare_sdk/utils/generate_cert.py | 4 +- src/codeflare_sdk/utils/generate_yaml.py | 8 ++-- tests/unit_test.py | 14 +++--- 7 files changed, 67 insertions(+), 69 deletions(-) diff --git a/src/codeflare_sdk/cluster/auth.py b/src/codeflare_sdk/cluster/auth.py index c39fe1d4..fbba0c22 100644 --- a/src/codeflare_sdk/cluster/auth.py +++ b/src/codeflare_sdk/cluster/auth.py @@ -93,17 +93,7 @@ def __init__( self.token = token self.server = server self.skip_tls = skip_tls - self.ca_cert_path = self._gen_ca_cert_path(ca_cert_path) - - def _gen_ca_cert_path(self, ca_cert_path: str): - if ca_cert_path is not None: - return ca_cert_path - elif "CF_SDK_CA_CERT_PATH" in os.environ: - return os.environ.get("CF_SDK_CA_CERT_PATH") - elif os.path.exists(WORKBENCH_CA_CERT_PATH): - return WORKBENCH_CA_CERT_PATH - else: - return None + self.ca_cert_path = _gen_ca_cert_path(ca_cert_path) def login(self) -> str: """ @@ -119,25 +109,14 @@ def login(self) -> str: configuration.host = self.server configuration.api_key["authorization"] = self.token + api_client = client.ApiClient(configuration) if not self.skip_tls: - if self.ca_cert_path is None: - configuration.ssl_ca_cert = None - elif os.path.isfile(self.ca_cert_path): - print( - f"Authenticated with certificate located at {self.ca_cert_path}" - ) - configuration.ssl_ca_cert = self.ca_cert_path - else: - raise FileNotFoundError( - f"Certificate file not found at {self.ca_cert_path}" - ) - configuration.verify_ssl = True + _client_with_cert(api_client, self.ca_cert_path) else: urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) print("Insecure request warnings have been disabled") configuration.verify_ssl = False - api_client = client.ApiClient(configuration) client.AuthenticationApi(api_client).get_api_group() config_path = None return "Logged into %s" % self.server @@ -211,11 +190,33 @@ def config_check() -> str: return config_path -def api_config_handler() -> Optional[client.ApiClient]: - """ - This function is 
used to load the api client if the user has logged in - """ - if api_client != None and config_path == None: - return api_client +def _client_with_cert(client: client.ApiClient, ca_cert_path: Optional[str] = None): + client.configuration.verify_ssl = True + cert_path = _gen_ca_cert_path(ca_cert_path) + if cert_path is None: + client.configuration.ssl_ca_cert = None + elif os.path.isfile(cert_path): + client.configuration.ssl_ca_cert = cert_path + else: + raise FileNotFoundError(f"Certificate file not found at {cert_path}") + + +def _gen_ca_cert_path(ca_cert_path: Optional[str]): + """Gets the path to the default CA certificate file either through env config or default path""" + if ca_cert_path is not None: + return ca_cert_path + elif "CF_SDK_CA_CERT_PATH" in os.environ: + return os.environ.get("CF_SDK_CA_CERT_PATH") + elif os.path.exists(WORKBENCH_CA_CERT_PATH): + return WORKBENCH_CA_CERT_PATH else: return None + + +def get_api_client() -> client.ApiClient: + "This function should load the api client with defaults" + if api_client != None: + return api_client + to_return = client.ApiClient() + _client_with_cert(to_return) + return to_return diff --git a/src/codeflare_sdk/cluster/awload.py b/src/codeflare_sdk/cluster/awload.py index 7455b216..1ead5914 100644 --- a/src/codeflare_sdk/cluster/awload.py +++ b/src/codeflare_sdk/cluster/awload.py @@ -24,7 +24,7 @@ from kubernetes import client, config from ..utils.kube_api_helpers import _kube_api_error_handling -from .auth import config_check, api_config_handler +from .auth import config_check, get_api_client class AWManager: @@ -59,7 +59,7 @@ def submit(self) -> None: """ try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -84,7 +84,7 @@ def remove(self) -> None: try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/cluster/cluster.py index a32d5a4b..44effda7 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -26,7 +26,7 @@ from kubernetes import config from ray.job_submission import JobSubmissionClient -from .auth import config_check, api_config_handler +from .auth import config_check, get_api_client from ..utils import pretty_print from ..utils.generate_yaml import ( generate_appwrapper, @@ -81,7 +81,7 @@ def __init__(self, config: ClusterConfiguration): @property def _client_headers(self): - k8_client = api_config_handler() or client.ApiClient() + k8_client = get_api_client() return { "Authorization": k8_client.configuration.get_api_key_with_prefix( "authorization" @@ -96,7 +96,7 @@ def _client_verify_tls(self): @property def job_client(self): - k8client = api_config_handler() or client.ApiClient() + k8client = get_api_client() if self._job_submission_client: return self._job_submission_client if is_openshift_cluster(): @@ -142,7 +142,7 @@ def up(self): try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) if self.config.appwrapper: if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: @@ -173,7 +173,7 @@ def up(self): return _kube_api_error_handling(e) def 
_throw_for_no_raycluster(self): - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) try: api_instance.list_namespaced_custom_object( group="ray.io", @@ -200,7 +200,7 @@ def down(self): self._throw_for_no_raycluster() try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) if self.config.appwrapper: api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", @@ -359,7 +359,7 @@ def cluster_dashboard_uri(self) -> str: config_check() if is_openshift_cluster(): try: - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) routes = api_instance.list_namespaced_custom_object( group="route.openshift.io", version="v1", @@ -381,7 +381,7 @@ def cluster_dashboard_uri(self) -> str: return f"{protocol}://{route['spec']['host']}" else: try: - api_instance = client.NetworkingV1Api(api_config_handler()) + api_instance = client.NetworkingV1Api(get_api_client()) ingresses = api_instance.list_namespaced_ingress(self.config.namespace) except Exception as e: # pragma no cover return _kube_api_error_handling(e) @@ -580,9 +580,6 @@ def get_current_namespace(): # pragma: no cover return active_context except Exception as e: print("Unable to find current namespace") - - if api_config_handler() != None: - return None print("trying to gather from current context") try: _, active_context = config.list_kube_config_contexts(config_check()) @@ -602,7 +599,7 @@ def get_cluster( ): try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", version="v1", @@ -657,7 +654,7 @@ def _create_resources(yamls, namespace: str, api_instance: client.CustomObjectsA def _check_aw_exists(name: str, namespace: str) -> bool: try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -684,7 +681,7 @@ def _get_ingress_domain(self): # pragma: no cover if is_openshift_cluster(): try: - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) routes = api_instance.list_namespaced_custom_object( group="route.openshift.io", @@ -703,7 +700,7 @@ def _get_ingress_domain(self): # pragma: no cover domain = route["spec"]["host"] else: try: - api_client = client.NetworkingV1Api(api_config_handler()) + api_client = client.NetworkingV1Api(get_api_client()) ingresses = api_client.list_namespaced_ingress(namespace) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -717,7 +714,7 @@ def _get_ingress_domain(self): # pragma: no cover def _app_wrapper_status(name, namespace="default") -> Optional[AppWrapper]: try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -736,7 +733,7 @@ def _app_wrapper_status(name, namespace="default") -> Optional[AppWrapper]: def _ray_cluster_status(name, namespace="default") -> Optional[RayCluster]: try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + 
api_instance = client.CustomObjectsApi(get_api_client()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", version="v1", @@ -758,7 +755,7 @@ def _get_ray_clusters( list_of_clusters = [] try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", version="v1", @@ -787,7 +784,7 @@ def _get_app_wrappers( try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -816,7 +813,7 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: dashboard_url = None if is_openshift_cluster(): try: - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) routes = api_instance.list_namespaced_custom_object( group="route.openshift.io", version="v1", @@ -835,7 +832,7 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: dashboard_url = f"{protocol}://{route['spec']['host']}" else: try: - api_instance = client.NetworkingV1Api(api_config_handler()) + api_instance = client.NetworkingV1Api(get_api_client()) ingresses = api_instance.list_namespaced_ingress( rc["metadata"]["namespace"] ) diff --git a/src/codeflare_sdk/cluster/widgets.py b/src/codeflare_sdk/cluster/widgets.py index 53afa28b..a96a1a49 100644 --- a/src/codeflare_sdk/cluster/widgets.py +++ b/src/codeflare_sdk/cluster/widgets.py @@ -29,7 +29,7 @@ from .config import ClusterConfiguration from .model import RayClusterStatus from ..utils.kube_api_helpers import _kube_api_error_handling -from .auth import config_check, api_config_handler +from .auth import config_check, get_api_client def cluster_up_down_buttons(cluster: "codeflare_sdk.cluster.Cluster") -> widgets.Button: @@ -343,7 +343,7 @@ def _delete_cluster( try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) if _check_aw_exists(cluster_name, namespace): api_instance.delete_namespaced_custom_object( diff --git a/src/codeflare_sdk/utils/generate_cert.py b/src/codeflare_sdk/utils/generate_cert.py index 5de56882..f3dc80e9 100644 --- a/src/codeflare_sdk/utils/generate_cert.py +++ b/src/codeflare_sdk/utils/generate_cert.py @@ -19,7 +19,7 @@ from cryptography import x509 from cryptography.x509.oid import NameOID import datetime -from ..cluster.auth import config_check, api_config_handler +from ..cluster.auth import config_check, get_api_client from kubernetes import client, config from .kube_api_helpers import _kube_api_error_handling @@ -103,7 +103,7 @@ def generate_tls_cert(cluster_name, namespace, days=30): # oc get secret ca-secret- -o template='{{index .data "ca.key"}}' # oc get secret ca-secret- -o template='{{index .data "ca.crt"}}'|base64 -d > ${TLSDIR}/ca.crt config_check() - v1 = client.CoreV1Api(api_config_handler()) + v1 = client.CoreV1Api(get_api_client()) # Secrets have a suffix appended to the end so we must list them and gather the secret that includes cluster_name-ca-secret- secret_name = get_secret_name(cluster_name, namespace, v1) diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index c4e1755d..7a17e010 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -27,7 +27,7 @@ import uuid from kubernetes 
import client, config from .kube_api_helpers import _kube_api_error_handling -from ..cluster.auth import api_config_handler, config_check +from ..cluster.auth import get_api_client, config_check from os import urandom from base64 import b64encode from urllib3.util import parse_url @@ -57,7 +57,7 @@ def gen_names(name): def is_openshift_cluster(): try: config_check() - for api in client.ApisApi(api_config_handler()).get_api_versions().groups: + for api in client.ApisApi(get_api_client()).get_api_versions().groups: for v in api.versions: if "route.openshift.io/v1" in v.group_version: return True @@ -235,7 +235,7 @@ def get_default_kueue_name(namespace: str): # If the local queue is set, use it. Otherwise, try to use the default queue. try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) local_queues = api_instance.list_namespaced_custom_object( group="kueue.x-k8s.io", version="v1beta1", @@ -261,7 +261,7 @@ def local_queue_exists(namespace: str, local_queue_name: str): # get all local queues in the namespace try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) local_queues = api_instance.list_namespaced_custom_object( group="kueue.x-k8s.io", version="v1beta1", diff --git a/tests/unit_test.py b/tests/unit_test.py index ae2af659..d4b7b149 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -134,7 +134,7 @@ def test_token_auth_creation(): assert token_auth.skip_tls == True assert token_auth.ca_cert_path == None - os.environ["CF_SDK_CA_CERT_PATH"] = f"/etc/pki/tls/custom-certs/ca-bundle.crt" + os.environ["CF_SDK_CA_CERT_PATH"] = "/etc/pki/tls/custom-certs/ca-bundle.crt" token_auth = TokenAuthentication(token="token", server="server", skip_tls=False) assert token_auth.token == "token" assert token_auth.server == "server" @@ -153,7 +153,7 @@ def test_token_auth_creation(): assert token_auth.skip_tls == False assert token_auth.ca_cert_path == f"{parent}/tests/auth-test.crt" - except Exception: + except Exception as e: assert 0 == 1 @@ -294,6 +294,7 @@ def test_cluster_creation(mocker): ) +@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) def test_cluster_no_kueue_no_aw(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( @@ -301,7 +302,6 @@ def test_cluster_no_kueue_no_aw(mocker): return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, ) mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") - mocker.patch("os.environ.get", return_value="test-prefix") config = createClusterConfig() config.appwrapper = False config.name = "unit-test-no-kueue" @@ -350,6 +350,7 @@ def get_local_queue(group, version, namespace, plural): return local_queues +@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) def test_cluster_creation_no_mcad(mocker): # Create Ray Cluster with no local queue specified mocker.patch("kubernetes.client.ApisApi.get_api_versions") @@ -361,7 +362,6 @@ def test_cluster_creation_no_mcad(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - mocker.patch("os.environ.get", return_value="test-prefix") config = createClusterConfig() config.name = "unit-test-cluster-ray" @@ -379,6 +379,7 @@ def test_cluster_creation_no_mcad(mocker): ) +@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) def 
test_cluster_creation_no_mcad_local_queue(mocker): # With written resources # Create Ray Cluster with local queue specified @@ -391,7 +392,6 @@ def test_cluster_creation_no_mcad_local_queue(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - mocker.patch("os.environ.get", return_value="test-prefix") config = createClusterConfig() config.name = "unit-test-cluster-ray" config.appwrapper = False @@ -460,6 +460,7 @@ def test_default_cluster_creation(mocker): assert cluster.config.namespace == "opendatahub" +@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) def test_cluster_creation_with_custom_image(mocker): # With written resources # Create Ray Cluster with local queue specified @@ -472,7 +473,6 @@ def test_cluster_creation_with_custom_image(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - mocker.patch("os.environ.get", return_value="test-prefix") config = createClusterConfig() config.name = "unit-test-cluster-custom-image" config.appwrapper = False @@ -2170,7 +2170,7 @@ def test_map_to_ray_cluster(mocker): mock_api_client = mocker.MagicMock(spec=client.ApiClient) mocker.patch( - "codeflare_sdk.cluster.auth.api_config_handler", return_value=mock_api_client + "codeflare_sdk.cluster.auth.get_api_client", return_value=mock_api_client ) mock_routes = { From 4fc5482c320d8721b52e974fd26e137ea8e2231e Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 27 Sep 2024 16:18:54 +0000 Subject: [PATCH 351/496] Changes in docs for release: v0.21.1 --- docs/detailed-documentation/cluster/auth.html | 144 +++++++----------- .../cluster/awload.html | 14 +- .../cluster/cluster.html | 68 ++++----- .../cluster/widgets.html | 4 +- .../utils/generate_cert.html | 6 +- .../utils/generate_yaml.html | 14 +- 6 files changed, 106 insertions(+), 144 deletions(-) diff --git a/docs/detailed-documentation/cluster/auth.html b/docs/detailed-documentation/cluster/auth.html index 61d199e5..d35b4609 100644 --- a/docs/detailed-documentation/cluster/auth.html +++ b/docs/detailed-documentation/cluster/auth.html @@ -126,17 +126,7 @@

    Module codeflare_sdk.cluster.auth

    self.token = token self.server = server self.skip_tls = skip_tls - self.ca_cert_path = self._gen_ca_cert_path(ca_cert_path) - - def _gen_ca_cert_path(self, ca_cert_path: str): - if ca_cert_path is not None: - return ca_cert_path - elif "CF_SDK_CA_CERT_PATH" in os.environ: - return os.environ.get("CF_SDK_CA_CERT_PATH") - elif os.path.exists(WORKBENCH_CA_CERT_PATH): - return WORKBENCH_CA_CERT_PATH - else: - return None + self.ca_cert_path = _gen_ca_cert_path(ca_cert_path) def login(self) -> str: """ @@ -152,25 +142,14 @@

    Module codeflare_sdk.cluster.auth

    configuration.host = self.server configuration.api_key["authorization"] = self.token + api_client = client.ApiClient(configuration) if not self.skip_tls: - if self.ca_cert_path is None: - configuration.ssl_ca_cert = None - elif os.path.isfile(self.ca_cert_path): - print( - f"Authenticated with certificate located at {self.ca_cert_path}" - ) - configuration.ssl_ca_cert = self.ca_cert_path - else: - raise FileNotFoundError( - f"Certificate file not found at {self.ca_cert_path}" - ) - configuration.verify_ssl = True + _client_with_cert(api_client, self.ca_cert_path) else: urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) print("Insecure request warnings have been disabled") configuration.verify_ssl = False - api_client = client.ApiClient(configuration) client.AuthenticationApi(api_client).get_api_group() config_path = None return "Logged into %s" % self.server @@ -244,14 +223,36 @@

    Module codeflare_sdk.cluster.auth

    return config_path -def api_config_handler() -> Optional[client.ApiClient]: - """ - This function is used to load the api client if the user has logged in - """ - if api_client != None and config_path == None: - return api_client +def _client_with_cert(client: client.ApiClient, ca_cert_path: Optional[str] = None): + client.configuration.verify_ssl = True + cert_path = _gen_ca_cert_path(ca_cert_path) + if cert_path is None: + client.configuration.ssl_ca_cert = None + elif os.path.isfile(cert_path): + client.configuration.ssl_ca_cert = cert_path else: - return None + raise FileNotFoundError(f"Certificate file not found at {cert_path}") + + +def _gen_ca_cert_path(ca_cert_path: Optional[str]): + """Gets the path to the default CA certificate file either through env config or default path""" + if ca_cert_path is not None: + return ca_cert_path + elif "CF_SDK_CA_CERT_PATH" in os.environ: + return os.environ.get("CF_SDK_CA_CERT_PATH") + elif os.path.exists(WORKBENCH_CA_CERT_PATH): + return WORKBENCH_CA_CERT_PATH + else: + return None + + +def get_api_client() -> client.ApiClient: + "This function should load the api client with defaults" + if api_client != None: + return api_client + to_return = client.ApiClient() + _client_with_cert(to_return) + return to_return
  • @@ -261,25 +262,6 @@

    Module codeflare_sdk.cluster.auth

    Functions

    -
    -def api_config_handler() ‑> Optional[kubernetes.client.api_client.ApiClient] -
    -
    -

    This function is used to load the api client if the user has logged in

    -
    - -Expand source code - -
    def api_config_handler() -> Optional[client.ApiClient]:
    -    """
    -    This function is used to load the api client if the user has logged in
    -    """
    -    if api_client != None and config_path == None:
    -        return api_client
    -    else:
    -        return None
    -
    -
    def config_check() ‑> str
    @@ -318,6 +300,24 @@

    Functions

    return config_path +
    +def get_api_client() ‑> kubernetes.client.api_client.ApiClient +
    +
    +

    This function should load the api client with defaults

    +
    + +Expand source code + +
    def get_api_client() -> client.ApiClient:
    +    "This function should load the api client with defaults"
    +    if api_client != None:
    +        return api_client
    +    to_return = client.ApiClient()
    +    _client_with_cert(to_return)
    +    return to_return
    +
    +
    @@ -573,17 +573,7 @@

    Methods

    self.token = token self.server = server self.skip_tls = skip_tls - self.ca_cert_path = self._gen_ca_cert_path(ca_cert_path) - - def _gen_ca_cert_path(self, ca_cert_path: str): - if ca_cert_path is not None: - return ca_cert_path - elif "CF_SDK_CA_CERT_PATH" in os.environ: - return os.environ.get("CF_SDK_CA_CERT_PATH") - elif os.path.exists(WORKBENCH_CA_CERT_PATH): - return WORKBENCH_CA_CERT_PATH - else: - return None + self.ca_cert_path = _gen_ca_cert_path(ca_cert_path) def login(self) -> str: """ @@ -599,25 +589,14 @@

    Methods

    configuration.host = self.server configuration.api_key["authorization"] = self.token + api_client = client.ApiClient(configuration) if not self.skip_tls: - if self.ca_cert_path is None: - configuration.ssl_ca_cert = None - elif os.path.isfile(self.ca_cert_path): - print( - f"Authenticated with certificate located at {self.ca_cert_path}" - ) - configuration.ssl_ca_cert = self.ca_cert_path - else: - raise FileNotFoundError( - f"Certificate file not found at {self.ca_cert_path}" - ) - configuration.verify_ssl = True + _client_with_cert(api_client, self.ca_cert_path) else: urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) print("Insecure request warnings have been disabled") configuration.verify_ssl = False - api_client = client.ApiClient(configuration) client.AuthenticationApi(api_client).get_api_group() config_path = None return "Logged into %s" % self.server @@ -665,25 +644,14 @@

    Methods

    configuration.host = self.server configuration.api_key["authorization"] = self.token + api_client = client.ApiClient(configuration) if not self.skip_tls: - if self.ca_cert_path is None: - configuration.ssl_ca_cert = None - elif os.path.isfile(self.ca_cert_path): - print( - f"Authenticated with certificate located at {self.ca_cert_path}" - ) - configuration.ssl_ca_cert = self.ca_cert_path - else: - raise FileNotFoundError( - f"Certificate file not found at {self.ca_cert_path}" - ) - configuration.verify_ssl = True + _client_with_cert(api_client, self.ca_cert_path) else: urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) print("Insecure request warnings have been disabled") configuration.verify_ssl = False - api_client = client.ApiClient(configuration) client.AuthenticationApi(api_client).get_api_group() config_path = None return "Logged into %s" % self.server @@ -729,8 +697,8 @@

    Index

  • Functions

  • Classes

    diff --git a/docs/detailed-documentation/cluster/awload.html b/docs/detailed-documentation/cluster/awload.html index c77ac0e7..fba18e3f 100644 --- a/docs/detailed-documentation/cluster/awload.html +++ b/docs/detailed-documentation/cluster/awload.html @@ -55,7 +55,7 @@

    Module codeflare_sdk.cluster.awload

    from kubernetes import client, config from ..utils.kube_api_helpers import _kube_api_error_handling -from .auth import config_check, api_config_handler +from .auth import config_check, get_api_client class AWManager: @@ -90,7 +90,7 @@

    Module codeflare_sdk.cluster.awload

    """ try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -115,7 +115,7 @@

    Module codeflare_sdk.cluster.awload

    try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -184,7 +184,7 @@

    Classes

    """ try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -209,7 +209,7 @@

    Classes

    try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -246,7 +246,7 @@

    Methods

    try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -276,7 +276,7 @@

    Methods

    """ try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html index 5276ee94..12865c04 100644 --- a/docs/detailed-documentation/cluster/cluster.html +++ b/docs/detailed-documentation/cluster/cluster.html @@ -58,7 +58,7 @@

    Module codeflare_sdk.cluster.cluster

    from kubernetes import config from ray.job_submission import JobSubmissionClient -from .auth import config_check, api_config_handler +from .auth import config_check, get_api_client from ..utils import pretty_print from ..utils.generate_yaml import ( generate_appwrapper, @@ -113,7 +113,7 @@

    Module codeflare_sdk.cluster.cluster

    @property def _client_headers(self): - k8_client = api_config_handler() or client.ApiClient() + k8_client = get_api_client() return { "Authorization": k8_client.configuration.get_api_key_with_prefix( "authorization" @@ -128,7 +128,7 @@

    Module codeflare_sdk.cluster.cluster

    @property def job_client(self): - k8client = api_config_handler() or client.ApiClient() + k8client = get_api_client() if self._job_submission_client: return self._job_submission_client if is_openshift_cluster(): @@ -174,7 +174,7 @@

    Module codeflare_sdk.cluster.cluster

    try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) if self.config.appwrapper: if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: @@ -205,7 +205,7 @@

    Module codeflare_sdk.cluster.cluster

    return _kube_api_error_handling(e) def _throw_for_no_raycluster(self): - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) try: api_instance.list_namespaced_custom_object( group="ray.io", @@ -232,7 +232,7 @@

    Module codeflare_sdk.cluster.cluster

    self._throw_for_no_raycluster() try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) if self.config.appwrapper: api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", @@ -391,7 +391,7 @@

    Module codeflare_sdk.cluster.cluster

    config_check() if is_openshift_cluster(): try: - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) routes = api_instance.list_namespaced_custom_object( group="route.openshift.io", version="v1", @@ -413,7 +413,7 @@

    Module codeflare_sdk.cluster.cluster

    return f"{protocol}://{route['spec']['host']}" else: try: - api_instance = client.NetworkingV1Api(api_config_handler()) + api_instance = client.NetworkingV1Api(get_api_client()) ingresses = api_instance.list_namespaced_ingress(self.config.namespace) except Exception as e: # pragma no cover return _kube_api_error_handling(e) @@ -612,9 +612,6 @@

    Module codeflare_sdk.cluster.cluster

    return active_context except Exception as e: print("Unable to find current namespace") - - if api_config_handler() != None: - return None print("trying to gather from current context") try: _, active_context = config.list_kube_config_contexts(config_check()) @@ -634,7 +631,7 @@

    Module codeflare_sdk.cluster.cluster

    ): try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", version="v1", @@ -689,7 +686,7 @@

    Module codeflare_sdk.cluster.cluster

    def _check_aw_exists(name: str, namespace: str) -> bool: try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -716,7 +713,7 @@

    Module codeflare_sdk.cluster.cluster

    if is_openshift_cluster(): try: - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) routes = api_instance.list_namespaced_custom_object( group="route.openshift.io", @@ -735,7 +732,7 @@

    Module codeflare_sdk.cluster.cluster

    domain = route["spec"]["host"] else: try: - api_client = client.NetworkingV1Api(api_config_handler()) + api_client = client.NetworkingV1Api(get_api_client()) ingresses = api_client.list_namespaced_ingress(namespace) except Exception as e: # pragma: no cover return _kube_api_error_handling(e) @@ -749,7 +746,7 @@

    Module codeflare_sdk.cluster.cluster

    def _app_wrapper_status(name, namespace="default") -> Optional[AppWrapper]: try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -768,7 +765,7 @@

    Module codeflare_sdk.cluster.cluster

    def _ray_cluster_status(name, namespace="default") -> Optional[RayCluster]: try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", version="v1", @@ -790,7 +787,7 @@

    Module codeflare_sdk.cluster.cluster

    list_of_clusters = [] try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", version="v1", @@ -819,7 +816,7 @@

    Module codeflare_sdk.cluster.cluster

    try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) aws = api_instance.list_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", @@ -848,7 +845,7 @@

    Module codeflare_sdk.cluster.cluster

    dashboard_url = None if is_openshift_cluster(): try: - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) routes = api_instance.list_namespaced_custom_object( group="route.openshift.io", version="v1", @@ -867,7 +864,7 @@

    Module codeflare_sdk.cluster.cluster

    dashboard_url = f"{protocol}://{route['spec']['host']}" else: try: - api_instance = client.NetworkingV1Api(api_config_handler()) + api_instance = client.NetworkingV1Api(get_api_client()) ingresses = api_instance.list_namespaced_ingress( rc["metadata"]["namespace"] ) @@ -986,7 +983,7 @@

    Functions

    ): try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) rcs = api_instance.list_namespaced_custom_object( group="ray.io", version="v1", @@ -1027,9 +1024,6 @@

    Functions

    return active_context except Exception as e: print("Unable to find current namespace") - - if api_config_handler() != None: - return None print("trying to gather from current context") try: _, active_context = config.list_kube_config_contexts(config_check()) @@ -1135,7 +1129,7 @@

    Classes

    @property def _client_headers(self): - k8_client = api_config_handler() or client.ApiClient() + k8_client = get_api_client() return { "Authorization": k8_client.configuration.get_api_key_with_prefix( "authorization" @@ -1150,7 +1144,7 @@

    Classes

    @property def job_client(self): - k8client = api_config_handler() or client.ApiClient() + k8client = get_api_client() if self._job_submission_client: return self._job_submission_client if is_openshift_cluster(): @@ -1196,7 +1190,7 @@

    Classes

    try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) if self.config.appwrapper: if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: @@ -1227,7 +1221,7 @@

    Classes

    return _kube_api_error_handling(e) def _throw_for_no_raycluster(self): - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) try: api_instance.list_namespaced_custom_object( group="ray.io", @@ -1254,7 +1248,7 @@

    Classes

    self._throw_for_no_raycluster() try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) if self.config.appwrapper: api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", @@ -1413,7 +1407,7 @@

    Classes

    config_check() if is_openshift_cluster(): try: - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) routes = api_instance.list_namespaced_custom_object( group="route.openshift.io", version="v1", @@ -1435,7 +1429,7 @@

    Classes

    return f"{protocol}://{route['spec']['host']}" else: try: - api_instance = client.NetworkingV1Api(api_config_handler()) + api_instance = client.NetworkingV1Api(get_api_client()) ingresses = api_instance.list_namespaced_ingress(self.config.namespace) except Exception as e: # pragma no cover return _kube_api_error_handling(e) @@ -1606,7 +1600,7 @@

    Instance variables

    @property
     def job_client(self):
    -    k8client = api_config_handler() or client.ApiClient()
    +    k8client = get_api_client()
         if self._job_submission_client:
             return self._job_submission_client
         if is_openshift_cluster():
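    The new get_api_client() helper is only referenced, never defined, in these documentation
    diffs. Judging by the pattern it replaces ("api_config_handler() or client.ApiClient()"),
    it presumably always returns a usable ApiClient. A minimal sketch under that assumption
    (the import path and the internal reuse of the old helper are guesses, not part of this
    patch):

    from kubernetes import client
    from codeflare_sdk.cluster.auth import api_config_handler  # assumed to remain available internally

    def get_api_client() -> client.ApiClient:
        # Sketch: hand back the authenticated client created at login time when one exists,
        # otherwise fall back to a default client so callers no longer need their own
        # "or client.ApiClient()" guard.
        authenticated_client = api_config_handler()
        return authenticated_client if authenticated_client is not None else client.ApiClient()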
    @@ -1641,7 +1635,7 @@ 

    Methods

    config_check() if is_openshift_cluster(): try: - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) routes = api_instance.list_namespaced_custom_object( group="route.openshift.io", version="v1", @@ -1663,7 +1657,7 @@

    Methods

    return f"{protocol}://{route['spec']['host']}" else: try: - api_instance = client.NetworkingV1Api(api_config_handler()) + api_instance = client.NetworkingV1Api(get_api_client()) ingresses = api_instance.list_namespaced_ingress(self.config.namespace) except Exception as e: # pragma no cover return _kube_api_error_handling(e) @@ -1762,7 +1756,7 @@

    Methods

    self._throw_for_no_raycluster() try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) if self.config.appwrapper: api_instance.delete_namespaced_custom_object( group="workload.codeflare.dev", @@ -2046,7 +2040,7 @@

    Methods

    try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) if self.config.appwrapper: if self.config.write_to_file: with open(self.app_wrapper_yaml) as f: diff --git a/docs/detailed-documentation/cluster/widgets.html b/docs/detailed-documentation/cluster/widgets.html index e07fa2ea..b0334903 100644 --- a/docs/detailed-documentation/cluster/widgets.html +++ b/docs/detailed-documentation/cluster/widgets.html @@ -58,7 +58,7 @@

    Module codeflare_sdk.cluster.widgets

    from .config import ClusterConfiguration from .model import RayClusterStatus from ..utils.kube_api_helpers import _kube_api_error_handling -from .auth import config_check, api_config_handler +from .auth import config_check, get_api_client def cluster_up_down_buttons(cluster: "codeflare_sdk.cluster.Cluster") -> widgets.Button: @@ -372,7 +372,7 @@

    Module codeflare_sdk.cluster.widgets

    try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) if _check_aw_exists(cluster_name, namespace): api_instance.delete_namespaced_custom_object( diff --git a/docs/detailed-documentation/utils/generate_cert.html b/docs/detailed-documentation/utils/generate_cert.html index 176584c4..01084d84 100644 --- a/docs/detailed-documentation/utils/generate_cert.html +++ b/docs/detailed-documentation/utils/generate_cert.html @@ -47,7 +47,7 @@

    Module codeflare_sdk.utils.generate_cert

    from cryptography import x509 from cryptography.x509.oid import NameOID import datetime -from ..cluster.auth import config_check, api_config_handler +from ..cluster.auth import config_check, get_api_client from kubernetes import client, config from .kube_api_helpers import _kube_api_error_handling @@ -131,7 +131,7 @@

    Module codeflare_sdk.utils.generate_cert

    # oc get secret ca-secret-<cluster-name> -o template='{{index .data "ca.key"}}' # oc get secret ca-secret-<cluster-name> -o template='{{index .data "ca.crt"}}'|base64 -d > ${TLSDIR}/ca.crt config_check() - v1 = client.CoreV1Api(api_config_handler()) + v1 = client.CoreV1Api(get_api_client()) # Secrets have a suffix appended to the end so we must list them and gather the secret that includes cluster_name-ca-secret- secret_name = get_secret_name(cluster_name, namespace, v1) @@ -316,7 +316,7 @@

    Functions

    # oc get secret ca-secret-<cluster-name> -o template='{{index .data "ca.key"}}' # oc get secret ca-secret-<cluster-name> -o template='{{index .data "ca.crt"}}'|base64 -d > ${TLSDIR}/ca.crt config_check() - v1 = client.CoreV1Api(api_config_handler()) + v1 = client.CoreV1Api(get_api_client()) # Secrets have a suffix appended to the end so we must list them and gather the secret that includes cluster_name-ca-secret- secret_name = get_secret_name(cluster_name, namespace, v1) diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html index 60ce89df..c2a7bb34 100644 --- a/docs/detailed-documentation/utils/generate_yaml.html +++ b/docs/detailed-documentation/utils/generate_yaml.html @@ -58,7 +58,7 @@

    Module codeflare_sdk.utils.generate_yaml

    import uuid from kubernetes import client, config from .kube_api_helpers import _kube_api_error_handling -from ..cluster.auth import api_config_handler, config_check +from ..cluster.auth import get_api_client, config_check from os import urandom from base64 import b64encode from urllib3.util import parse_url @@ -88,7 +88,7 @@

    Module codeflare_sdk.utils.generate_yaml

    def is_openshift_cluster(): try: config_check() - for api in client.ApisApi(api_config_handler()).get_api_versions().groups: + for api in client.ApisApi(get_api_client()).get_api_versions().groups: for v in api.versions: if "route.openshift.io/v1" in v.group_version: return True @@ -266,7 +266,7 @@

    Module codeflare_sdk.utils.generate_yaml

    # If the local queue is set, use it. Otherwise, try to use the default queue. try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) local_queues = api_instance.list_namespaced_custom_object( group="kueue.x-k8s.io", version="v1beta1", @@ -292,7 +292,7 @@

    Module codeflare_sdk.utils.generate_yaml

    # get all local queues in the namespace try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) local_queues = api_instance.list_namespaced_custom_object( group="kueue.x-k8s.io", version="v1beta1", @@ -514,7 +514,7 @@

    Functions

    # If the local queue is set, use it. Otherwise, try to use the default queue. try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) local_queues = api_instance.list_namespaced_custom_object( group="kueue.x-k8s.io", version="v1beta1", @@ -627,7 +627,7 @@

    Functions

    def is_openshift_cluster():
         try:
             config_check()
    -        for api in client.ApisApi(api_config_handler()).get_api_versions().groups:
    +        for api in client.ApisApi(get_api_client()).get_api_versions().groups:
                 for v in api.versions:
                     if "route.openshift.io/v1" in v.group_version:
                         return True
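    The call sites shown earlier in cluster.html branch on this check to choose between OpenShift
    Routes and plain Kubernetes Ingresses. A rough usage sketch of that pattern (the function name
    and default namespace are illustrative, not part of the patch):

    from kubernetes import client
    from codeflare_sdk.cluster.auth import config_check, get_api_client  # paths as used in the diffs above
    from codeflare_sdk.utils.generate_yaml import is_openshift_cluster

    def list_dashboard_endpoints(namespace: str = "default"):
        # Sketch: the same branching the cluster module applies when resolving dashboard URLs.
        config_check()
        if is_openshift_cluster():
            api_instance = client.CustomObjectsApi(get_api_client())
            return api_instance.list_namespaced_custom_object(
                group="route.openshift.io",
                version="v1",
                namespace=namespace,
                plural="routes",
            )
        api_instance = client.NetworkingV1Api(get_api_client())
        return api_instance.list_namespaced_ingress(namespace)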
    @@ -650,7 +650,7 @@ 

    Functions

    # get all local queues in the namespace try: config_check() - api_instance = client.CustomObjectsApi(api_config_handler()) + api_instance = client.CustomObjectsApi(get_api_client()) local_queues = api_instance.list_namespaced_custom_object( group="kueue.x-k8s.io", version="v1beta1", From 52f20a49694feab4507b7fd3e9ce4ff570aa13f0 Mon Sep 17 00:00:00 2001 From: Jiri Petrlik Date: Mon, 30 Sep 2024 11:52:11 +0200 Subject: [PATCH 352/496] Delete custom notebook image (#683) We stoped building this image. I think we can delete it completely. --- .github/workflows/nightly-image-build.yaml | 50 ----- README.md | 6 - custom-nb-image/Dockerfile | 31 --- custom-nb-image/imagestream.yaml | 36 --- custom-nb-image/requirements/requirements.txt | 205 ------------------ 5 files changed, 328 deletions(-) delete mode 100644 .github/workflows/nightly-image-build.yaml delete mode 100644 custom-nb-image/Dockerfile delete mode 100644 custom-nb-image/imagestream.yaml delete mode 100644 custom-nb-image/requirements/requirements.txt diff --git a/.github/workflows/nightly-image-build.yaml b/.github/workflows/nightly-image-build.yaml deleted file mode 100644 index 4b033c70..00000000 --- a/.github/workflows/nightly-image-build.yaml +++ /dev/null @@ -1,50 +0,0 @@ -name: Nightly Image Build - -on: - workflow_dispatch: - push: - branches: - - main - -env: - PYTHON_VERSION: 3.8 - POETRY_VERSION: 1.8.3 - QUAY_ORGANIZATION: 'project-codeflare' - SDK_FILE: 'codeflare_sdk-0.0.0.dev0.tar.gz' - IMAGE_TAG: 'dev' - -jobs: - nightly-build: - runs-on: ubuntu-latest - steps: - - name: Checkout the repository - uses: actions/checkout@v4 - - name: Install Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - name: Install Poetry - uses: abatilo/actions-poetry@v2 - with: - poetry-version: ${{ env.POETRY_VERSION }} - - name: Run poetry install - run: poetry install --with docs - - name: Run poetry build - run: poetry build - - name: Copy SDK package - run: cp dist/${SDK_FILE} custom-nb-image - - name: Modify Dockerfile to use locally built SDK - run: | - sed -i "s/ARG SDK_VERSION=.*/COPY ${{ env.SDK_FILE }} ./" custom-nb-image/Dockerfile - sed -i "s/codeflare-sdk==.*/${{ env.SDK_FILE }}\\\\/" custom-nb-image/Dockerfile - - name: Image Build - working-directory: custom-nb-image - run: docker build -t quay.io/${{ env.QUAY_ORGANIZATION }}/notebook:${{ env.IMAGE_TAG }} . - - name: Login to Quay.io - uses: docker/login-action@v3 - with: - registry: quay.io - username: ${{ secrets.QUAY_ID }} - password: ${{ secrets.QUAY_TOKEN }} - - name: Image Push - run: docker push quay.io/${{ env.QUAY_ORGANIZATION }}/notebook:${{ env.IMAGE_TAG }} diff --git a/README.md b/README.md index a98f542c..e166b4f5 100644 --- a/README.md +++ b/README.md @@ -40,9 +40,3 @@ The following instructions apply when doing release manually. This may be requir - If not present already, add the API token to Poetry. `poetry config pypi-token.pypi API_TOKEN` - Publish the Python package. `poetry publish` -- Change directory to custom-nb-image. `cd custom-nb-image` -- Set tag `export tag=TAG` -- Build the container image. `podman build --build-arg SDK_VERSION= -t quay.io/project-codeflare/notebook:${tag} .` -- Login to quay.io. `podman login quay.io` -- Push the image. 
`podman push quay.io/project-codeflare/notebook:${tag}` -- Push the stable image tag `podman push quay.io/project-codeflare/notebook:${tag} quay.io/project-codeflare/notebook:stable` diff --git a/custom-nb-image/Dockerfile b/custom-nb-image/Dockerfile deleted file mode 100644 index fca83e25..00000000 --- a/custom-nb-image/Dockerfile +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright 2022 IBM, Red Hat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -FROM quay.io/opendatahub/notebooks:jupyter-minimal-ubi9-python-3.9-2023a_20230302 - -COPY requirements/requirements.txt requirements.txt - -RUN pip install -r requirements.txt - -RUN pip uninstall pickle5 -y - -# Install codeflare-sdk and other libraries -ARG SDK_VERSION=0.* -RUN pip install codeflare-sdk==${SDK_VERSION} \ - datasets==2.6.1 \ - transformers==4.23.1 \ - evaluate==0.3.0 - -RUN chmod -R g+w /opt/app-root/lib/python3.9/site-packages && \ - fix-permissions /opt/app-root -P diff --git a/custom-nb-image/imagestream.yaml b/custom-nb-image/imagestream.yaml deleted file mode 100644 index 833a42fd..00000000 --- a/custom-nb-image/imagestream.yaml +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2022 IBM, Red Hat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -kind: ImageStream -apiVersion: image.openshift.io/v1 -metadata: - name: codeflare-notebook - labels: - opendatahub.io/notebook-image: 'true' - annotations: - opendatahub.io/notebook-image-name: - "CodeFlare Notebook" - opendatahub.io/notebook-image-desc: "Custom Jupyter notebook image with CodeFlare SDK, Python 3.9, Ray 2.7.0 and PyTorch 1.12.1" -spec: - lookupPolicy: - local: true - tags: - - annotations: - openshift.io/imported-from: quay.io/project-codeflare/notebook - name: latest - from: - kind: DockerImage - name: quay.io/project-codeflare/notebook:latest - importPolicy: - scheduled: true diff --git a/custom-nb-image/requirements/requirements.txt b/custom-nb-image/requirements/requirements.txt deleted file mode 100644 index 35b5d559..00000000 --- a/custom-nb-image/requirements/requirements.txt +++ /dev/null @@ -1,205 +0,0 @@ -# -# These requirements were autogenerated by pipenv -# To regenerate from the project's Pipfile, run: -# -# pipenv lock --requirements -# - --i https://pypi.org/simple -aiohttp-cors==0.7.0 -aiohttp==3.8.3 -aiorwlock==1.3.0 -aiosignal==1.2.0; python_version >= '3.6' -anyio==3.6.1; python_full_version >= '3.6.2' -argon2-cffi-bindings==21.2.0; python_version >= '3.6' -argon2-cffi==21.3.0; python_version >= '3.6' -asgiref==3.5.2; python_version >= '3.7' -asttokens==2.0.8 -astunparse==1.6.3 -async-timeout==4.0.2; python_version >= '3.6' -attrs==22.1.0; python_version >= '3.5' -babel==2.10.3; python_version >= '3.6' -backcall==0.2.0 -bcrypt==4.0.0; python_version >= '3.6' -beautifulsoup4==4.11.1; python_version >= '3.6' -black==22.8.0; python_full_version >= '3.6.2' -bleach==5.0.1; python_version >= '3.7' -blessed==1.19.1; python_version >= '2.7' -boto3==1.17.11 -botocore==1.20.112; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' -cachetools==5.2.0; python_version ~= '3.7' -certifi==2022.9.24; python_version >= '3.6' -cffi==1.15.1 -charset-normalizer==2.1.1; python_version >= '3.6' -click==8.0.4; python_version >= '3.6' -cloudpickle==2.2.0; python_version >= '3.6' -codeflare==0.1.2.dev0 -colorama==0.4.5; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' -colorful==0.5.4 -contourpy==1.0.5; python_version >= '3.7' -cryptography==38.0.1; python_version >= '3.6' -cycler==0.11.0; python_version >= '3.6' -cython==0.29.32 -dask[array,dataframe]==2021.2.0 -dataclasses==0.6 -debugpy==1.6.3; python_version >= '3.7' -decorator==5.1.1; python_version >= '3.5' -defusedxml==0.7.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' -distlib==0.3.6 -entrypoints==0.4; python_version >= '3.6' -executing==1.1.0 -fastapi==0.85.0 -fastjsonschema==2.16.2 -filelock==3.8.0; python_version >= '3.7' -flatbuffers==22.9.24 -fonttools==4.37.3; python_version >= '3.7' -frozenlist==1.3.1; python_version >= '3.7' -fsspec==2022.8.2 -future==0.18.2; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3' -gitdb==4.0.9; python_version >= '3.6' -gitpython==3.1.27; python_version >= '3.7' -google-api-core==2.10.1; python_version >= '3.6' -google-auth==2.12.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' -googleapis-common-protos==1.56.4; python_version >= '3.7' -gpustat==1.0.0 -graphviz==0.20.1; python_version >= '3.7' -greenery==3.3.3 -grpcio==1.43.0; python_version >= '3.6' -h11==0.14.0; python_version >= '3.7' -hyperopt==0.2.5 -idna==3.4; python_version >= '3.5' -importlib-metadata==4.12.0; python_version < '3.10' -importlib-resources==5.9.0; python_version < '3.9' 
-ipykernel==6.16.0; python_version >= '3.7' -ipython-genutils==0.2.0 -ipython==8.5.0; python_version >= '3.8' -ipywidgets==8.0.2 -iso8601==1.1.0; python_version < '4' and python_full_version >= '3.6.2' -jedi==0.18.1; python_version >= '3.6' -jinja2==3.1.2; python_version >= '3.7' -jmespath==0.10.0; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3' -joblib==1.2.0; python_version >= '3.7' -json5==0.9.10 -jsonref==0.2 -jsonschema==4.16.0; python_version >= '3.7' -jsonsubschema==0.0.6 -jupyter-client==7.3.5; python_version >= '3.7' -jupyter-core==4.11.1; python_version >= '3.7' -jupyter-server-mathjax==0.2.6; python_version >= '3.7' -jupyter-server==1.19.1; python_version >= '3.7' -jupyterlab-git==0.30.0 -jupyterlab-pygments==0.2.2; python_version >= '3.7' -jupyterlab-s3-browser==0.10.1 -jupyterlab-server==2.15.2; python_version >= '3.7' -jupyterlab-widgets==3.0.3; python_version >= '3.7' -jupyterlab==3.4.7; python_version >= '3.7' -kiwisolver==1.4.4; python_version >= '3.7' -kopf==1.35.6 -kubernetes==24.2.0 -lale==0.6.19 -locket==1.0.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -lxml==4.9.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' -markupsafe==2.1.1; python_version >= '3.7' -matplotlib-inline==0.1.6; python_version >= '3.5' -matplotlib==3.6.0 -memory-profiler==0.60.0 -mistune==2.0.4 -msgpack==1.0.4 -multidict==6.0.2; python_version >= '3.7' -mypy-extensions==0.4.3 -nbclassic==0.4.3; python_version >= '3.7' -nbclient==0.6.8; python_version >= '3.7' -nbconvert==7.0.0; python_version >= '3.7' -nbdime==3.1.1; python_version >= '3.6' -nbformat==5.6.1; python_version >= '3.7' -nest-asyncio==1.5.5; python_version >= '3.5' -networkx==2.8.6; python_version >= '3.8' -notebook-shim==0.1.0; python_version >= '3.7' -notebook==6.4.12; python_version >= '3.7' -numpy==1.23.3 -nvidia-ml-py==11.495.46 -oauthlib==3.2.1; python_version >= '3.6' -opencensus-context==0.1.3 -opencensus==0.11.0 -openshift-client==1.0.18 -packaging==21.3 -pandas==1.5.0 -pandocfilters==1.5.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -paramiko==2.11.0 -parso==0.8.3; python_version >= '3.6' -partd==1.3.0 -pathspec==0.10.1; python_version >= '3.7' -pexpect==4.8.0; sys_platform != 'win32' -pickleshare==0.7.5 -pillow==9.2.0; python_version >= '3.7' -pkgutil-resolve-name==1.3.10; python_version < '3.9' -platformdirs==2.5.2; python_version >= '3.7' -portion==2.3.0; python_version ~= '3.6' -prometheus-client==0.13.1 -prompt-toolkit==3.0.31; python_full_version >= '3.6.2' -protobuf==3.20.1; python_version >= '3.7' -psutil==5.9.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -ptyprocess==0.7.0; os_name != 'nt' -pure-eval==0.2.2 -py-spy==0.3.14 -pyarrow==6.0.1 -pyasn1-modules==0.2.8 -pyasn1==0.4.8 -pycparser==2.21 -pydantic==1.10.2; python_version >= '3.7' -pygments==2.13.0; python_version >= '3.6' -pynacl==1.5.0; python_version >= '3.6' -pyparsing==3.0.9; python_full_version >= '3.6.8' -pyrsistent==0.18.1; python_version >= '3.7' -python-dateutil==2.8.2; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -python-dotenv==0.21.0 -python-json-logger==2.0.4; python_version >= '3.5' -pytz==2022.2.1 -pyyaml==6.0; python_version >= '3.6' -pyzmq==24.0.1; python_version >= '3.6' -ray[default]==2.5.0 -requests-oauthlib==1.3.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -requests==2.28.1; python_version >= '3.7' and python_version < '4' -rsa==4.9; python_version >= 
'3.6' -s3fs==0.3.4 -s3transfer==0.3.7 -scikit-learn==1.1.1 -scipy==1.8.1 -send2trash==1.8.0 -singleton-decorator==1.0.0 -six==1.16.0; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -sklearn==0.0 -smart-open==6.2.0 -smmap==5.0.0; python_version >= '3.6' -sniffio==1.3.0; python_version >= '3.7' -sortedcontainers==2.4.0 -soupsieve==2.3.2.post1; python_version >= '3.6' -stack-data==0.5.1 -starlette==0.20.4 -tabulate==0.8.10 -tensorboardx==2.5.1 -terminado==0.15.0; python_version >= '3.7' -threadpoolctl==3.1.0; python_version >= '3.6' -tinycss2==1.1.1; python_version >= '3.6' -tomli==2.0.1; python_full_version < '3.11.0a7' -toolz==0.12.0 -torch==1.12.1 -torchvision==0.13.1 -tornado==6.2; python_version >= '3.7' -tqdm==4.64.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3' -traitlets==5.4.0; python_version >= '3.7' -tune-sklearn==0.4.3 -typing-extensions==4.3.0; python_version < '3.10' -urllib3==1.26.12 -uvicorn==0.16.0 -virtualenv==20.16.5; python_version >= '3.6' -wcwidth==0.2.5 -webencodings==0.5.1 -websocket-client==1.4.1; python_version >= '3.7' -wheel==0.37.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' -widgetsnbextension==4.0.3; python_version >= '3.7' -wrapt==1.14.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' -xgboost-ray==0.1.10 -xgboost==1.6.2 -yarl==1.8.1; python_version >= '3.7' -zipp==3.8.1; python_version < '3.10' From bdedaff807e4906d6ff000ff3c6c0c65e40e298f Mon Sep 17 00:00:00 2001 From: Jan Stourac Date: Sat, 28 Sep 2024 13:06:56 +0200 Subject: [PATCH 353/496] Fix for the odh-notebook-sync workflow. This is a followup of #655 where the Pipfile.cpu and Pipfile.gpu support was broken. Apart from that - the array initialization was fixed and the link to this workflow is added to the raised PR description. --- .github/workflows/odh-notebooks-sync.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index 80337fe0..f1a9e0fe 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -55,12 +55,12 @@ jobs: - name: Update Pipfiles in accordance with Codeflare-SDK latest release run: | package_name=codeflare-sdk - available_python_versions=("3.9", "3.11") # add space separated python versions according to 'python-versions' specified in 'Setup Python Environment' step + available_python_versions=("3.9" "3.11") # add space separated python versions according to 'python-versions' specified in 'Setup Python Environment' step install_package_using_pipenv(){ # args allow custom names for Pipfile and Pipfile.lock - if $# -eq 2; then - mv $1 Pipfile - mv $2 Pipfile.lock + if [ $# -eq 2 ]; then + mv "${1}" Pipfile + mv "${2}" Pipfile.lock fi if ! pipenv install ${package_name}~="${CODEFLARE_RELEASE_VERSION}"; then echo "Failed to install ${package_name} with version ${CODEFLARE_RELEASE_VERSION} in $dir" @@ -76,9 +76,9 @@ jobs: echo "Failed to remove virtual environment" exit 1 fi - if $# -eq 2; then - mv Pipfile $1 - mv Pipfile.lock $2 + if [ $# -eq 2 ]; then + mv Pipfile "${1}" + mv Pipfile.lock "${2}" fi } # Get the list of available versions for the package @@ -153,6 +153,6 @@ jobs: env: pr_title: "[Codeflare Action] Update notebook's pipfile to sync with Codeflare-SDK release ${{ env.CODEFLARE_RELEASE_VERSION }}" pr_body: | - :rocket: This is an automated Pull Request. 
+ :rocket: This is an automated Pull Request generated by [odh-notebooks-sync.yml](https://github.com/project-codeflare/codeflare-sdk/blob/main/.github/workflows/odh-notebooks-sync.yml) workflow. This PR updates the `Pipfile` to sync with latest Codeflare-SDK release. From dc4f2f1eb76f105366fb642954b9c65a959ef785 Mon Sep 17 00:00:00 2001 From: Kevin Date: Fri, 27 Sep 2024 13:56:46 -0400 Subject: [PATCH 354/496] remove installation of packages in nb sync Signed-off-by: Kevin --- .github/workflows/odh-notebooks-sync.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index f1a9e0fe..d31a49f5 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -62,10 +62,8 @@ jobs: mv "${1}" Pipfile mv "${2}" Pipfile.lock fi - if ! pipenv install ${package_name}~="${CODEFLARE_RELEASE_VERSION}"; then - echo "Failed to install ${package_name} with version ${CODEFLARE_RELEASE_VERSION} in $dir" - exit 1 - fi + # replace existing version of cf-sdk with new version in Pipfile + sed -i "s/codeflare-sdk = .*$/codeflare-sdk = \"~=$CODEFLARE_RELEASE_VERSION\"/g" Pipfile # Lock dependencies, ensuring pre-release are included and clear previous state if ! pipenv lock --pre --clear ; then echo "Failed to lock dependencies" From 932b925e2ff0afab96c43f536d620d45489aa074 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 1 Oct 2024 12:59:55 +0100 Subject: [PATCH 355/496] Use default image in UI notebook tests --- .github/workflows/ui_notebooks_test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ui_notebooks_test.yaml b/.github/workflows/ui_notebooks_test.yaml index 5e8d506d..283486e6 100644 --- a/.github/workflows/ui_notebooks_test.yaml +++ b/.github/workflows/ui_notebooks_test.yaml @@ -86,7 +86,7 @@ jobs: jq -r 'del(.cells[] | select(.source[] | contains("Create authentication object for user permissions")))' 3_widget_example.ipynb > 3_widget_example.ipynb.tmp && mv 3_widget_example.ipynb.tmp 3_widget_example.ipynb jq -r 'del(.cells[] | select(.source[] | contains("auth.logout()")))' 3_widget_example.ipynb > 3_widget_example.ipynb.tmp && mv 3_widget_example.ipynb.tmp 3_widget_example.ipynb # Set explicit namespace as SDK need it (currently) to resolve local queues - sed -i "s|head_memory_limits=2,|head_memory_limits=2, namespace='default', image='quay.io/modh/ray:2.35.0-py39-cu121',|" 3_widget_example.ipynb + sed -i "s|head_memory_limits=2,|head_memory_limits=2, namespace='default',|" 3_widget_example.ipynb sed -i "s|view_clusters()|view_clusters('default')|" 3_widget_example.ipynb working-directory: demo-notebooks/guided-demos From 730b0ed7c584975cea1f53f5ba4082dc29d136ca Mon Sep 17 00:00:00 2001 From: Varsha Prasad Narsing Date: Mon, 30 Sep 2024 14:28:26 -0700 Subject: [PATCH 356/496] [Part-1] Refactor k8s related code into a separate module This commit is first of the chain in refactoring the cluster/ package. 
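In practical terms, imports that previously lived under the cluster/ package now move under the
new common/kubernetes_cluster/ package introduced here. A hedged illustration based on the file
renames listed below (whether the old paths keep working through re-exports is not shown in this
patch):

    # Old location, as used throughout the SDK before this series:
    from codeflare_sdk.cluster.auth import config_check, get_api_client

    # New location after this commit, following the rename of cluster/auth.py:
    from codeflare_sdk.common.kubernetes_cluster.auth import config_check, get_api_client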
--- .gitignore | 1 + poetry.lock | 495 +++++++++--------- src/codeflare_sdk.egg-info/SOURCES.txt | 6 +- src/codeflare_sdk/__init__.py | 11 +- src/codeflare_sdk/cluster/__init__.py | 7 - src/codeflare_sdk/cluster/awload.py | 9 +- src/codeflare_sdk/cluster/cluster.py | 13 +- src/codeflare_sdk/cluster/widgets.py | 7 +- src/codeflare_sdk/common/__init__.py | 8 + .../common/kubernetes_cluster/__init__.py | 10 + .../kubernetes_cluster}/auth.py | 2 +- .../kubernetes_cluster}/kube_api_helpers.py | 0 src/codeflare_sdk/utils/generate_cert.py | 9 +- src/codeflare_sdk/utils/generate_yaml.py | 14 +- tests/e2e/support.py | 4 +- tests/unit_test.py | 25 +- .../raycluster_sdk_upgrade_sleep_test.py | 2 +- tests/upgrade/raycluster_sdk_upgrade_test.py | 2 +- 18 files changed, 331 insertions(+), 294 deletions(-) create mode 100644 src/codeflare_sdk/common/__init__.py create mode 100644 src/codeflare_sdk/common/kubernetes_cluster/__init__.py rename src/codeflare_sdk/{cluster => common/kubernetes_cluster}/auth.py (99%) rename src/codeflare_sdk/{utils => common/kubernetes_cluster}/kube_api_helpers.py (100%) diff --git a/.gitignore b/.gitignore index c089dfc6..2940f885 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,4 @@ node_modules .DS_Store ui-tests/playwright-report ui-tests/test-results +/src/codeflare_sdk.egg-info/ diff --git a/poetry.lock b/poetry.lock index 3d68e4a8..3b65c16b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,114 +1,114 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" -version = "2.4.2" +version = "2.4.3" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "aiohappyeyeballs-2.4.2-py3-none-any.whl", hash = "sha256:8522691d9a154ba1145b157d6d5c15e5c692527ce6a53c5e5f9876977f6dab2f"}, - {file = "aiohappyeyeballs-2.4.2.tar.gz", hash = "sha256:4ca893e6c5c1f5bf3888b04cb5a3bee24995398efef6e0b9f747b5e89d84fd74"}, + {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, + {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, ] [[package]] name = "aiohttp" -version = "3.10.6" +version = "3.10.8" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:682836fc672972cc3101cc9e30d49c5f7e8f1d010478d46119fe725a4545acfd"}, - {file = "aiohttp-3.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:289fa8a20018d0d5aa9e4b35d899bd51bcb80f0d5f365d9a23e30dac3b79159b"}, - {file = "aiohttp-3.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8617c96a20dd57e7e9d398ff9d04f3d11c4d28b1767273a5b1a018ada5a654d3"}, - {file = "aiohttp-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdbeff1b062751c2a2a55b171f7050fb7073633c699299d042e962aacdbe1a07"}, - {file = "aiohttp-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ea35d849cdd4a9268f910bff4497baebbc1aa3f2f625fd8ccd9ac99c860c621"}, - {file = "aiohttp-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:473961b3252f3b949bb84873d6e268fb6d8aa0ccc6eb7404fa58c76a326bb8e1"}, - {file = 
"aiohttp-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d2665c5df629eb2f981dab244c01bfa6cdc185f4ffa026639286c4d56fafb54"}, - {file = "aiohttp-3.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25d92f794f1332f656e3765841fc2b7ad5c26c3f3d01e8949eeb3495691cf9f4"}, - {file = "aiohttp-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9bd6b2033993d5ae80883bb29b83fb2b432270bbe067c2f53cc73bb57c46065f"}, - {file = "aiohttp-3.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d7f408c43f5e75ea1edc152fb375e8f46ef916f545fb66d4aebcbcfad05e2796"}, - {file = "aiohttp-3.10.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:cf8b8560aa965f87bf9c13bf9fed7025993a155ca0ce8422da74bf46d18c2f5f"}, - {file = "aiohttp-3.10.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14477c4e52e2f17437b99893fd220ffe7d7ee41df5ebf931a92b8ca82e6fd094"}, - {file = "aiohttp-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb138fbf9f53928e779650f5ed26d0ea1ed8b2cab67f0ea5d63afa09fdc07593"}, - {file = "aiohttp-3.10.6-cp310-cp310-win32.whl", hash = "sha256:9843d683b8756971797be171ead21511d2215a2d6e3c899c6e3107fbbe826791"}, - {file = "aiohttp-3.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:f8b8e49fe02f744d38352daca1dbef462c3874900bd8166516f6ea8e82b5aacf"}, - {file = "aiohttp-3.10.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f52e54fd776ad0da1006708762213b079b154644db54bcfc62f06eaa5b896402"}, - {file = "aiohttp-3.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:995ab1a238fd0d19dc65f2d222e5eb064e409665c6426a3e51d5101c1979ee84"}, - {file = "aiohttp-3.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0749c4d5a08a802dd66ecdf59b2df4d76b900004017468a7bb736c3b5a3dd902"}, - {file = "aiohttp-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e05b39158f2af0e2438cc2075cfc271f4ace0c3cc4a81ec95b27a0432e161951"}, - {file = "aiohttp-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a9f196c970db2dcde4f24317e06615363349dc357cf4d7a3b0716c20ac6d7bcd"}, - {file = "aiohttp-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47647c8af04a70e07a2462931b0eba63146a13affa697afb4ecbab9d03a480ce"}, - {file = "aiohttp-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669c0efe7e99f6d94d63274c06344bd0e9c8daf184ce5602a29bc39e00a18720"}, - {file = "aiohttp-3.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9721cdd83a994225352ca84cd537760d41a9da3c0eacb3ff534747ab8fba6d0"}, - {file = "aiohttp-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0b82c8ebed66ce182893e7c0b6b60ba2ace45b1df104feb52380edae266a4850"}, - {file = "aiohttp-3.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b169f8e755e541b72e714b89a831b315bbe70db44e33fead28516c9e13d5f931"}, - {file = "aiohttp-3.10.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0be3115753baf8b4153e64f9aa7bf6c0c64af57979aa900c31f496301b374570"}, - {file = "aiohttp-3.10.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e1f80cd17d81a404b6e70ef22bfe1870bafc511728397634ad5f5efc8698df56"}, - {file = "aiohttp-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6419728b08fb6380c66a470d2319cafcec554c81780e2114b7e150329b9a9a7f"}, - {file = "aiohttp-3.10.6-cp311-cp311-win32.whl", hash = 
"sha256:bd294dcdc1afdc510bb51d35444003f14e327572877d016d576ac3b9a5888a27"}, - {file = "aiohttp-3.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:bf861da9a43d282d6dd9dcd64c23a0fccf2c5aa5cd7c32024513c8c79fb69de3"}, - {file = "aiohttp-3.10.6-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2708baccdc62f4b1251e59c2aac725936a900081f079b88843dabcab0feeeb27"}, - {file = "aiohttp-3.10.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7475da7a5e2ccf1a1c86c8fee241e277f4874c96564d06f726d8df8e77683ef7"}, - {file = "aiohttp-3.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02108326574ff60267b7b35b17ac5c0bbd0008ccb942ce4c48b657bb90f0b8aa"}, - {file = "aiohttp-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:029a019627b37fa9eac5c75cc54a6bb722c4ebbf5a54d8c8c0fb4dd8facf2702"}, - {file = "aiohttp-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a637d387db6fdad95e293fab5433b775fd104ae6348d2388beaaa60d08b38c4"}, - {file = "aiohttp-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1a16f3fc1944c61290d33c88dc3f09ba62d159b284c38c5331868425aca426"}, - {file = "aiohttp-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b292f37969f9cc54f4643f0be7dacabf3612b3b4a65413661cf6c350226787"}, - {file = "aiohttp-3.10.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0754690a3a26e819173a34093798c155bafb21c3c640bff13be1afa1e9d421f9"}, - {file = "aiohttp-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:164ecd32e65467d86843dbb121a6666c3deb23b460e3f8aefdcaacae79eb718a"}, - {file = "aiohttp-3.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:438c5863feb761f7ca3270d48c292c334814459f61cc12bab5ba5b702d7c9e56"}, - {file = "aiohttp-3.10.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ba18573bb1de1063d222f41de64a0d3741223982dcea863b3f74646faf618ec7"}, - {file = "aiohttp-3.10.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c82a94ddec996413a905f622f3da02c4359952aab8d817c01cf9915419525e95"}, - {file = "aiohttp-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92351aa5363fc3c1f872ca763f86730ced32b01607f0c9662b1fa711087968d0"}, - {file = "aiohttp-3.10.6-cp312-cp312-win32.whl", hash = "sha256:3e15e33bfc73fa97c228f72e05e8795e163a693fd5323549f49367c76a6e5883"}, - {file = "aiohttp-3.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:fe517113fe4d35d9072b826c3e147d63c5f808ca8167d450b4f96c520c8a1d8d"}, - {file = "aiohttp-3.10.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:482f74057ea13d387a7549d7a7ecb60e45146d15f3e58a2d93a0ad2d5a8457cd"}, - {file = "aiohttp-3.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:03fa40d1450ee5196e843315ddf74a51afc7e83d489dbfc380eecefea74158b1"}, - {file = "aiohttp-3.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e52e59ed5f4cc3a3acfe2a610f8891f216f486de54d95d6600a2c9ba1581f4d"}, - {file = "aiohttp-3.10.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b3935a22c9e41a8000d90588bed96cf395ef572dbb409be44c6219c61d900d"}, - {file = "aiohttp-3.10.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bef1480ee50f75abcfcb4b11c12de1005968ca9d0172aec4a5057ba9f2b644f"}, - {file = "aiohttp-3.10.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:671745ea7db19693ce867359d503772177f0b20fa8f6ee1e74e00449f4c4151d"}, - {file = 
"aiohttp-3.10.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b50b367308ca8c12e0b50cba5773bc9abe64c428d3fd2bbf5cd25aab37c77bf"}, - {file = "aiohttp-3.10.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a504d7cdb431a777d05a124fd0b21efb94498efa743103ea01b1e3136d2e4fb"}, - {file = "aiohttp-3.10.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66bc81361131763660b969132a22edce2c4d184978ba39614e8f8f95db5c95f8"}, - {file = "aiohttp-3.10.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:27cf19a38506e2e9f12fc17e55f118f04897b0a78537055d93a9de4bf3022e3d"}, - {file = "aiohttp-3.10.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3468b39f977a11271517c6925b226720e148311039a380cc9117b1e2258a721f"}, - {file = "aiohttp-3.10.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9d26da22a793dfd424be1050712a70c0afd96345245c29aced1e35dbace03413"}, - {file = "aiohttp-3.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:844d48ff9173d0b941abed8b2ea6a412f82b56d9ab1edb918c74000c15839362"}, - {file = "aiohttp-3.10.6-cp313-cp313-win32.whl", hash = "sha256:2dd56e3c43660ed3bea67fd4c5025f1ac1f9ecf6f0b991a6e5efe2e678c490c5"}, - {file = "aiohttp-3.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:c91781d969fbced1993537f45efe1213bd6fccb4b37bfae2a026e20d6fbed206"}, - {file = "aiohttp-3.10.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4407a80bca3e694f2d2a523058e20e1f9f98a416619e04f6dc09dc910352ac8b"}, - {file = "aiohttp-3.10.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1cb045ec5961f51af3e2c08cd6fe523f07cc6e345033adee711c49b7b91bb954"}, - {file = "aiohttp-3.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fabdcdc781a36b8fd7b2ca9dea8172f29a99e11d00ca0f83ffeb50958da84a1"}, - {file = "aiohttp-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a9f42efcc2681790595ab3d03c0e52d01edc23a0973ea09f0dc8d295e12b8e"}, - {file = "aiohttp-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cca776a440795db437d82c07455761c85bbcf3956221c3c23b8c93176c278ce7"}, - {file = "aiohttp-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5582de171f0898139cf51dd9fcdc79b848e28d9abd68e837f0803fc9f30807b1"}, - {file = "aiohttp-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:370e2d47575c53c817ee42a18acc34aad8da4dbdaac0a6c836d58878955f1477"}, - {file = "aiohttp-3.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:444d1704e2af6b30766debed9be8a795958029e552fe77551355badb1944012c"}, - {file = "aiohttp-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40271a2a375812967401c9ca8077de9368e09a43a964f4dce0ff603301ec9358"}, - {file = "aiohttp-3.10.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f3af26f86863fad12e25395805bb0babbd49d512806af91ec9708a272b696248"}, - {file = "aiohttp-3.10.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4752df44df48fd42b80f51d6a97553b482cda1274d9dc5df214a3a1aa5d8f018"}, - {file = "aiohttp-3.10.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2cd5290ab66cfca2f90045db2cc6434c1f4f9fbf97c9f1c316e785033782e7d2"}, - {file = "aiohttp-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3427031064b0d5c95647e6369c4aa3c556402f324a3e18107cb09517abe5f962"}, - {file = "aiohttp-3.10.6-cp38-cp38-win32.whl", hash = "sha256:614fc21e86adc28e4165a6391f851a6da6e9cbd7bb232d0df7718b453a89ee98"}, - {file = 
"aiohttp-3.10.6-cp38-cp38-win_amd64.whl", hash = "sha256:58c5d7318a136a3874c78717dd6de57519bc64f6363c5827c2b1cb775bea71dd"}, - {file = "aiohttp-3.10.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5db26bbca8e7968c4c977a0c640e0b9ce7224e1f4dcafa57870dc6ee28e27de6"}, - {file = "aiohttp-3.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3fb4216e3ec0dbc01db5ba802f02ed78ad8f07121be54eb9e918448cc3f61b7c"}, - {file = "aiohttp-3.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a976ef488f26e224079deb3d424f29144c6d5ba4ded313198169a8af8f47fb82"}, - {file = "aiohttp-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a86610174de8a85a920e956e2d4f9945e7da89f29a00e95ac62a4a414c4ef4e"}, - {file = "aiohttp-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:217791c6a399cc4f2e6577bb44344cba1f5714a2aebf6a0bea04cfa956658284"}, - {file = "aiohttp-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba3662d41abe2eab0eeec7ee56f33ef4e0b34858f38abf24377687f9e1fb00a5"}, - {file = "aiohttp-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4dfa5ad4bce9ca30a76117fbaa1c1decf41ebb6c18a4e098df44298941566f9"}, - {file = "aiohttp-3.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0009258e97502936d3bd5bf2ced15769629097d0abb81e6495fba1047824fe0"}, - {file = "aiohttp-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0a75d5c9fb4f06c41d029ae70ad943c3a844c40c0a769d12be4b99b04f473d3d"}, - {file = "aiohttp-3.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8198b7c002aae2b40b2d16bfe724b9a90bcbc9b78b2566fc96131ef4e382574d"}, - {file = "aiohttp-3.10.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4611db8c907f90fe86be112efdc2398cd7b4c8eeded5a4f0314b70fdea8feab0"}, - {file = "aiohttp-3.10.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ff99ae06eef85c7a565854826114ced72765832ee16c7e3e766c5e4c5b98d20e"}, - {file = "aiohttp-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7641920bdcc7cd2d3ddfb8bb9133a6c9536b09dbd49490b79e125180b2d25b93"}, - {file = "aiohttp-3.10.6-cp39-cp39-win32.whl", hash = "sha256:e2e7d5591ea868d5ec82b90bbeb366a198715672841d46281b623e23079593db"}, - {file = "aiohttp-3.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:b504c08c45623bf5c7ca41be380156d925f00199b3970efd758aef4a77645feb"}, - {file = "aiohttp-3.10.6.tar.gz", hash = "sha256:d2578ef941be0c2ba58f6f421a703527d08427237ed45ecb091fed6f83305336"}, + {file = "aiohttp-3.10.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a1ba7bc139592339ddeb62c06486d0fa0f4ca61216e14137a40d626c81faf10c"}, + {file = "aiohttp-3.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85e4d7bd05d18e4b348441e7584c681eff646e3bf38f68b2626807f3add21aa2"}, + {file = "aiohttp-3.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69de056022e7abf69cb9fec795515973cc3eeaff51e3ea8d72a77aa933a91c52"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3587506898d4a404b33bd19689286ccf226c3d44d7a73670c8498cd688e42c"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe285a697c851734285369614443451462ce78aac2b77db23567507484b1dc6f"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10c7932337285a6bfa3a5fe1fd4da90b66ebfd9d0cbd1544402e1202eb9a8c3e"}, + {file = 
"aiohttp-3.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd9716ef0224fe0d0336997eb242f40619f9f8c5c57e66b525a1ebf9f1d8cebe"}, + {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ceacea31f8a55cdba02bc72c93eb2e1b77160e91f8abd605969c168502fd71eb"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9721554bfa9e15f6e462da304374c2f1baede3cb06008c36c47fa37ea32f1dc4"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:22cdeb684d8552490dd2697a5138c4ecb46f844892df437aaf94f7eea99af879"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e56bb7e31c4bc79956b866163170bc89fd619e0581ce813330d4ea46921a4881"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3a95d2686bc4794d66bd8de654e41b5339fab542b2bca9238aa63ed5f4f2ce82"}, + {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d82404a0e7b10e0d7f022cf44031b78af8a4f99bd01561ac68f7c24772fed021"}, + {file = "aiohttp-3.10.8-cp310-cp310-win32.whl", hash = "sha256:4e10b04542d27e21538e670156e88766543692a0a883f243ba8fad9ddea82e53"}, + {file = "aiohttp-3.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:680dbcff5adc7f696ccf8bf671d38366a1f620b5616a1d333d0cb33956065395"}, + {file = "aiohttp-3.10.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:33a68011a38020ed4ff41ae0dbf4a96a202562ecf2024bdd8f65385f1d07f6ef"}, + {file = "aiohttp-3.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c7efa6616a95e3bd73b8a69691012d2ef1f95f9ea0189e42f338fae080c2fc6"}, + {file = "aiohttp-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddb9b9764cfb4459acf01c02d2a59d3e5066b06a846a364fd1749aa168efa2be"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7f270f4ca92760f98a42c45a58674fff488e23b144ec80b1cc6fa2effed377"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6984dda9d79064361ab58d03f6c1e793ea845c6cfa89ffe1a7b9bb400dfd56bd"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f6d47e392c27206701565c8df4cac6ebed28fdf6dcaea5b1eea7a4631d8e6db"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a72f89aea712c619b2ca32c6f4335c77125ede27530ad9705f4f349357833695"}, + {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36074b26f3263879ba8e4dbd33db2b79874a3392f403a70b772701363148b9f"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e32148b4a745e70a255a1d44b5664de1f2e24fcefb98a75b60c83b9e260ddb5b"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5aa1a073514cf59c81ad49a4ed9b5d72b2433638cd53160fd2f3a9cfa94718db"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d3a79200a9d5e621c4623081ddb25380b713c8cf5233cd11c1aabad990bb9381"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e45fdfcb2d5bcad83373e4808825b7512953146d147488114575780640665027"}, + {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f78e2a78432c537ae876a93013b7bc0027ba5b93ad7b3463624c4b6906489332"}, + {file = "aiohttp-3.10.8-cp311-cp311-win32.whl", hash = 
"sha256:f8179855a4e4f3b931cb1764ec87673d3fbdcca2af496c8d30567d7b034a13db"}, + {file = "aiohttp-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:ef9b484604af05ca745b6108ca1aaa22ae1919037ae4f93aaf9a37ba42e0b835"}, + {file = "aiohttp-3.10.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ab2d6523575fc98896c80f49ac99e849c0b0e69cc80bf864eed6af2ae728a52b"}, + {file = "aiohttp-3.10.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f5d5d5401744dda50b943d8764508d0e60cc2d3305ac1e6420935861a9d544bc"}, + {file = "aiohttp-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de23085cf90911600ace512e909114385026b16324fa203cc74c81f21fd3276a"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4618f0d2bf523043866a9ff8458900d8eb0a6d4018f251dae98e5f1fb699f3a8"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21c1925541ca84f7b5e0df361c0a813a7d6a56d3b0030ebd4b220b8d232015f9"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:497a7d20caea8855c5429db3cdb829385467217d7feb86952a6107e033e031b9"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c887019dbcb4af58a091a45ccf376fffe800b5531b45c1efccda4bedf87747ea"}, + {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40d2d719c3c36a7a65ed26400e2b45b2d9ed7edf498f4df38b2ae130f25a0d01"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57359785f27394a8bcab0da6dcd46706d087dfebf59a8d0ad2e64a4bc2f6f94f"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a961ee6f2cdd1a2be4735333ab284691180d40bad48f97bb598841bfcbfb94ec"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:fe3d79d6af839ffa46fdc5d2cf34295390894471e9875050eafa584cb781508d"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a281cba03bdaa341c70b7551b2256a88d45eead149f48b75a96d41128c240b3"}, + {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c6769d71bfb1ed60321363a9bc05e94dcf05e38295ef41d46ac08919e5b00d19"}, + {file = "aiohttp-3.10.8-cp312-cp312-win32.whl", hash = "sha256:a3081246bab4d419697ee45e555cef5cd1def7ac193dff6f50be761d2e44f194"}, + {file = "aiohttp-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:ab1546fc8e00676febc81c548a876c7bde32f881b8334b77f84719ab2c7d28dc"}, + {file = "aiohttp-3.10.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b1a012677b8e0a39e181e218de47d6741c5922202e3b0b65e412e2ce47c39337"}, + {file = "aiohttp-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2df786c96c57cd6b87156ba4c5f166af7b88f3fc05f9d592252fdc83d8615a3c"}, + {file = "aiohttp-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8885ca09d3a9317219c0831276bfe26984b17b2c37b7bf70dd478d17092a4772"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dbf252ac19860e0ab56cd480d2805498f47c5a2d04f5995d8d8a6effd04b48c"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2036479b6b94afaaca7d07b8a68dc0e67b0caf5f6293bb6a5a1825f5923000"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:365783e1b7c40b59ed4ce2b5a7491bae48f41cd2c30d52647a5b1ee8604c68ad"}, + {file = 
"aiohttp-3.10.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:270e653b5a4b557476a1ed40e6b6ce82f331aab669620d7c95c658ef976c9c5e"}, + {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8960fabc20bfe4fafb941067cda8e23c8c17c98c121aa31c7bf0cdab11b07842"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f21e8f2abed9a44afc3d15bba22e0dfc71e5fa859bea916e42354c16102b036f"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fecd55e7418fabd297fd836e65cbd6371aa4035a264998a091bbf13f94d9c44d"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:badb51d851358cd7535b647bb67af4854b64f3c85f0d089c737f75504d5910ec"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e860985f30f3a015979e63e7ba1a391526cdac1b22b7b332579df7867848e255"}, + {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:71462f8eeca477cbc0c9700a9464e3f75f59068aed5e9d4a521a103692da72dc"}, + {file = "aiohttp-3.10.8-cp313-cp313-win32.whl", hash = "sha256:177126e971782769b34933e94fddd1089cef0fe6b82fee8a885e539f5b0f0c6a"}, + {file = "aiohttp-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:98a4eb60e27033dee9593814ca320ee8c199489fbc6b2699d0f710584db7feb7"}, + {file = "aiohttp-3.10.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ffef3d763e4c8fc97e740da5b4d0f080b78630a3914f4e772a122bbfa608c1db"}, + {file = "aiohttp-3.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:597128cb7bc5f068181b49a732961f46cb89f85686206289d6ccb5e27cb5fbe2"}, + {file = "aiohttp-3.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f23a6c1d09de5de89a33c9e9b229106cb70dcfdd55e81a3a3580eaadaa32bc92"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da57af0c54a302b7c655fa1ccd5b1817a53739afa39924ef1816e7b7c8a07ccb"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e7a6af57091056a79a35104d6ec29d98ec7f1fb7270ad9c6fff871b678d1ff8"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32710d6b3b6c09c60c794d84ca887a3a2890131c0b02b3cefdcc6709a2260a7c"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b91f4f62ad39a8a42d511d66269b46cb2fb7dea9564c21ab6c56a642d28bff5"}, + {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:471a8c47344b9cc309558b3fcc469bd2c12b49322b4b31eb386c4a2b2d44e44a"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc0e7f91705445d79beafba9bb3057dd50830e40fe5417017a76a214af54e122"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:85431c9131a9a0f65260dc7a65c800ca5eae78c4c9931618f18c8e0933a0e0c1"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:b91557ee0893da52794b25660d4f57bb519bcad8b7df301acd3898f7197c5d81"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:4954e6b06dd0be97e1a5751fc606be1f9edbdc553c5d9b57d72406a8fbd17f9d"}, + {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a087c84b4992160ffef7afd98ef24177c8bd4ad61c53607145a8377457385100"}, + {file = "aiohttp-3.10.8-cp38-cp38-win32.whl", hash = "sha256:e1f0f7b27171b2956a27bd8f899751d0866ddabdd05cbddf3520f945130a908c"}, + {file = 
"aiohttp-3.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:c4916070e12ae140110aa598031876c1bf8676a36a750716ea0aa5bd694aa2e7"}, + {file = "aiohttp-3.10.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5284997e3d88d0dfb874c43e51ae8f4a6f4ca5b90dcf22995035187253d430db"}, + {file = "aiohttp-3.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9443d9ebc5167ce1fbb552faf2d666fb22ef5716a8750be67efd140a7733738c"}, + {file = "aiohttp-3.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b667e2a03407d79a76c618dc30cedebd48f082d85880d0c9c4ec2faa3e10f43e"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98fae99d5c2146f254b7806001498e6f9ffb0e330de55a35e72feb7cb2fa399b"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8296edd99d0dd9d0eb8b9e25b3b3506eef55c1854e9cc230f0b3f885f680410b"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ce46dfb49cfbf9e92818be4b761d4042230b1f0e05ffec0aad15b3eb162b905"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c38cfd355fd86c39b2d54651bd6ed7d63d4fe3b5553f364bae3306e2445f847"}, + {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:713dff3f87ceec3bde4f3f484861464e722cf7533f9fa6b824ec82bb5a9010a7"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21a72f4a9c69a8567a0aca12042f12bba25d3139fd5dd8eeb9931f4d9e8599cd"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6d1ad868624f6cea77341ef2877ad4e71f7116834a6cd7ec36ec5c32f94ee6ae"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a78ba86d5a08207d1d1ad10b97aed6ea48b374b3f6831d02d0b06545ac0f181e"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:aff048793d05e1ce05b62e49dccf81fe52719a13f4861530706619506224992b"}, + {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d088ca05381fd409793571d8e34eca06daf41c8c50a05aeed358d2d340c7af81"}, + {file = "aiohttp-3.10.8-cp39-cp39-win32.whl", hash = "sha256:ee97c4e54f457c366e1f76fbbf3e8effee9de57dae671084a161c00f481106ce"}, + {file = "aiohttp-3.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:d95ae4420669c871667aad92ba8cce6251d61d79c1a38504621094143f94a8b4"}, + {file = "aiohttp-3.10.8.tar.gz", hash = "sha256:21f8225f7dc187018e8433c9326be01477fb2810721e048b33ac49091b19fb4a"}, ] [package.dependencies] @@ -1121,61 +1121,70 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] [[package]] name = "grpcio" -version = "1.66.1" +version = "1.66.2" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" files = [ - {file = "grpcio-1.66.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492"}, - {file = "grpcio-1.66.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f"}, - {file = "grpcio-1.66.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083"}, - {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a"}, - {file = "grpcio-1.66.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d"}, - {file = "grpcio-1.66.1-cp310-cp310-win32.whl", hash = "sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c"}, - {file = "grpcio-1.66.1-cp310-cp310-win_amd64.whl", hash = "sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858"}, - {file = "grpcio-1.66.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a"}, - {file = "grpcio-1.66.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22"}, - {file = "grpcio-1.66.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1"}, - {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e"}, - {file = "grpcio-1.66.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd"}, - {file = "grpcio-1.66.1-cp311-cp311-win32.whl", hash = "sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791"}, - {file = "grpcio-1.66.1-cp311-cp311-win_amd64.whl", hash = "sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb"}, - {file = "grpcio-1.66.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a"}, - {file = "grpcio-1.66.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0"}, - {file = "grpcio-1.66.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761"}, - {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815"}, - {file = "grpcio-1.66.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524"}, - {file = "grpcio-1.66.1-cp312-cp312-win32.whl", hash = "sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759"}, - {file = "grpcio-1.66.1-cp312-cp312-win_amd64.whl", hash = "sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734"}, - {file = "grpcio-1.66.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2"}, - {file = "grpcio-1.66.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b"}, - {file = "grpcio-1.66.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef"}, - {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb"}, - {file = "grpcio-1.66.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d"}, - {file = "grpcio-1.66.1-cp38-cp38-win32.whl", hash = "sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3"}, - {file = "grpcio-1.66.1-cp38-cp38-win_amd64.whl", hash = "sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce"}, - {file = "grpcio-1.66.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503"}, - {file = "grpcio-1.66.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44"}, - {file = "grpcio-1.66.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e"}, - {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb"}, - {file = "grpcio-1.66.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c"}, - {file = "grpcio-1.66.1-cp39-cp39-win32.whl", hash = "sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45"}, - {file = "grpcio-1.66.1-cp39-cp39-win_amd64.whl", hash = "sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8"}, - {file = "grpcio-1.66.1.tar.gz", hash = "sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.66.1)"] + {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, + {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, + {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, + {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, + {file = 
"grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, + {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, + {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, + {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, + {file = "grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, + {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, + {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, + {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, + {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, + {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, + {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, + {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, + {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, + {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, + {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, + {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, + {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, + {file = 
"grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, + {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, + {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, + {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, + {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, + {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, + {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, + {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, + {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, + {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, + {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, + {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, + {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, + {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, + {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, + {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, + {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, + {file = "grpcio-1.66.2.tar.gz", hash = 
"sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.66.2)"] [[package]] name = "h11" @@ -3728,13 +3737,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.5" +version = "20.26.6" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6"}, - {file = "virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4"}, + {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, + {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, ] [package.dependencies] @@ -3891,103 +3900,103 @@ files = [ [[package]] name = "yarl" -version = "1.13.0" +version = "1.13.1" description = "Yet another URL library" optional = false python-versions = ">=3.8" files = [ - {file = "yarl-1.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:66c028066be36d54e7a0a38e832302b23222e75db7e65ed862dc94effc8ef062"}, - {file = "yarl-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:517f9d90ca0224bb7002266eba6e70d8fcc8b1d0c9321de2407e41344413ed46"}, - {file = "yarl-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5378cb60f4209505f6aa60423c174336bd7b22e0d8beb87a2a99ad50787f1341"}, - {file = "yarl-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0675a9cf65176e11692b20a516d5f744849251aa24024f422582d2d1bf7c8c82"}, - {file = "yarl-1.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419c22b419034b4ee3ba1c27cbbfef01ca8d646f9292f614f008093143334cdc"}, - {file = "yarl-1.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf10e525e461f43831d82149d904f35929d89f3ccd65beaf7422aecd500dd39"}, - {file = "yarl-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d78ebad57152d301284761b03a708aeac99c946a64ba967d47cbcc040e36688b"}, - {file = "yarl-1.13.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e480a12cec58009eeaeee7f48728dc8f629f8e0f280d84957d42c361969d84da"}, - {file = "yarl-1.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e5462756fb34c884ca9d4875b6d2ec80957a767123151c467c97a9b423617048"}, - {file = "yarl-1.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bff0d468664cdf7b2a6bfd5e17d4a7025edb52df12e0e6e17223387b421d425c"}, - {file = "yarl-1.13.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ffd8a9758b5df7401a49d50e76491f4c582cf7350365439563062cdff45bf16"}, - {file = "yarl-1.13.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ca71238af0d247d07747cb7202a9359e6e1d6d9e277041e1ad2d9f36b3a111a6"}, - {file = "yarl-1.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fda4404bbb6f91e327827f4483d52fe24f02f92de91217954cf51b1cb9ee9c41"}, - {file = "yarl-1.13.0-cp310-cp310-win32.whl", hash = "sha256:e557e2681b47a0ecfdfbea44743b3184d94d31d5ce0e4b13ff64ce227a40f86e"}, - {file = "yarl-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:3590ed9c7477059aea067a58ec87b433bbd47a2ceb67703b1098cca1ba075f0d"}, - {file = "yarl-1.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:8986fa2be78193dc8b8c27bd0d3667fe612f7232844872714c4200499d5225ca"}, - {file = "yarl-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0db15ce35dfd100bc9ab40173f143fbea26c84d7458d63206934fe5548fae25d"}, - {file = "yarl-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:49bee8c99586482a238a7b2ec0ef94e5f186bfdbb8204d14a3dd31867b3875ce"}, - {file = "yarl-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c73e0f8375b75806b8771890580566a2e6135e6785250840c4f6c45b69eb72d"}, - {file = "yarl-1.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ab16c9e94726fdfcbf5b37a641c9d9d0b35cc31f286a2c3b9cad6451cb53b2b"}, - {file = "yarl-1.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:784d6e50ea96b3bbb078eb7b40d8c0e3674c2f12da4f0061f889b2cfdbab8f37"}, - {file = "yarl-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:580fdb2ea48a40bcaa709ee0dc71f64e7a8f23b44356cc18cd9ce55dc3bc3212"}, - {file = "yarl-1.13.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9d2845f1a37438a8e11e4fcbbf6ffd64bc94dc9cb8c815f72d0eb6f6c622deb0"}, - {file = "yarl-1.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bcb374db7a609484941c01380c1450728ec84d9c3e68cd9a5feaecb52626c4be"}, - {file = "yarl-1.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:561a5f6c054927cf5a993dd7b032aeebc10644419e65db7dd6bdc0b848806e65"}, - {file = "yarl-1.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b536c2ac042add7f276d4e5857b08364fc32f28e02add153f6f214de50f12d07"}, - {file = "yarl-1.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:52b7bb09bb48f7855d574480e2efc0c30d31cab4e6ffc6203e2f7ffbf2e4496a"}, - {file = "yarl-1.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e4dddf99a853b3f60f3ce6363fb1ad94606113743cf653f116a38edd839a4461"}, - {file = "yarl-1.13.0-cp311-cp311-win32.whl", hash = "sha256:0b489858642e4e92203941a8fdeeb6373c0535aa986200b22f84d4b39cd602ba"}, - {file = "yarl-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:31748bee7078db26008bf94d39693c682a26b5c3a80a67194a4c9c8fe3b5cf47"}, - {file = "yarl-1.13.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3a9b2650425b2ab9cc68865978963601b3c2414e1d94ef04f193dd5865e1bd79"}, - {file = "yarl-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:73777f145cd591e1377bf8d8a97e5f8e39c9742ad0f100c898bba1f963aef662"}, - {file = "yarl-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:144b9e9164f21da81731c970dbda52245b343c0f67f3609d71013dd4d0db9ebf"}, - {file = "yarl-1.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3628e4e572b1db95285a72c4be102356f2dfc6214d9f126de975fd51b517ae55"}, - {file = "yarl-1.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bd3caf554a52da78ec08415ebedeb6b9636436ca2afda9b5b9ff4a533478940"}, - {file = "yarl-1.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d7a44ae252efb0fcd79ac0997416721a44345f53e5aec4a24f489d983aa00e3"}, - {file = "yarl-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24b78a1f57780eeeb17f5e1be851ab9fa951b98811e1bb4b5a53f74eec3e2666"}, - {file = "yarl-1.13.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79de5f8432b53d1261d92761f71dfab5fc7e1c75faa12a3535c27e681dacfa9d"}, - {file = 
"yarl-1.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f603216d62e9680bfac7fb168ef9673fd98abbb50c43e73d97615dfa1afebf57"}, - {file = "yarl-1.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:acf27399c94270103d68f86118a183008d601e4c2c3a7e98dcde0e3b0163132f"}, - {file = "yarl-1.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:08037790f973367431b9406a7b9d940e872cca12e081bce3b7cea068daf81f0a"}, - {file = "yarl-1.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33e2f5ef965e69a1f2a1b0071a70c4616157da5a5478f3c2f6e185e06c56a322"}, - {file = "yarl-1.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:38a3b742c923fe2cab7d2e2c67220d17da8d0433e8bfe038356167e697ef5524"}, - {file = "yarl-1.13.0-cp312-cp312-win32.whl", hash = "sha256:ab3ee57b25ce15f79ade27b7dfb5e678af26e4b93be5a4e22655acd9d40b81ba"}, - {file = "yarl-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:26214b0a9b8f4b7b04e67eee94a82c9b4e5c721f4d1ce7e8c87c78f0809b7684"}, - {file = "yarl-1.13.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:91251614cca1ba4ab0507f1ba5f5a44e17a5e9a4c7f0308ea441a994bdac3fc7"}, - {file = "yarl-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fe6946c3cbcfbed67c5e50dae49baff82ad054aaa10ff7a4db8dfac646b7b479"}, - {file = "yarl-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:de97ee57e00a82ebb8c378fc73c5d9a773e4c2cec8079ff34ebfef61c8ba5b11"}, - {file = "yarl-1.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1129737da2291c9952a93c015e73583dd66054f3ae991c8674f6e39c46d95dd3"}, - {file = "yarl-1.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:37049eb26d637a5b2f00562f65aad679f5d231c4c044edcd88320542ad66a2d9"}, - {file = "yarl-1.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d15aff3477fecb7a469d1fdf5939a686fbc5a16858022897d3e9fc99301f19"}, - {file = "yarl-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa187a8599e0425f26b25987d884a8b67deb5565f1c450c3a6e8d3de2cdc8715"}, - {file = "yarl-1.13.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d95fcc9508390db73a0f1c7e78d9a1b1a3532a3f34ceff97c0b3b04140fbe6e4"}, - {file = "yarl-1.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d04ea92a3643a9bb28aa6954fff718342caab2cc3d25d0160fe16e26c4a9acb7"}, - {file = "yarl-1.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2842a89b697d8ca3dda6a25b4e4d835d14afe25a315c8a79dbdf5f70edfd0960"}, - {file = "yarl-1.13.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db463fce425f935eee04a9182c74fdf9ed90d3bd2079d4a17f8fb7a2d7c11009"}, - {file = "yarl-1.13.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3ff602aa84420b301c083ae7f07df858ae8e371bf3be294397bda3e0b27c6290"}, - {file = "yarl-1.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a9a1a600e8449f3a24bc7dca513be8d69db173fe842e8332a7318b5b8757a6af"}, - {file = "yarl-1.13.0-cp313-cp313-win32.whl", hash = "sha256:5540b4896b244a6539f22b613b32b5d1b737e08011aa4ed56644cb0519d687df"}, - {file = "yarl-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:08a3b0b8d10092dade46424fe775f2c9bc32e5a985fdd6afe410fe28598db6b2"}, - {file = "yarl-1.13.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:be828e92ae67a21d6a252aecd65668dddbf3bb5d5278660be607647335001119"}, - {file = "yarl-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e3b4293f02129cc2f5068f3687ef294846a79c9d19fabaa9bfdfeeebae11c001"}, 
- {file = "yarl-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2cec7b52903dcf9008311167036775346dcb093bb15ed7ec876debc3095e7dab"}, - {file = "yarl-1.13.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612bd8d2267558bea36347e4e6e3a96f436bdc5c011f1437824be4f2e3abc5e1"}, - {file = "yarl-1.13.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a26956d268ad52bd2329c2c674890fe9e8669b41d83ed136e7037b1a29808e"}, - {file = "yarl-1.13.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01953b5686e5868fd0d8eaea4e484482c158597b8ddb9d9d4d048303fa3334c7"}, - {file = "yarl-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01d3941d416e71ce65f33393beb50e93c1c9e8e516971b6653c96df6eb599a2c"}, - {file = "yarl-1.13.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:801fb5dfc05910cd5ef4806726e2129d8c9a16cdfa26a8166697da0861e59dfc"}, - {file = "yarl-1.13.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:cdcdd49136d423ee5234c9360eae7063d3120a429ee984d7d9da821c012da4d7"}, - {file = "yarl-1.13.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:6072ff51eeb7938ecac35bf24fc465be00e75217eaa1ffad3cc7620accc0f6f4"}, - {file = "yarl-1.13.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:d42227711a4180d0c22cec30fd81d263d7bb378389d8e70b5f4c597e8abae202"}, - {file = "yarl-1.13.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:ebb2236f8098205f59774a28e25a84601a4beb3e974157d418ee6c470d73e0dc"}, - {file = "yarl-1.13.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f997004ff530b5381290e82b212a93bd420fefe5a605872dc16fc7e4a7f4251e"}, - {file = "yarl-1.13.0-cp38-cp38-win32.whl", hash = "sha256:b9648e5ae280babcac867b16e845ce51ed21f8c43bced2ca40cff7eee983d6d4"}, - {file = "yarl-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:f3ef76df654f3547dcb76ba550f9ca59826588eecc6bd7df16937c620df32060"}, - {file = "yarl-1.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:92abbe37e3fb08935e0e95ac5f83f7b286a6f2575f542225ec7afde405ed1fa1"}, - {file = "yarl-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1932c7bfa537f89ad5ca3d1e7e05de3388bb9e893230a384159fb974f6e9f90c"}, - {file = "yarl-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4483680e129b2a4250be20947b554cd5f7140fa9e5a1e4f1f42717cf91f8676a"}, - {file = "yarl-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f6f4a352d0beea5dd39315ab16fc26f0122d13457a7e65ad4f06c7961dcf87a"}, - {file = "yarl-1.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a67f20e97462dee8a89e9b997a78932959d2ed991e8f709514cb4160143e7b1"}, - {file = "yarl-1.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf4f3a87bd52f8f33b0155cd0f6f22bdf2092d88c6c6acbb1aee3bc206ecbe35"}, - {file = "yarl-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deb70c006be548076d628630aad9a3ef3a1b2c28aaa14b395cf0939b9124252e"}, - {file = "yarl-1.13.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf7a9b31729b97985d4a796808859dfd0e37b55f1ca948d46a568e56e51dd8fb"}, - {file = "yarl-1.13.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d807417ceebafb7ce18085a1205d28e8fcb1435a43197d7aa3fab98f5bfec5ef"}, - {file = "yarl-1.13.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9671d0d65f86e0a0eee59c5b05e381c44e3d15c36c2a67da247d5d82875b4e4e"}, - {file = 
"yarl-1.13.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:13a9cd39e47ca4dc25139d3c63fe0dc6acf1b24f9d94d3b5197ac578fbfd84bf"}, - {file = "yarl-1.13.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:acf8c219a59df22609cfaff4a7158a0946f273e3b03a5385f1fdd502496f0cff"}, - {file = "yarl-1.13.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:12c92576633027f297c26e52aba89f6363b460f483d85cf7c14216eb55d06d02"}, - {file = "yarl-1.13.0-cp39-cp39-win32.whl", hash = "sha256:c2518660bd8166e770b76ce92514b491b8720ae7e7f5f975cd888b1592894d2c"}, - {file = "yarl-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:db90702060b1cdb7c7609d04df5f68a12fd5581d013ad379e58e0c2e651d92b8"}, - {file = "yarl-1.13.0-py3-none-any.whl", hash = "sha256:c7d35ff2a5a51bc6d40112cdb4ca3fd9636482ce8c6ceeeee2301e34f7ed7556"}, - {file = "yarl-1.13.0.tar.gz", hash = "sha256:02f117a63d11c8c2ada229029f8bb444a811e62e5041da962de548f26ac2c40f"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:82e692fb325013a18a5b73a4fed5a1edaa7c58144dc67ad9ef3d604eccd451ad"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df4e82e68f43a07735ae70a2d84c0353e58e20add20ec0af611f32cd5ba43fb4"}, + {file = "yarl-1.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec9dd328016d8d25702a24ee274932aebf6be9787ed1c28d021945d264235b3c"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5820bd4178e6a639b3ef1db8b18500a82ceab6d8b89309e121a6859f56585b05"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86c438ce920e089c8c2388c7dcc8ab30dfe13c09b8af3d306bcabb46a053d6f7"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3de86547c820e4f4da4606d1c8ab5765dd633189791f15247706a2eeabc783ae"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca53632007c69ddcdefe1e8cbc3920dd88825e618153795b57e6ebcc92e752a"}, + {file = "yarl-1.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4ee1d240b84e2f213565f0ec08caef27a0e657d4c42859809155cf3a29d1735"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c49f3e379177f4477f929097f7ed4b0622a586b0aa40c07ac8c0f8e40659a1ac"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5c5e32fef09ce101fe14acd0f498232b5710effe13abac14cd95de9c274e689e"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab9524e45ee809a083338a749af3b53cc7efec458c3ad084361c1dbf7aaf82a2"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b1481c048fe787f65e34cb06f7d6824376d5d99f1231eae4778bbe5c3831076d"}, + {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:31497aefd68036d8e31bfbacef915826ca2e741dbb97a8d6c7eac66deda3b606"}, + {file = "yarl-1.13.1-cp310-cp310-win32.whl", hash = "sha256:1fa56f34b2236f5192cb5fceba7bbb09620e5337e0b6dfe2ea0ddbd19dd5b154"}, + {file = "yarl-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:1bbb418f46c7f7355084833051701b2301092e4611d9e392360c3ba2e3e69f88"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:216a6785f296169ed52cd7dcdc2612f82c20f8c9634bf7446327f50398732a51"}, + {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40c6e73c03a6befb85b72da213638b8aaa80fe4136ec8691560cf98b11b8ae6e"}, + {file = 
"yarl-1.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2430cf996113abe5aee387d39ee19529327205cda975d2b82c0e7e96e5fdabdc"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fb4134cc6e005b99fa29dbc86f1ea0a298440ab6b07c6b3ee09232a3b48f495"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309c104ecf67626c033845b860d31594a41343766a46fa58c3309c538a1e22b2"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f90575e9fe3aae2c1e686393a9689c724cd00045275407f71771ae5d690ccf38"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d2e1626be8712333a9f71270366f4a132f476ffbe83b689dd6dc0d114796c74"}, + {file = "yarl-1.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b66c87da3c6da8f8e8b648878903ca54589038a0b1e08dde2c86d9cd92d4ac9"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cf1ad338620249f8dd6d4b6a91a69d1f265387df3697ad5dc996305cf6c26fb2"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9915300fe5a0aa663c01363db37e4ae8e7c15996ebe2c6cce995e7033ff6457f"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:703b0f584fcf157ef87816a3c0ff868e8c9f3c370009a8b23b56255885528f10"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1d8e3ca29f643dd121f264a7c89f329f0fcb2e4461833f02de6e39fef80f89da"}, + {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7055bbade838d68af73aea13f8c86588e4bcc00c2235b4b6d6edb0dbd174e246"}, + {file = "yarl-1.13.1-cp311-cp311-win32.whl", hash = "sha256:a3442c31c11088e462d44a644a454d48110f0588de830921fd201060ff19612a"}, + {file = "yarl-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:81bad32c8f8b5897c909bf3468bf601f1b855d12f53b6af0271963ee67fff0d2"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f452cc1436151387d3d50533523291d5f77c6bc7913c116eb985304abdbd9ec9"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9cec42a20eae8bebf81e9ce23fb0d0c729fc54cf00643eb251ce7c0215ad49fe"}, + {file = "yarl-1.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d959fe96e5c2712c1876d69af0507d98f0b0e8d81bee14cfb3f6737470205419"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c837ab90c455f3ea8e68bee143472ee87828bff19ba19776e16ff961425b57"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94a993f976cdcb2dc1b855d8b89b792893220db8862d1a619efa7451817c836b"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2442a415a5f4c55ced0fade7b72123210d579f7d950e0b5527fc598866e62c"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fdbf0418489525231723cdb6c79e7738b3cbacbaed2b750cb033e4ea208f220"}, + {file = "yarl-1.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b7f6e699304717fdc265a7e1922561b02a93ceffdaefdc877acaf9b9f3080b8"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bcd5bf4132e6a8d3eb54b8d56885f3d3a38ecd7ecae8426ecf7d9673b270de43"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:2a93a4557f7fc74a38ca5a404abb443a242217b91cd0c4840b1ebedaad8919d4"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:22b739f99c7e4787922903f27a892744189482125cc7b95b747f04dd5c83aa9f"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2db874dd1d22d4c2c657807562411ffdfabec38ce4c5ce48b4c654be552759dc"}, + {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4feaaa4742517eaceafcbe74595ed335a494c84634d33961214b278126ec1485"}, + {file = "yarl-1.13.1-cp312-cp312-win32.whl", hash = "sha256:bbf9c2a589be7414ac4a534d54e4517d03f1cbb142c0041191b729c2fa23f320"}, + {file = "yarl-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:d07b52c8c450f9366c34aa205754355e933922c79135125541daae6cbf31c799"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:95c6737f28069153c399d875317f226bbdea939fd48a6349a3b03da6829fb550"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cd66152561632ed4b2a9192e7f8e5a1d41e28f58120b4761622e0355f0fe034c"}, + {file = "yarl-1.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6a2acde25be0cf9be23a8f6cbd31734536a264723fca860af3ae5e89d771cd71"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18595e6a2ee0826bf7dfdee823b6ab55c9b70e8f80f8b77c37e694288f5de1"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a31d21089894942f7d9a8df166b495101b7258ff11ae0abec58e32daf8088813"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45f209fb4bbfe8630e3d2e2052535ca5b53d4ce2d2026bed4d0637b0416830da"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f722f30366474a99745533cc4015b1781ee54b08de73260b2bbe13316079851"}, + {file = "yarl-1.13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3bf60444269345d712838bb11cc4eadaf51ff1a364ae39ce87a5ca8ad3bb2c8"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:942c80a832a79c3707cca46bd12ab8aa58fddb34b1626d42b05aa8f0bcefc206"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:44b07e1690f010c3c01d353b5790ec73b2f59b4eae5b0000593199766b3f7a5c"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:396e59b8de7e4d59ff5507fb4322d2329865b909f29a7ed7ca37e63ade7f835c"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3bb83a0f12701c0b91112a11148b5217617982e1e466069d0555be9b372f2734"}, + {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c92b89bffc660f1274779cb6fbb290ec1f90d6dfe14492523a0667f10170de26"}, + {file = "yarl-1.13.1-cp313-cp313-win32.whl", hash = "sha256:269c201bbc01d2cbba5b86997a1e0f73ba5e2f471cfa6e226bcaa7fd664b598d"}, + {file = "yarl-1.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:1d0828e17fa701b557c6eaed5edbd9098eb62d8838344486248489ff233998b8"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8be8cdfe20787e6a5fcbd010f8066227e2bb9058331a4eccddec6c0db2bb85b2"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08d7148ff11cb8e886d86dadbfd2e466a76d5dd38c7ea8ebd9b0e07946e76e4b"}, + {file = "yarl-1.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4afdf84610ca44dcffe8b6c22c68f309aff96be55f5ea2fa31c0c225d6b83e23"}, + {file = 
"yarl-1.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0d12fe78dcf60efa205e9a63f395b5d343e801cf31e5e1dda0d2c1fb618073d"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298c1eecfd3257aa16c0cb0bdffb54411e3e831351cd69e6b0739be16b1bdaa8"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c14c16831b565707149c742d87a6203eb5597f4329278446d5c0ae7a1a43928e"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9bacedbb99685a75ad033fd4de37129449e69808e50e08034034c0bf063f99"}, + {file = "yarl-1.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:658e8449b84b92a4373f99305de042b6bd0d19bf2080c093881e0516557474a5"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:373f16f38721c680316a6a00ae21cc178e3a8ef43c0227f88356a24c5193abd6"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:45d23c4668d4925688e2ea251b53f36a498e9ea860913ce43b52d9605d3d8177"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f7917697bcaa3bc3e83db91aa3a0e448bf5cde43c84b7fc1ae2427d2417c0224"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5989a38ba1281e43e4663931a53fbf356f78a0325251fd6af09dd03b1d676a09"}, + {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11b3ca8b42a024513adce810385fcabdd682772411d95bbbda3b9ed1a4257644"}, + {file = "yarl-1.13.1-cp38-cp38-win32.whl", hash = "sha256:dcaef817e13eafa547cdfdc5284fe77970b891f731266545aae08d6cce52161e"}, + {file = "yarl-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:7addd26594e588503bdef03908fc207206adac5bd90b6d4bc3e3cf33a829f57d"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a0ae6637b173d0c40b9c1462e12a7a2000a71a3258fa88756a34c7d38926911c"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:576365c9f7469e1f6124d67b001639b77113cfd05e85ce0310f5f318fd02fe85"}, + {file = "yarl-1.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78f271722423b2d4851cf1f4fa1a1c4833a128d020062721ba35e1a87154a049"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d74f3c335cfe9c21ea78988e67f18eb9822f5d31f88b41aec3a1ec5ecd32da5"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1891d69a6ba16e89473909665cd355d783a8a31bc84720902c5911dbb6373465"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb382fd7b4377363cc9f13ba7c819c3c78ed97c36a82f16f3f92f108c787cbbf"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8854b9f80693d20cec797d8e48a848c2fb273eb6f2587b57763ccba3f3bd4b"}, + {file = "yarl-1.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbf2c3f04ff50f16404ce70f822cdc59760e5e2d7965905f0e700270feb2bbfc"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fb9f59f3848edf186a76446eb8bcf4c900fe147cb756fbbd730ef43b2e67c6a7"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ef9b85fa1bc91c4db24407e7c4da93a5822a73dd4513d67b454ca7064e8dc6a3"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:098b870c18f1341786f290b4d699504e18f1cd050ed179af8123fd8232513424"}, + {file = 
"yarl-1.13.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8c723c91c94a3bc8033dd2696a0f53e5d5f8496186013167bddc3fb5d9df46a3"}, + {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44a4c40a6f84e4d5955b63462a0e2a988f8982fba245cf885ce3be7618f6aa7d"}, + {file = "yarl-1.13.1-cp39-cp39-win32.whl", hash = "sha256:84bbcdcf393139f0abc9f642bf03f00cac31010f3034faa03224a9ef0bb74323"}, + {file = "yarl-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:fc2931ac9ce9c61c9968989ec831d3a5e6fcaaff9474e7cfa8de80b7aff5a093"}, + {file = "yarl-1.13.1-py3-none-any.whl", hash = "sha256:6a5185ad722ab4dd52d5fb1f30dcc73282eb1ed494906a92d1a228d3f89607b0"}, + {file = "yarl-1.13.1.tar.gz", hash = "sha256:ec8cfe2295f3e5e44c51f57272afbd69414ae629ec7c6b27f5a410efc78b70a0"}, ] [package.dependencies] diff --git a/src/codeflare_sdk.egg-info/SOURCES.txt b/src/codeflare_sdk.egg-info/SOURCES.txt index 63614a81..53043edf 100644 --- a/src/codeflare_sdk.egg-info/SOURCES.txt +++ b/src/codeflare_sdk.egg-info/SOURCES.txt @@ -7,17 +7,19 @@ src/codeflare_sdk.egg-info/SOURCES.txt src/codeflare_sdk.egg-info/dependency_links.txt src/codeflare_sdk.egg-info/top_level.txt src/codeflare_sdk/cluster/__init__.py -src/codeflare_sdk/cluster/auth.py src/codeflare_sdk/cluster/awload.py src/codeflare_sdk/cluster/cluster.py src/codeflare_sdk/cluster/config.py src/codeflare_sdk/cluster/model.py src/codeflare_sdk/cluster/widgets.py +src/codeflare_sdk/common/__init__.py +src/codeflare_sdk/common/kubernetes_cluster/__init__.py +src/codeflare_sdk/common/kubernetes_cluster/auth.py +src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py src/codeflare_sdk/job/__init__.py src/codeflare_sdk/job/ray_jobs.py src/codeflare_sdk/utils/__init__.py src/codeflare_sdk/utils/demos.py src/codeflare_sdk/utils/generate_cert.py src/codeflare_sdk/utils/generate_yaml.py -src/codeflare_sdk/utils/kube_api_helpers.py src/codeflare_sdk/utils/pretty_print.py diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index 29205a36..ba191bc5 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -1,8 +1,4 @@ from .cluster import ( - Authentication, - KubeConfiguration, - TokenAuthentication, - KubeConfigFileAuthentication, AWManager, Cluster, ClusterConfiguration, @@ -17,6 +13,13 @@ view_clusters, ) +from .common import ( + Authentication, + KubeConfiguration, + TokenAuthentication, + KubeConfigFileAuthentication, +) + from .job import RayJobClient from .utils import generate_cert diff --git a/src/codeflare_sdk/cluster/__init__.py b/src/codeflare_sdk/cluster/__init__.py index 6490a224..5be1c9a5 100644 --- a/src/codeflare_sdk/cluster/__init__.py +++ b/src/codeflare_sdk/cluster/__init__.py @@ -1,10 +1,3 @@ -from .auth import ( - Authentication, - KubeConfiguration, - TokenAuthentication, - KubeConfigFileAuthentication, -) - from .model import ( RayClusterStatus, AppWrapperStatus, diff --git a/src/codeflare_sdk/cluster/awload.py b/src/codeflare_sdk/cluster/awload.py index 1ead5914..e840c1ca 100644 --- a/src/codeflare_sdk/cluster/awload.py +++ b/src/codeflare_sdk/cluster/awload.py @@ -22,9 +22,12 @@ import os import yaml -from kubernetes import client, config -from ..utils.kube_api_helpers import _kube_api_error_handling -from .auth import config_check, get_api_client +from kubernetes import client +from ..common import _kube_api_error_handling +from ..common.kubernetes_cluster.auth import ( + config_check, + get_api_client, +) class AWManager: diff --git a/src/codeflare_sdk/cluster/cluster.py 
b/src/codeflare_sdk/cluster/cluster.py index 44effda7..77308245 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/cluster/cluster.py @@ -18,21 +18,21 @@ cluster setup queue, a list of all existing clusters, and the user's working namespace. """ -import re -import subprocess from time import sleep from typing import List, Optional, Tuple, Dict -from kubernetes import config from ray.job_submission import JobSubmissionClient -from .auth import config_check, get_api_client +from ..common.kubernetes_cluster.auth import ( + config_check, + get_api_client, +) from ..utils import pretty_print from ..utils.generate_yaml import ( generate_appwrapper, head_worker_gpu_count_from_cluster, ) -from ..utils.kube_api_helpers import _kube_api_error_handling +from ..common import _kube_api_error_handling from ..utils.generate_yaml import is_openshift_cluster from .config import ClusterConfiguration @@ -47,8 +47,7 @@ cluster_up_down_buttons, is_notebook, ) -from kubernetes import client, config -from kubernetes.utils import parse_quantity +from kubernetes import client import yaml import os import requests diff --git a/src/codeflare_sdk/cluster/widgets.py b/src/codeflare_sdk/cluster/widgets.py index a96a1a49..11a92632 100644 --- a/src/codeflare_sdk/cluster/widgets.py +++ b/src/codeflare_sdk/cluster/widgets.py @@ -28,8 +28,11 @@ import pandas as pd from .config import ClusterConfiguration from .model import RayClusterStatus -from ..utils.kube_api_helpers import _kube_api_error_handling -from .auth import config_check, get_api_client +from ..common import _kube_api_error_handling +from ..common.kubernetes_cluster.auth import ( + config_check, + get_api_client, +) def cluster_up_down_buttons(cluster: "codeflare_sdk.cluster.Cluster") -> widgets.Button: diff --git a/src/codeflare_sdk/common/__init__.py b/src/codeflare_sdk/common/__init__.py new file mode 100644 index 00000000..c8bd2d5c --- /dev/null +++ b/src/codeflare_sdk/common/__init__.py @@ -0,0 +1,8 @@ +# Importing everything from the kubernetes_cluster module +from .kubernetes_cluster import ( + Authentication, + KubeConfiguration, + TokenAuthentication, + KubeConfigFileAuthentication, + _kube_api_error_handling, +) diff --git a/src/codeflare_sdk/common/kubernetes_cluster/__init__.py b/src/codeflare_sdk/common/kubernetes_cluster/__init__.py new file mode 100644 index 00000000..beac4d99 --- /dev/null +++ b/src/codeflare_sdk/common/kubernetes_cluster/__init__.py @@ -0,0 +1,10 @@ +from .auth import ( + Authentication, + KubeConfiguration, + TokenAuthentication, + KubeConfigFileAuthentication, + config_check, + get_api_client, +) + +from .kube_api_helpers import _kube_api_error_handling diff --git a/src/codeflare_sdk/cluster/auth.py b/src/codeflare_sdk/common/kubernetes_cluster/auth.py similarity index 99% rename from src/codeflare_sdk/cluster/auth.py rename to src/codeflare_sdk/common/kubernetes_cluster/auth.py index fbba0c22..2525d2e3 100644 --- a/src/codeflare_sdk/cluster/auth.py +++ b/src/codeflare_sdk/common/kubernetes_cluster/auth.py @@ -23,7 +23,7 @@ from kubernetes import client, config import os import urllib3 -from ..utils.kube_api_helpers import _kube_api_error_handling +from .kube_api_helpers import _kube_api_error_handling from typing import Optional diff --git a/src/codeflare_sdk/utils/kube_api_helpers.py b/src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py similarity index 100% rename from src/codeflare_sdk/utils/kube_api_helpers.py rename to src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py diff 
--git a/src/codeflare_sdk/utils/generate_cert.py b/src/codeflare_sdk/utils/generate_cert.py index f3dc80e9..6d2c0d91 100644 --- a/src/codeflare_sdk/utils/generate_cert.py +++ b/src/codeflare_sdk/utils/generate_cert.py @@ -19,9 +19,12 @@ from cryptography import x509 from cryptography.x509.oid import NameOID import datetime -from ..cluster.auth import config_check, get_api_client -from kubernetes import client, config -from .kube_api_helpers import _kube_api_error_handling +from ..common.kubernetes_cluster.auth import ( + config_check, + get_api_client, +) +from kubernetes import client +from ..common import _kube_api_error_handling def generate_ca_cert(days: int = 30): diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/utils/generate_yaml.py index 7a17e010..2ddfb31a 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/utils/generate_yaml.py @@ -21,16 +21,14 @@ from typing import Optional import typing import yaml -import sys import os -import argparse import uuid -from kubernetes import client, config -from .kube_api_helpers import _kube_api_error_handling -from ..cluster.auth import get_api_client, config_check -from os import urandom -from base64 import b64encode -from urllib3.util import parse_url +from kubernetes import client +from ..common import _kube_api_error_handling +from ..common.kubernetes_cluster.auth import ( + get_api_client, + config_check, +) from kubernetes.client.exceptions import ApiException import codeflare_sdk diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 38325057..6fdd03af 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -4,7 +4,9 @@ import subprocess from kubernetes import client, config import kubernetes.client -from codeflare_sdk.utils.kube_api_helpers import _kube_api_error_handling +from codeflare_sdk.common.kubernetes_cluster.kube_api_helpers import ( + _kube_api_error_handling, +) def get_ray_image(): diff --git a/tests/unit_test.py b/tests/unit_test.py index d4b7b149..bb2e2f7d 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -39,7 +39,7 @@ _app_wrapper_status, _ray_cluster_status, ) -from codeflare_sdk.cluster.auth import ( +from codeflare_sdk.common.kubernetes_cluster import ( TokenAuthentication, Authentication, KubeConfigFileAuthentication, @@ -71,7 +71,7 @@ get_package_and_version, ) -import codeflare_sdk.utils.kube_api_helpers +import codeflare_sdk.common.kubernetes_cluster.kube_api_helpers from codeflare_sdk.utils.generate_yaml import ( gen_names, is_openshift_cluster, @@ -198,8 +198,8 @@ def test_token_auth_login_tls(mocker): def test_config_check_no_config_file(mocker): mocker.patch("os.path.expanduser", return_value="/mock/home/directory") mocker.patch("os.path.isfile", return_value=False) - mocker.patch("codeflare_sdk.cluster.auth.config_path", None) - mocker.patch("codeflare_sdk.cluster.auth.api_client", None) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.config_path", None) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) with pytest.raises(PermissionError) as e: config_check() @@ -210,8 +210,8 @@ def test_config_check_with_incluster_config(mocker): mocker.patch("os.path.isfile", return_value=False) mocker.patch.dict(os.environ, {"KUBERNETES_PORT": "number"}) mocker.patch("kubernetes.config.load_incluster_config", side_effect=None) - mocker.patch("codeflare_sdk.cluster.auth.config_path", None) - mocker.patch("codeflare_sdk.cluster.auth.api_client", None) + 
mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.config_path", None) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) result = config_check() assert result == None @@ -221,16 +221,18 @@ def test_config_check_with_existing_config_file(mocker): mocker.patch("os.path.expanduser", return_value="/mock/home/directory") mocker.patch("os.path.isfile", return_value=True) mocker.patch("kubernetes.config.load_kube_config", side_effect=None) - mocker.patch("codeflare_sdk.cluster.auth.config_path", None) - mocker.patch("codeflare_sdk.cluster.auth.api_client", None) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.config_path", None) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) result = config_check() assert result == None def test_config_check_with_config_path_and_no_api_client(mocker): - mocker.patch("codeflare_sdk.cluster.auth.config_path", "/mock/config/path") - mocker.patch("codeflare_sdk.cluster.auth.api_client", None) + mocker.patch( + "codeflare_sdk.common.kubernetes_cluster.auth.config_path", "/mock/config/path" + ) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) result = config_check() assert result == "/mock/config/path" @@ -2170,7 +2172,8 @@ def test_map_to_ray_cluster(mocker): mock_api_client = mocker.MagicMock(spec=client.ApiClient) mocker.patch( - "codeflare_sdk.cluster.auth.get_api_client", return_value=mock_api_client + "codeflare_sdk.common.kubernetes_cluster.auth.get_api_client", + return_value=mock_api_client, ) mock_routes = { diff --git a/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py b/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py index cf21fee0..65c836ed 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py @@ -12,7 +12,7 @@ from tests.e2e.support import * -from codeflare_sdk.utils.kube_api_helpers import _kube_api_error_handling +from codeflare_sdk.common import _kube_api_error_handling namespace = "test-ns-rayupgrade-sleep" # Global variables for kueue resources diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index 05862d07..dce1ca71 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -7,7 +7,7 @@ from tests.e2e.support import * from codeflare_sdk.cluster.cluster import get_cluster -from codeflare_sdk.utils.kube_api_helpers import _kube_api_error_handling +from codeflare_sdk.common import _kube_api_error_handling namespace = "test-ns-rayupgrade" # Global variables for kueue resources From 6e4e4ff2077e17bbac332c4bd4ee0c38b51b93b2 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 2 Oct 2024 10:32:51 +0100 Subject: [PATCH 357/496] refactor: ray and appwrapper modules Signed-off-by: Bobbins228 --- src/codeflare_sdk/__init__.py | 17 ++- src/codeflare_sdk/cluster/__init__.py | 18 --- src/codeflare_sdk/cluster/widgets.py | 18 +-- .../{ => common}/utils/__init__.py | 0 src/codeflare_sdk/{ => common}/utils/demos.py | 0 .../{ => common}/utils/generate_cert.py | 4 +- src/codeflare_sdk/ray/__init__.py | 16 +++ src/codeflare_sdk/ray/appwrapper/__init__.py | 6 + .../{cluster => ray/appwrapper}/awload.py | 4 +- src/codeflare_sdk/ray/appwrapper/status.py | 46 ++++++++ .../{job => ray/client}/__init__.py | 0 .../{job => ray/client}/ray_jobs.py | 0 src/codeflare_sdk/ray/cluster/__init__.py | 13 +++ .../{ => ray}/cluster/cluster.py | 20 ++-- src/codeflare_sdk/{ => ray}/cluster/config.py 
| 0 .../{utils => ray/cluster}/generate_yaml.py | 14 +-- .../{utils => ray/cluster}/pretty_print.py | 3 +- .../model.py => ray/cluster/status.py} | 33 +----- .../{ => ray}/templates/base-template.yaml | 0 tests/demo_test.py | 6 +- .../e2e/mnist_raycluster_sdk_aw_kind_test.py | 2 +- tests/e2e/mnist_raycluster_sdk_kind_test.py | 2 +- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 2 +- tests/e2e/mnist_rayjob.py | 4 +- tests/e2e/start_ray_cluster.py | 2 +- tests/unit_test.py | 109 ++++++++++-------- tests/unit_test_support.py | 2 +- .../raycluster_sdk_upgrade_sleep_test.py | 2 +- tests/upgrade/raycluster_sdk_upgrade_test.py | 4 +- 29 files changed, 202 insertions(+), 145 deletions(-) rename src/codeflare_sdk/{ => common}/utils/__init__.py (100%) rename src/codeflare_sdk/{ => common}/utils/demos.py (100%) rename src/codeflare_sdk/{ => common}/utils/generate_cert.py (98%) create mode 100644 src/codeflare_sdk/ray/__init__.py create mode 100644 src/codeflare_sdk/ray/appwrapper/__init__.py rename src/codeflare_sdk/{cluster => ray/appwrapper}/awload.py (97%) create mode 100644 src/codeflare_sdk/ray/appwrapper/status.py rename src/codeflare_sdk/{job => ray/client}/__init__.py (100%) rename src/codeflare_sdk/{job => ray/client}/ray_jobs.py (100%) create mode 100644 src/codeflare_sdk/ray/cluster/__init__.py rename src/codeflare_sdk/{ => ray}/cluster/cluster.py (99%) rename src/codeflare_sdk/{ => ray}/cluster/config.py (100%) rename src/codeflare_sdk/{utils => ray/cluster}/generate_yaml.py (96%) rename src/codeflare_sdk/{utils => ray/cluster}/pretty_print.py (98%) rename src/codeflare_sdk/{cluster/model.py => ray/cluster/status.py} (72%) rename src/codeflare_sdk/{ => ray}/templates/base-template.yaml (100%) diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index ba191bc5..599171a9 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -1,18 +1,19 @@ -from .cluster import ( - AWManager, +from .ray import ( Cluster, ClusterConfiguration, RayClusterStatus, - AppWrapperStatus, CodeFlareClusterStatus, RayCluster, - AppWrapper, get_cluster, list_all_queued, list_all_clusters, - view_clusters, + AWManager, + AppWrapperStatus, + RayJobClient, ) +from .cluster import view_clusters + from .common import ( Authentication, KubeConfiguration, @@ -20,10 +21,8 @@ KubeConfigFileAuthentication, ) -from .job import RayJobClient - -from .utils import generate_cert -from .utils.demos import copy_demo_nbs +from .common.utils import generate_cert +from .common.utils.demos import copy_demo_nbs from importlib.metadata import version, PackageNotFoundError diff --git a/src/codeflare_sdk/cluster/__init__.py b/src/codeflare_sdk/cluster/__init__.py index 5be1c9a5..60be4fcd 100644 --- a/src/codeflare_sdk/cluster/__init__.py +++ b/src/codeflare_sdk/cluster/__init__.py @@ -1,21 +1,3 @@ -from .model import ( - RayClusterStatus, - AppWrapperStatus, - CodeFlareClusterStatus, - RayCluster, - AppWrapper, -) - -from .cluster import ( - Cluster, - ClusterConfiguration, - get_cluster, - list_all_queued, - list_all_clusters, -) - from .widgets import ( view_clusters, ) - -from .awload import AWManager diff --git a/src/codeflare_sdk/cluster/widgets.py b/src/codeflare_sdk/cluster/widgets.py index 11a92632..d827c661 100644 --- a/src/codeflare_sdk/cluster/widgets.py +++ b/src/codeflare_sdk/cluster/widgets.py @@ -26,8 +26,8 @@ import ipywidgets as widgets from IPython.display import display, HTML, Javascript import pandas as pd -from .config import ClusterConfiguration -from .model import 
RayClusterStatus +from ..ray.cluster.config import ClusterConfiguration +from ..ray.cluster.status import RayClusterStatus from ..common import _kube_api_error_handling from ..common.kubernetes_cluster.auth import ( config_check, @@ -35,7 +35,9 @@ ) -def cluster_up_down_buttons(cluster: "codeflare_sdk.cluster.Cluster") -> widgets.Button: +def cluster_up_down_buttons( + cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", +) -> widgets.Button: """ The cluster_up_down_buttons function returns two button widgets for a create and delete button. The function uses the appwrapper bool to distinguish between resource type for the tool tip. @@ -115,7 +117,7 @@ def view_clusters(namespace: str = None): ) return # Exit function if not in Jupyter Notebook - from .cluster import get_current_namespace + from ..ray.cluster.cluster import get_current_namespace if not namespace: namespace = get_current_namespace() @@ -278,7 +280,7 @@ def _on_ray_dashboard_button_click( """ _on_ray_dashboard_button_click handles the event when the Open Ray Dashboard button is clicked, opening the Ray Dashboard in a new tab """ - from codeflare_sdk.cluster import Cluster + from codeflare_sdk.ray.cluster import Cluster cluster_name = classification_widget.value namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][ @@ -309,7 +311,7 @@ def _on_list_jobs_button_click( """ _on_list_jobs_button_click handles the event when the View Jobs button is clicked, opening the Ray Jobs Dashboard in a new tab """ - from codeflare_sdk.cluster import Cluster + from codeflare_sdk.ray.cluster import Cluster cluster_name = classification_widget.value namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][ @@ -342,7 +344,7 @@ def _delete_cluster( _delete_cluster function deletes the cluster with the given name and namespace. It optionally waits for the cluster to be deleted. """ - from .cluster import _check_aw_exists + from ..ray.cluster.cluster import _check_aw_exists try: config_check() @@ -400,7 +402,7 @@ def _fetch_cluster_data(namespace): """ _fetch_cluster_data function fetches all clusters and their spec in a given namespace and returns a DataFrame. """ - from .cluster import list_all_clusters + from ..ray.cluster.cluster import list_all_clusters rayclusters = list_all_clusters(namespace, False) if not rayclusters: diff --git a/src/codeflare_sdk/utils/__init__.py b/src/codeflare_sdk/common/utils/__init__.py similarity index 100% rename from src/codeflare_sdk/utils/__init__.py rename to src/codeflare_sdk/common/utils/__init__.py diff --git a/src/codeflare_sdk/utils/demos.py b/src/codeflare_sdk/common/utils/demos.py similarity index 100% rename from src/codeflare_sdk/utils/demos.py rename to src/codeflare_sdk/common/utils/demos.py diff --git a/src/codeflare_sdk/utils/generate_cert.py b/src/codeflare_sdk/common/utils/generate_cert.py similarity index 98% rename from src/codeflare_sdk/utils/generate_cert.py rename to src/codeflare_sdk/common/utils/generate_cert.py index 6d2c0d91..014d192d 100644 --- a/src/codeflare_sdk/utils/generate_cert.py +++ b/src/codeflare_sdk/common/utils/generate_cert.py @@ -19,12 +19,12 @@ from cryptography import x509 from cryptography.x509.oid import NameOID import datetime -from ..common.kubernetes_cluster.auth import ( +from ..kubernetes_cluster.auth import ( config_check, get_api_client, ) from kubernetes import client -from ..common import _kube_api_error_handling +from .. 
import _kube_api_error_handling def generate_ca_cert(days: int = 30): diff --git a/src/codeflare_sdk/ray/__init__.py b/src/codeflare_sdk/ray/__init__.py new file mode 100644 index 00000000..ab55cc82 --- /dev/null +++ b/src/codeflare_sdk/ray/__init__.py @@ -0,0 +1,16 @@ +from .appwrapper import AppWrapper, AppWrapperStatus, AWManager + +from .client import ( + RayJobClient, +) + +from .cluster import ( + Cluster, + ClusterConfiguration, + get_cluster, + list_all_queued, + list_all_clusters, + RayClusterStatus, + CodeFlareClusterStatus, + RayCluster, +) diff --git a/src/codeflare_sdk/ray/appwrapper/__init__.py b/src/codeflare_sdk/ray/appwrapper/__init__.py new file mode 100644 index 00000000..537fdf8a --- /dev/null +++ b/src/codeflare_sdk/ray/appwrapper/__init__.py @@ -0,0 +1,6 @@ +from .awload import AWManager + +from .status import ( + AppWrapperStatus, + AppWrapper, +) diff --git a/src/codeflare_sdk/cluster/awload.py b/src/codeflare_sdk/ray/appwrapper/awload.py similarity index 97% rename from src/codeflare_sdk/cluster/awload.py rename to src/codeflare_sdk/ray/appwrapper/awload.py index e840c1ca..02794f3d 100644 --- a/src/codeflare_sdk/cluster/awload.py +++ b/src/codeflare_sdk/ray/appwrapper/awload.py @@ -23,8 +23,8 @@ import yaml from kubernetes import client -from ..common import _kube_api_error_handling -from ..common.kubernetes_cluster.auth import ( +from ...common import _kube_api_error_handling +from ...common.kubernetes_cluster.auth import ( config_check, get_api_client, ) diff --git a/src/codeflare_sdk/ray/appwrapper/status.py b/src/codeflare_sdk/ray/appwrapper/status.py new file mode 100644 index 00000000..79fe0fd2 --- /dev/null +++ b/src/codeflare_sdk/ray/appwrapper/status.py @@ -0,0 +1,46 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +The status sub-module defines Enums containing information for +AppWrapper states, as well as dataclasses to store information for AppWrappers. +""" + +from dataclasses import dataclass +from enum import Enum + + +class AppWrapperStatus(Enum): + """ + Defines the possible reportable phases of an AppWrapper. + """ + + SUSPENDED = "suspended" + RESUMING = "resuming" + RUNNING = "running" + RESETTING = "resetting" + SUSPENDING = "suspending" + SUCCEEDED = "succeeded" + FAILED = "failed" + TERMINATING = "terminating" + + +@dataclass +class AppWrapper: + """ + For storing information about an AppWrapper. 
+ """ + + name: str + status: AppWrapperStatus diff --git a/src/codeflare_sdk/job/__init__.py b/src/codeflare_sdk/ray/client/__init__.py similarity index 100% rename from src/codeflare_sdk/job/__init__.py rename to src/codeflare_sdk/ray/client/__init__.py diff --git a/src/codeflare_sdk/job/ray_jobs.py b/src/codeflare_sdk/ray/client/ray_jobs.py similarity index 100% rename from src/codeflare_sdk/job/ray_jobs.py rename to src/codeflare_sdk/ray/client/ray_jobs.py diff --git a/src/codeflare_sdk/ray/cluster/__init__.py b/src/codeflare_sdk/ray/cluster/__init__.py new file mode 100644 index 00000000..bf32459b --- /dev/null +++ b/src/codeflare_sdk/ray/cluster/__init__.py @@ -0,0 +1,13 @@ +from .status import ( + RayClusterStatus, + CodeFlareClusterStatus, + RayCluster, +) + +from .cluster import ( + Cluster, + ClusterConfiguration, + get_cluster, + list_all_queued, + list_all_clusters, +) diff --git a/src/codeflare_sdk/cluster/cluster.py b/src/codeflare_sdk/ray/cluster/cluster.py similarity index 99% rename from src/codeflare_sdk/cluster/cluster.py rename to src/codeflare_sdk/ray/cluster/cluster.py index 77308245..4d8201f7 100644 --- a/src/codeflare_sdk/cluster/cluster.py +++ b/src/codeflare_sdk/ray/cluster/cluster.py @@ -23,27 +23,29 @@ from ray.job_submission import JobSubmissionClient -from ..common.kubernetes_cluster.auth import ( +from ...common.kubernetes_cluster.auth import ( config_check, get_api_client, ) -from ..utils import pretty_print -from ..utils.generate_yaml import ( +from . import pretty_print +from .generate_yaml import ( generate_appwrapper, head_worker_gpu_count_from_cluster, ) -from ..common import _kube_api_error_handling -from ..utils.generate_yaml import is_openshift_cluster +from ...common import _kube_api_error_handling +from .generate_yaml import is_openshift_cluster from .config import ClusterConfiguration -from .model import ( - AppWrapper, - AppWrapperStatus, +from .status import ( CodeFlareClusterStatus, RayCluster, RayClusterStatus, ) -from .widgets import ( +from ..appwrapper import ( + AppWrapper, + AppWrapperStatus, +) +from ...cluster.widgets import ( cluster_up_down_buttons, is_notebook, ) diff --git a/src/codeflare_sdk/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py similarity index 100% rename from src/codeflare_sdk/cluster/config.py rename to src/codeflare_sdk/ray/cluster/config.py diff --git a/src/codeflare_sdk/utils/generate_yaml.py b/src/codeflare_sdk/ray/cluster/generate_yaml.py similarity index 96% rename from src/codeflare_sdk/utils/generate_yaml.py rename to src/codeflare_sdk/ray/cluster/generate_yaml.py index 2ddfb31a..f0d70cf5 100755 --- a/src/codeflare_sdk/utils/generate_yaml.py +++ b/src/codeflare_sdk/ray/cluster/generate_yaml.py @@ -24,8 +24,8 @@ import os import uuid from kubernetes import client -from ..common import _kube_api_error_handling -from ..common.kubernetes_cluster.auth import ( +from ...common import _kube_api_error_handling +from ...common.kubernetes_cluster.auth import ( get_api_client, config_check, ) @@ -80,7 +80,7 @@ def is_kind_cluster(): def update_names( cluster_yaml: dict, - cluster: "codeflare_sdk.cluster.Cluster", + cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", ): metadata = cluster_yaml.get("metadata") metadata["name"] = cluster.config.name @@ -135,7 +135,7 @@ def update_resources( def head_worker_gpu_count_from_cluster( - cluster: "codeflare_sdk.cluster.Cluster", + cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", ) -> typing.Tuple[int, int]: head_gpus = 0 worker_gpus = 0 @@ -155,7 +155,7 @@ def 
head_worker_gpu_count_from_cluster( def head_worker_resources_from_cluster( - cluster: "codeflare_sdk.cluster.Cluster", + cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", ) -> typing.Tuple[dict, dict]: to_return = {}, {} for k in cluster.config.head_extended_resource_requests.keys(): @@ -178,7 +178,7 @@ def head_worker_resources_from_cluster( def update_nodes( ray_cluster_dict: dict, - cluster: "codeflare_sdk.cluster.Cluster", + cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", ): head = ray_cluster_dict.get("spec").get("headGroupSpec") worker = ray_cluster_dict.get("spec").get("workerGroupSpecs")[0] @@ -325,7 +325,7 @@ def write_user_yaml(user_yaml, output_file_name): print(f"Written to: {output_file_name}") -def generate_appwrapper(cluster: "codeflare_sdk.cluster.Cluster"): +def generate_appwrapper(cluster: "codeflare_sdk.ray.cluster.cluster.Cluster"): cluster_yaml = read_template(cluster.config.template) appwrapper_name, _ = gen_names(cluster.config.name) update_names( diff --git a/src/codeflare_sdk/utils/pretty_print.py b/src/codeflare_sdk/ray/cluster/pretty_print.py similarity index 98% rename from src/codeflare_sdk/utils/pretty_print.py rename to src/codeflare_sdk/ray/cluster/pretty_print.py index 30331319..883f14ad 100644 --- a/src/codeflare_sdk/utils/pretty_print.py +++ b/src/codeflare_sdk/ray/cluster/pretty_print.py @@ -24,7 +24,8 @@ from rich.panel import Panel from rich import box from typing import List -from ..cluster.model import RayCluster, AppWrapper, RayClusterStatus +from .status import RayCluster, RayClusterStatus +from ..appwrapper.status import AppWrapper def print_no_resources_found(): diff --git a/src/codeflare_sdk/cluster/model.py b/src/codeflare_sdk/ray/cluster/status.py similarity index 72% rename from src/codeflare_sdk/cluster/model.py rename to src/codeflare_sdk/ray/cluster/status.py index 44be5456..136ae302 100644 --- a/src/codeflare_sdk/cluster/model.py +++ b/src/codeflare_sdk/ray/cluster/status.py @@ -1,4 +1,4 @@ -# Copyright 2022 IBM, Red Hat +# Copyright 2024 IBM, Red Hat # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,9 +13,9 @@ # limitations under the License. """ -The model sub-module defines Enums containing information for Ray cluster -states and AppWrapper states, and CodeFlare cluster states, as well as -dataclasses to store information for Ray clusters and AppWrappers. +The status sub-module defines Enums containing information for Ray cluster +states, and CodeFlare cluster states, as well as +dataclasses to store information for Ray clusters. """ from dataclasses import dataclass, field @@ -37,21 +37,6 @@ class RayClusterStatus(Enum): SUSPENDED = "suspended" -class AppWrapperStatus(Enum): - """ - Defines the possible reportable phases of an AppWrapper. - """ - - SUSPENDED = "suspended" - RESUMING = "resuming" - RUNNING = "running" - RESETTING = "resetting" - SUSPENDING = "suspending" - SUCCEEDED = "succeeded" - FAILED = "failed" - TERMINATING = "terminating" - - class CodeFlareClusterStatus(Enum): """ Defines the possible reportable states of a Codeflare cluster. @@ -87,13 +72,3 @@ class RayCluster: dashboard: str worker_extended_resources: typing.Dict[str, int] = field(default_factory=dict) head_extended_resources: typing.Dict[str, int] = field(default_factory=dict) - - -@dataclass -class AppWrapper: - """ - For storing information about an AppWrapper.
- """ - - name: str - status: AppWrapperStatus diff --git a/src/codeflare_sdk/templates/base-template.yaml b/src/codeflare_sdk/ray/templates/base-template.yaml similarity index 100% rename from src/codeflare_sdk/templates/base-template.yaml rename to src/codeflare_sdk/ray/templates/base-template.yaml diff --git a/tests/demo_test.py b/tests/demo_test.py index 65324af0..b5453058 100644 --- a/tests/demo_test.py +++ b/tests/demo_test.py @@ -16,12 +16,12 @@ These were the old tests used during initial demo building, and they will soon be fully deprecated. """ -from codeflare_sdk.cluster.cluster import ( +from codeflare_sdk.ray.cluster.cluster import ( list_all_clusters, list_all_queued, _app_wrapper_status, ) -from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration +from codeflare_sdk.ray.cluster.cluster import Cluster, ClusterConfiguration import time @@ -53,7 +53,7 @@ def test_cluster_down(): def test_no_resources_found(): - from codeflare_sdk.utils import pretty_print + from codeflare_sdk.ray.cluster import pretty_print pretty_print.print_no_resources_found() diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py index d7949b8c..4623a9e5 100644 --- a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -3,7 +3,7 @@ from time import sleep from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication -from codeflare_sdk.job import RayJobClient +from codeflare_sdk.ray.client import RayJobClient import pytest diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index d81e9149..6bfb19af 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -3,7 +3,7 @@ from time import sleep from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication -from codeflare_sdk.job import RayJobClient +from codeflare_sdk.ray.client import RayJobClient import pytest diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 7e4002ad..d3e69868 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -3,7 +3,7 @@ from time import sleep from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication -from codeflare_sdk.job import RayJobClient +from codeflare_sdk.ray.client import RayJobClient import pytest diff --git a/tests/e2e/mnist_rayjob.py b/tests/e2e/mnist_rayjob.py index c9306da6..bf47e6da 100644 --- a/tests/e2e/mnist_rayjob.py +++ b/tests/e2e/mnist_rayjob.py @@ -4,8 +4,8 @@ from support import * -from codeflare_sdk.cluster.cluster import get_cluster -from codeflare_sdk.job import RayJobClient +from codeflare_sdk.ray.cluster.cluster import get_cluster +from codeflare_sdk.ray.client import RayJobClient namespace = sys.argv[1] diff --git a/tests/e2e/start_ray_cluster.py b/tests/e2e/start_ray_cluster.py index 48ab604f..8aac19f0 100644 --- a/tests/e2e/start_ray_cluster.py +++ b/tests/e2e/start_ray_cluster.py @@ -3,7 +3,7 @@ from time import sleep -from codeflare_sdk.cluster.cluster import Cluster, ClusterConfiguration +from codeflare_sdk.ray.cluster.cluster import Cluster, ClusterConfiguration namespace = sys.argv[1] ray_image = os.getenv("RAY_IMAGE") diff --git a/tests/unit_test.py b/tests/unit_test.py index bb2e2f7d..235eed0e 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -20,15 +20,15 @@ import re import uuid -from 
codeflare_sdk.cluster import cluster +from codeflare_sdk.ray.cluster import cluster parent = Path(__file__).resolve().parents[1] aw_dir = os.path.expanduser("~/.codeflare/resources/") sys.path.append(str(parent) + "/src") from kubernetes import client, config, dynamic -from codeflare_sdk.cluster.awload import AWManager -from codeflare_sdk.cluster.cluster import ( +from codeflare_sdk.ray.appwrapper.awload import AWManager +from codeflare_sdk.ray.cluster.cluster import ( Cluster, ClusterConfiguration, _map_to_ray_cluster, @@ -45,20 +45,22 @@ KubeConfigFileAuthentication, config_check, ) -from codeflare_sdk.utils.pretty_print import ( +from codeflare_sdk.ray.cluster.pretty_print import ( print_no_resources_found, print_app_wrappers_status, print_cluster_status, print_clusters, ) -from codeflare_sdk.cluster.model import ( +from codeflare_sdk.ray.appwrapper.status import ( AppWrapper, - RayCluster, AppWrapperStatus, +) +from codeflare_sdk.ray.cluster.status import ( + RayCluster, RayClusterStatus, CodeFlareClusterStatus, ) -from codeflare_sdk.utils.generate_cert import ( +from codeflare_sdk.common.utils.generate_cert import ( generate_ca_cert, generate_tls_cert, export_env, @@ -72,7 +74,7 @@ ) import codeflare_sdk.common.kubernetes_cluster.kube_api_helpers -from codeflare_sdk.utils.generate_yaml import ( +from codeflare_sdk.ray.cluster.generate_yaml import ( gen_names, is_openshift_cluster, ) @@ -88,7 +90,7 @@ from unittest.mock import MagicMock, patch from pytest_mock import MockerFixture from ray.job_submission import JobSubmissionClient -from codeflare_sdk.job.ray_jobs import RayJobClient +from codeflare_sdk.ray.client.ray_jobs import RayJobClient # For mocking openshift client results fake_res = openshift.Result("fake") @@ -268,7 +270,10 @@ def test_config_creation(): assert config.worker_cpu_requests == 3 and config.worker_cpu_limits == 4 assert config.worker_memory_requests == "5G" and config.worker_memory_limits == "6G" assert config.worker_extended_resource_requests == {"nvidia.com/gpu": 7} - assert config.template == f"{parent}/src/codeflare_sdk/templates/base-template.yaml" + assert ( + config.template + == f"{parent}/src/codeflare_sdk/ray/templates/base-template.yaml" + ) assert config.machine_types == ["cpu.small", "gpu.large"] assert config.image_pull_secrets == ["unit-test-pull-secret"] assert config.appwrapper == True @@ -438,7 +443,7 @@ def test_cluster_creation_no_mcad_local_queue(mocker): def test_default_cluster_creation(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( - "codeflare_sdk.cluster.cluster.get_current_namespace", + "codeflare_sdk.ray.cluster.cluster.get_current_namespace", return_value="opendatahub", ) mocker.patch( @@ -593,7 +598,7 @@ def arg_check_del_effect(group, version, namespace, plural, name, *args): def test_cluster_up_down(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch("codeflare_sdk.cluster.cluster.Cluster._throw_for_no_raycluster") + mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", return_value={"spec": {"domain": ""}}, @@ -620,7 +625,7 @@ def test_cluster_up_down(mocker): def test_cluster_up_down_no_mcad(mocker): - mocker.patch("codeflare_sdk.cluster.cluster.Cluster._throw_for_no_raycluster") + mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") 
mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( @@ -716,7 +721,7 @@ def test_cluster_uris(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( - "codeflare_sdk.cluster.cluster._get_ingress_domain", + "codeflare_sdk.ray.cluster.cluster._get_ingress_domain", return_value="apps.cluster.awsroute.org", ) mocker.patch( @@ -760,11 +765,11 @@ def test_local_client_url(mocker): return_value={"spec": {"domain": ""}}, ) mocker.patch( - "codeflare_sdk.cluster.cluster._get_ingress_domain", + "codeflare_sdk.ray.cluster.cluster._get_ingress_domain", return_value="rayclient-unit-test-cluster-localinter-ns.apps.cluster.awsroute.org", ) mocker.patch( - "codeflare_sdk.cluster.cluster.Cluster.create_app_wrapper", + "codeflare_sdk.ray.cluster.cluster.Cluster.create_app_wrapper", return_value="unit-test-cluster-localinter.yaml", ) @@ -955,15 +960,15 @@ def test_ray_details(mocker, capsys): head_mem_limits=8, ) mocker.patch( - "codeflare_sdk.cluster.cluster.Cluster.status", + "codeflare_sdk.ray.cluster.cluster.Cluster.status", return_value=(False, CodeFlareClusterStatus.UNKNOWN), ) mocker.patch( - "codeflare_sdk.cluster.cluster.Cluster.cluster_dashboard_uri", + "codeflare_sdk.ray.cluster.cluster.Cluster.cluster_dashboard_uri", return_value="", ) mocker.patch( - "codeflare_sdk.utils.generate_yaml.local_queue_exists", + "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", return_value="true", ) cf = Cluster( @@ -2002,7 +2007,7 @@ def test_get_cluster_openshift(mocker): ] mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) mocker.patch( - "codeflare_sdk.utils.generate_yaml.local_queue_exists", + "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", return_value="true", ) @@ -2037,7 +2042,7 @@ def custom_side_effect(group, version, namespace, plural, **kwargs): ], ) mocker.patch( - "codeflare_sdk.utils.generate_yaml.local_queue_exists", + "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", return_value="true", ) @@ -2080,7 +2085,7 @@ def test_get_cluster(mocker): return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), ) mocker.patch( - "codeflare_sdk.utils.generate_yaml.local_queue_exists", + "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", return_value="true", ) cluster = get_cluster("quicktest") @@ -2118,7 +2123,7 @@ def test_get_cluster_no_mcad(mocker): return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), ) mocker.patch( - "codeflare_sdk.utils.generate_yaml.local_queue_exists", + "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", return_value="true", ) cluster = get_cluster("quicktest") @@ -2167,7 +2172,7 @@ def test_map_to_ray_cluster(mocker): mocker.patch("kubernetes.config.load_kube_config") mocker.patch( - "codeflare_sdk.cluster.cluster.is_openshift_cluster", return_value=True + "codeflare_sdk.ray.cluster.cluster.is_openshift_cluster", return_value=True ) mock_api_client = mocker.MagicMock(spec=client.ApiClient) @@ -2354,7 +2359,7 @@ def test_cluster_status(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( - "codeflare_sdk.utils.generate_yaml.local_queue_exists", + "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", return_value="true", ) fake_aw = AppWrapper("test", AppWrapperStatus.FAILED) @@ 
-2382,14 +2387,18 @@ def test_cluster_status(mocker): local_queue="local_default_queue", ) ) - mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) - mocker.patch("codeflare_sdk.cluster.cluster._ray_cluster_status", return_value=None) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._app_wrapper_status", return_value=None + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=None + ) status, ready = cf.status() assert status == CodeFlareClusterStatus.UNKNOWN assert ready == False mocker.patch( - "codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=fake_aw + "codeflare_sdk.ray.cluster.cluster._app_wrapper_status", return_value=fake_aw ) status, ready = cf.status() assert status == CodeFlareClusterStatus.FAILED @@ -2416,7 +2425,7 @@ def test_cluster_status(mocker): assert ready == False mocker.patch( - "codeflare_sdk.cluster.cluster._ray_cluster_status", return_value=fake_ray + "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=fake_ray ) status, ready = cf.status() @@ -2446,10 +2455,14 @@ def test_wait_ready(mocker, capsys): return_value=ingress_retrieval(), ) mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch("codeflare_sdk.cluster.cluster._app_wrapper_status", return_value=None) - mocker.patch("codeflare_sdk.cluster.cluster._ray_cluster_status", return_value=None) mocker.patch( - "codeflare_sdk.utils.generate_yaml.local_queue_exists", + "codeflare_sdk.ray.cluster.cluster._app_wrapper_status", return_value=None + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=None + ) + mocker.patch( + "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", return_value="true", ) mocker.patch.object( @@ -2488,7 +2501,7 @@ def test_wait_ready(mocker, capsys): in captured.out ) mocker.patch( - "codeflare_sdk.cluster.cluster.Cluster.status", + "codeflare_sdk.ray.cluster.cluster.Cluster.status", return_value=(True, CodeFlareClusterStatus.READY), ) cf.wait_ready() @@ -2599,7 +2612,7 @@ def test_AWManager_submit_remove(mocker, capsys): def test_generate_ca_cert(): """ - test the function codeflare_sdk.utils.generate_ca_cert generates the correct outputs + test the function codeflare_sdk.common.utils.generate_ca_cert generates the correct outputs """ key, certificate = generate_ca_cert() cert = load_pem_x509_certificate(base64.b64decode(certificate)) @@ -2629,11 +2642,11 @@ def secret_ca_retreival(secret_name, namespace): def test_generate_tls_cert(mocker): """ - test the function codeflare_sdk.utils.generate_ca_cert generates the correct outputs + test the function codeflare_sdk.common.utils.generate_ca_cert generates the correct outputs """ mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( - "codeflare_sdk.utils.generate_cert.get_secret_name", + "codeflare_sdk.common.utils.generate_cert.get_secret_name", return_value="ca-secret-cluster", ) mocker.patch( @@ -2657,7 +2670,7 @@ def test_generate_tls_cert(mocker): def test_export_env(): """ - test the function codeflare_sdk.utils.export_ev generates the correct outputs + test the function codeflare_sdk.common.utils.generate_ca_cert.export_ev generates the correct outputs """ tls_dir = "cluster" ns = "namespace" @@ -2677,15 +2690,15 @@ def test_export_env(): def test_cluster_throw_for_no_raycluster(mocker: MockerFixture): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch( - 
"codeflare_sdk.cluster.cluster.get_current_namespace", + "codeflare_sdk.ray.cluster.cluster.get_current_namespace", return_value="opendatahub", ) mocker.patch( - "codeflare_sdk.utils.generate_yaml.get_default_kueue_name", + "codeflare_sdk.ray.cluster.generate_yaml.get_default_kueue_name", return_value="default", ) mocker.patch( - "codeflare_sdk.utils.generate_yaml.local_queue_exists", + "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", return_value="true", ) @@ -2980,12 +2993,14 @@ def test_view_clusters(mocker, capsys): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value={"items": []}, ) - mocker.patch("codeflare_sdk.cluster.cluster._check_aw_exists", return_value=False) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._check_aw_exists", return_value=False + ) # Return empty dataframe when no clusters are found - mocker.patch("codeflare_sdk.cluster.cluster.list_all_clusters", return_value=[]) + mocker.patch("codeflare_sdk.ray.cluster.cluster.list_all_clusters", return_value=[]) mocker.patch( - "codeflare_sdk.cluster.cluster.get_current_namespace", + "codeflare_sdk.ray.cluster.cluster.get_current_namespace", return_value="default", ) df = cf_widgets._fetch_cluster_data(namespace="default") @@ -3019,8 +3034,8 @@ def test_view_clusters(mocker, capsys): ) # Mock the Cluster class and related methods - mocker.patch("codeflare_sdk.cluster.Cluster") - mocker.patch("codeflare_sdk.cluster.ClusterConfiguration") + mocker.patch("codeflare_sdk.ray.cluster.Cluster") + mocker.patch("codeflare_sdk.ray.cluster.ClusterConfiguration") with patch("ipywidgets.ToggleButtons") as MockToggleButtons, patch( "ipywidgets.Button" @@ -3103,7 +3118,7 @@ def test_view_clusters(mocker, capsys): def test_fetch_cluster_data(mocker): # Return empty dataframe when no clusters are found - mocker.patch("codeflare_sdk.cluster.cluster.list_all_clusters", return_value=[]) + mocker.patch("codeflare_sdk.ray.cluster.cluster.list_all_clusters", return_value=[]) df = cf_widgets._fetch_cluster_data(namespace="default") assert df.empty @@ -3145,7 +3160,7 @@ def test_fetch_cluster_data(mocker): mock_raycluster2.status = RayClusterStatus.SUSPENDED with patch( - "codeflare_sdk.cluster.cluster.list_all_clusters", + "codeflare_sdk.ray.cluster.cluster.list_all_clusters", return_value=[mock_raycluster1, mock_raycluster2], ): # Call the function under test diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py index c23c47eb..b3c2e197 100644 --- a/tests/unit_test_support.py +++ b/tests/unit_test_support.py @@ -1,4 +1,4 @@ -from codeflare_sdk.cluster.cluster import ( +from codeflare_sdk.ray.cluster.cluster import ( Cluster, ClusterConfiguration, ) diff --git a/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py b/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py index 65c836ed..793853d0 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_sleep_test.py @@ -7,7 +7,7 @@ TokenAuthentication, get_cluster, ) -from codeflare_sdk.job import RayJobClient +from codeflare_sdk.ray.client import RayJobClient from tests.e2e.support import * diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index dce1ca71..7c8b2922 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -2,10 +2,10 @@ from time import sleep from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication -from codeflare_sdk.job import RayJobClient +from 
codeflare_sdk.ray.client import RayJobClient from tests.e2e.support import * -from codeflare_sdk.cluster.cluster import get_cluster +from codeflare_sdk.ray.cluster.cluster import get_cluster from codeflare_sdk.common import _kube_api_error_handling From 92a89617d85cd684f0a4f44c1f02a3832ff78477 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 2 Oct 2024 10:57:38 +0100 Subject: [PATCH 358/496] remove codeflare_sdk.egg-info Signed-off-by: Bobbins228 --- src/codeflare_sdk.egg-info/PKG-INFO | 4 --- src/codeflare_sdk.egg-info/SOURCES.txt | 25 ------------------- .../dependency_links.txt | 0 src/codeflare_sdk.egg-info/top_level.txt | 1 - 4 files changed, 30 deletions(-) delete mode 100644 src/codeflare_sdk.egg-info/PKG-INFO delete mode 100644 src/codeflare_sdk.egg-info/SOURCES.txt delete mode 100644 src/codeflare_sdk.egg-info/dependency_links.txt delete mode 100644 src/codeflare_sdk.egg-info/top_level.txt diff --git a/src/codeflare_sdk.egg-info/PKG-INFO b/src/codeflare_sdk.egg-info/PKG-INFO deleted file mode 100644 index c4061c62..00000000 --- a/src/codeflare_sdk.egg-info/PKG-INFO +++ /dev/null @@ -1,4 +0,0 @@ -Metadata-Version: 2.1 -Name: codeflare-sdk -Version: 0.0.0 -License-File: LICENSE diff --git a/src/codeflare_sdk.egg-info/SOURCES.txt b/src/codeflare_sdk.egg-info/SOURCES.txt deleted file mode 100644 index 53043edf..00000000 --- a/src/codeflare_sdk.egg-info/SOURCES.txt +++ /dev/null @@ -1,25 +0,0 @@ -LICENSE -README.md -pyproject.toml -src/codeflare_sdk/__init__.py -src/codeflare_sdk.egg-info/PKG-INFO -src/codeflare_sdk.egg-info/SOURCES.txt -src/codeflare_sdk.egg-info/dependency_links.txt -src/codeflare_sdk.egg-info/top_level.txt -src/codeflare_sdk/cluster/__init__.py -src/codeflare_sdk/cluster/awload.py -src/codeflare_sdk/cluster/cluster.py -src/codeflare_sdk/cluster/config.py -src/codeflare_sdk/cluster/model.py -src/codeflare_sdk/cluster/widgets.py -src/codeflare_sdk/common/__init__.py -src/codeflare_sdk/common/kubernetes_cluster/__init__.py -src/codeflare_sdk/common/kubernetes_cluster/auth.py -src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py -src/codeflare_sdk/job/__init__.py -src/codeflare_sdk/job/ray_jobs.py -src/codeflare_sdk/utils/__init__.py -src/codeflare_sdk/utils/demos.py -src/codeflare_sdk/utils/generate_cert.py -src/codeflare_sdk/utils/generate_yaml.py -src/codeflare_sdk/utils/pretty_print.py diff --git a/src/codeflare_sdk.egg-info/dependency_links.txt b/src/codeflare_sdk.egg-info/dependency_links.txt deleted file mode 100644 index e69de29b..00000000 diff --git a/src/codeflare_sdk.egg-info/top_level.txt b/src/codeflare_sdk.egg-info/top_level.txt deleted file mode 100644 index 633675b3..00000000 --- a/src/codeflare_sdk.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -codeflare_sdk From ffbdfecfed79a890bf250fbd840d3aee1c140fe7 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 1 Oct 2024 17:41:59 +0100 Subject: [PATCH 359/496] Refactor: kueue module --- src/codeflare_sdk/common/kueue/__init__.py | 5 ++ src/codeflare_sdk/common/kueue/kueue.py | 78 +++++++++++++++++++ .../ray/cluster/generate_yaml.py | 62 +-------------- tests/unit_test.py | 18 ++--- 4 files changed, 93 insertions(+), 70 deletions(-) create mode 100644 src/codeflare_sdk/common/kueue/__init__.py create mode 100644 src/codeflare_sdk/common/kueue/kueue.py diff --git a/src/codeflare_sdk/common/kueue/__init__.py b/src/codeflare_sdk/common/kueue/__init__.py new file mode 100644 index 00000000..b02e3c24 --- /dev/null +++ b/src/codeflare_sdk/common/kueue/__init__.py @@ -0,0 +1,5 @@ +from 
.kueue import ( + get_default_kueue_name, + local_queue_exists, + add_queue_label, +) diff --git a/src/codeflare_sdk/common/kueue/kueue.py b/src/codeflare_sdk/common/kueue/kueue.py new file mode 100644 index 00000000..0c207548 --- /dev/null +++ b/src/codeflare_sdk/common/kueue/kueue.py @@ -0,0 +1,78 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional +from codeflare_sdk.common import _kube_api_error_handling +from codeflare_sdk.common.kubernetes_cluster.auth import config_check, get_api_client +from kubernetes import client +from kubernetes.client.exceptions import ApiException + + +def get_default_kueue_name(namespace: str): + # If the local queue is set, use it. Otherwise, try to use the default queue. + try: + config_check() + api_instance = client.CustomObjectsApi(get_api_client()) + local_queues = api_instance.list_namespaced_custom_object( + group="kueue.x-k8s.io", + version="v1beta1", + namespace=namespace, + plural="localqueues", + ) + except ApiException as e: # pragma: no cover + if e.status == 404 or e.status == 403: + return + else: + return _kube_api_error_handling(e) + for lq in local_queues["items"]: + if ( + "annotations" in lq["metadata"] + and "kueue.x-k8s.io/default-queue" in lq["metadata"]["annotations"] + and lq["metadata"]["annotations"]["kueue.x-k8s.io/default-queue"].lower() + == "true" + ): + return lq["metadata"]["name"] + + +def local_queue_exists(namespace: str, local_queue_name: str): + # get all local queues in the namespace + try: + config_check() + api_instance = client.CustomObjectsApi(get_api_client()) + local_queues = api_instance.list_namespaced_custom_object( + group="kueue.x-k8s.io", + version="v1beta1", + namespace=namespace, + plural="localqueues", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + # check if local queue with the name provided in cluster config exists + for lq in local_queues["items"]: + if lq["metadata"]["name"] == local_queue_name: + return True + return False + + +def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]): + lq_name = local_queue or get_default_kueue_name(namespace) + if lq_name == None: + return + elif not local_queue_exists(namespace, lq_name): + raise ValueError( + "local_queue provided does not exist or is not in this namespace. 
Please provide the correct local_queue name in Cluster Configuration" + ) + if not "labels" in item["metadata"]: + item["metadata"]["labels"] = {} + item["metadata"]["labels"].update({"kueue.x-k8s.io/queue-name": lq_name}) diff --git a/src/codeflare_sdk/ray/cluster/generate_yaml.py b/src/codeflare_sdk/ray/cluster/generate_yaml.py index f0d70cf5..0b174650 100755 --- a/src/codeflare_sdk/ray/cluster/generate_yaml.py +++ b/src/codeflare_sdk/ray/cluster/generate_yaml.py @@ -18,18 +18,17 @@ """ import json -from typing import Optional import typing import yaml import os import uuid from kubernetes import client from ...common import _kube_api_error_handling +from ...common.kueue import add_queue_label from ...common.kubernetes_cluster.auth import ( get_api_client, config_check, ) -from kubernetes.client.exceptions import ApiException import codeflare_sdk @@ -229,65 +228,6 @@ def del_from_list_by_name(l: list, target: typing.List[str]) -> list: return [x for x in l if x["name"] not in target] -def get_default_kueue_name(namespace: str): - # If the local queue is set, use it. Otherwise, try to use the default queue. - try: - config_check() - api_instance = client.CustomObjectsApi(get_api_client()) - local_queues = api_instance.list_namespaced_custom_object( - group="kueue.x-k8s.io", - version="v1beta1", - namespace=namespace, - plural="localqueues", - ) - except ApiException as e: # pragma: no cover - if e.status == 404 or e.status == 403: - return - else: - return _kube_api_error_handling(e) - for lq in local_queues["items"]: - if ( - "annotations" in lq["metadata"] - and "kueue.x-k8s.io/default-queue" in lq["metadata"]["annotations"] - and lq["metadata"]["annotations"]["kueue.x-k8s.io/default-queue"].lower() - == "true" - ): - return lq["metadata"]["name"] - - -def local_queue_exists(namespace: str, local_queue_name: str): - # get all local queues in the namespace - try: - config_check() - api_instance = client.CustomObjectsApi(get_api_client()) - local_queues = api_instance.list_namespaced_custom_object( - group="kueue.x-k8s.io", - version="v1beta1", - namespace=namespace, - plural="localqueues", - ) - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - # check if local queue with the name provided in cluster config exists - for lq in local_queues["items"]: - if lq["metadata"]["name"] == local_queue_name: - return True - return False - - -def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]): - lq_name = local_queue or get_default_kueue_name(namespace) - if lq_name == None: - return - elif not local_queue_exists(namespace, lq_name): - raise ValueError( - "local_queue provided does not exist or is not in this namespace. 
Please provide the correct local_queue name in Cluster Configuration" - ) - if not "labels" in item["metadata"]: - item["metadata"]["labels"] = {} - item["metadata"]["labels"].update({"kueue.x-k8s.io/queue-name": lq_name}) - - def augment_labels(item: dict, labels: dict): if not "labels" in item["metadata"]: item["metadata"]["labels"] = {} diff --git a/tests/unit_test.py b/tests/unit_test.py index 235eed0e..74da56b7 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -968,7 +968,7 @@ def test_ray_details(mocker, capsys): return_value="", ) mocker.patch( - "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", + "codeflare_sdk.common.kueue.kueue.local_queue_exists", return_value="true", ) cf = Cluster( @@ -2007,7 +2007,7 @@ def test_get_cluster_openshift(mocker): ] mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) mocker.patch( - "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", + "codeflare_sdk.common.kueue.kueue.local_queue_exists", return_value="true", ) @@ -2042,7 +2042,7 @@ def custom_side_effect(group, version, namespace, plural, **kwargs): ], ) mocker.patch( - "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", + "codeflare_sdk.common.kueue.kueue.local_queue_exists", return_value="true", ) @@ -2085,7 +2085,7 @@ def test_get_cluster(mocker): return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), ) mocker.patch( - "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", + "codeflare_sdk.common.kueue.kueue.local_queue_exists", return_value="true", ) cluster = get_cluster("quicktest") @@ -2123,7 +2123,7 @@ def test_get_cluster_no_mcad(mocker): return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), ) mocker.patch( - "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", + "codeflare_sdk.common.kueue.kueue.local_queue_exists", return_value="true", ) cluster = get_cluster("quicktest") @@ -2359,7 +2359,7 @@ def test_cluster_status(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( - "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", + "codeflare_sdk.common.kueue.kueue.local_queue_exists", return_value="true", ) fake_aw = AppWrapper("test", AppWrapperStatus.FAILED) @@ -2462,7 +2462,7 @@ def test_wait_ready(mocker, capsys): "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=None ) mocker.patch( - "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", + "codeflare_sdk.common.kueue.kueue.local_queue_exists", return_value="true", ) mocker.patch.object( @@ -2694,11 +2694,11 @@ def test_cluster_throw_for_no_raycluster(mocker: MockerFixture): return_value="opendatahub", ) mocker.patch( - "codeflare_sdk.ray.cluster.generate_yaml.get_default_kueue_name", + "codeflare_sdk.common.kueue.kueue.get_default_kueue_name", return_value="default", ) mocker.patch( - "codeflare_sdk.ray.cluster.generate_yaml.local_queue_exists", + "codeflare_sdk.common.kueue.kueue.local_queue_exists", return_value="true", ) From d4d0dce14b51f7bbb3565da4cfbd6d4bcc442405 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 2 Oct 2024 16:34:10 +0100 Subject: [PATCH 360/496] Refactor: widgets module --- src/codeflare_sdk/__init__.py | 2 +- .../{cluster => common/widgets}/__init__.py | 0 .../{cluster => common/widgets}/widgets.py | 22 +++++++++---------- src/codeflare_sdk/ray/cluster/cluster.py | 4 ++-- tests/unit_test.py | 20 +++++++---------- 5 files changed, 22 insertions(+), 26 
deletions(-) rename src/codeflare_sdk/{cluster => common/widgets}/__init__.py (100%) rename src/codeflare_sdk/{cluster => common/widgets}/widgets.py (96%) diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index 599171a9..a1b5535c 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -12,7 +12,7 @@ RayJobClient, ) -from .cluster import view_clusters +from .common.widgets import view_clusters from .common import ( Authentication, diff --git a/src/codeflare_sdk/cluster/__init__.py b/src/codeflare_sdk/common/widgets/__init__.py similarity index 100% rename from src/codeflare_sdk/cluster/__init__.py rename to src/codeflare_sdk/common/widgets/__init__.py diff --git a/src/codeflare_sdk/cluster/widgets.py b/src/codeflare_sdk/common/widgets/widgets.py similarity index 96% rename from src/codeflare_sdk/cluster/widgets.py rename to src/codeflare_sdk/common/widgets/widgets.py index d827c661..64e5dea9 100644 --- a/src/codeflare_sdk/cluster/widgets.py +++ b/src/codeflare_sdk/common/widgets/widgets.py @@ -26,10 +26,10 @@ import ipywidgets as widgets from IPython.display import display, HTML, Javascript import pandas as pd -from ..ray.cluster.config import ClusterConfiguration -from ..ray.cluster.status import RayClusterStatus -from ..common import _kube_api_error_handling -from ..common.kubernetes_cluster.auth import ( +from ...ray.cluster.config import ClusterConfiguration +from ...ray.cluster.status import RayClusterStatus +from ..kubernetes_cluster import _kube_api_error_handling +from ..kubernetes_cluster.auth import ( config_check, get_api_client, ) @@ -58,7 +58,7 @@ def cluster_up_down_buttons( icon="trash", ) - wait_ready_check = wait_ready_check_box() + wait_ready_check = _wait_ready_check_box() output = widgets.Output() # Display the buttons in an HBox wrapped in a VBox which includes the wait_ready Checkbox @@ -83,7 +83,7 @@ def on_down_button_clicked(b): # Handle the down button click event delete_button.on_click(on_down_button_clicked) -def wait_ready_check_box(): +def _wait_ready_check_box(): """ The wait_ready_check_box function will return a checkbox widget used for waiting for the resource to be in the state READY. """ @@ -117,7 +117,7 @@ def view_clusters(namespace: str = None): ) return # Exit function if not in Jupyter Notebook - from ..ray.cluster.cluster import get_current_namespace + from ...ray.cluster.cluster import get_current_namespace if not namespace: namespace = get_current_namespace() @@ -280,7 +280,7 @@ def _on_ray_dashboard_button_click( """ _on_ray_dashboard_button_click handles the event when the Open Ray Dashboard button is clicked, opening the Ray Dashboard in a new tab """ - from codeflare_sdk.ray.cluster import Cluster + from codeflare_sdk import Cluster cluster_name = classification_widget.value namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][ @@ -311,7 +311,7 @@ def _on_list_jobs_button_click( """ _on_list_jobs_button_click handles the event when the View Jobs button is clicked, opening the Ray Jobs Dashboard in a new tab """ - from codeflare_sdk.ray.cluster import Cluster + from codeflare_sdk import Cluster cluster_name = classification_widget.value namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][ @@ -344,7 +344,7 @@ def _delete_cluster( _delete_cluster function deletes the cluster with the given name and namespace. It optionally waits for the cluster to be deleted. 
""" - from ..ray.cluster.cluster import _check_aw_exists + from ...ray.cluster.cluster import _check_aw_exists try: config_check() @@ -402,7 +402,7 @@ def _fetch_cluster_data(namespace): """ _fetch_cluster_data function fetches all clusters and their spec in a given namespace and returns a DataFrame. """ - from ..ray.cluster.cluster import list_all_clusters + from ...ray.cluster.cluster import list_all_clusters rayclusters = list_all_clusters(namespace, False) if not rayclusters: diff --git a/src/codeflare_sdk/ray/cluster/cluster.py b/src/codeflare_sdk/ray/cluster/cluster.py index 4d8201f7..da87639c 100644 --- a/src/codeflare_sdk/ray/cluster/cluster.py +++ b/src/codeflare_sdk/ray/cluster/cluster.py @@ -1,4 +1,4 @@ -# Copyright 2022 IBM, Red Hat +# Copyright 2024 IBM, Red Hat # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -45,7 +45,7 @@ AppWrapper, AppWrapperStatus, ) -from ...cluster.widgets import ( +from ...common.widgets.widgets import ( cluster_up_down_buttons, is_notebook, ) diff --git a/tests/unit_test.py b/tests/unit_test.py index 74da56b7..fae4865f 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1,4 +1,4 @@ -# Copyright 2022 IBM, Red Hat +# Copyright 2024 IBM, Red Hat # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,7 +79,7 @@ is_openshift_cluster, ) -import codeflare_sdk.cluster.widgets as cf_widgets +import codeflare_sdk.common.widgets.widgets as cf_widgets import pandas as pd import openshift @@ -2959,24 +2959,20 @@ def test_cluster_up_down_buttons(mocker): @patch.dict("os.environ", {}, clear=True) # Mock environment with no variables def test_is_notebook_false(): - from codeflare_sdk.cluster.widgets import is_notebook - - assert is_notebook() is False + assert cf_widgets.is_notebook() is False @patch.dict( "os.environ", {"JPY_SESSION_NAME": "example-test"} ) # Mock Jupyter environment variable def test_is_notebook_true(): - from codeflare_sdk.cluster.widgets import is_notebook - - assert is_notebook() is True + assert cf_widgets.is_notebook() is True def test_view_clusters(mocker, capsys): from kubernetes.client.rest import ApiException - mocker.patch("codeflare_sdk.cluster.widgets.is_notebook", return_value=False) + mocker.patch("codeflare_sdk.common.widgets.widgets.is_notebook", return_value=False) with pytest.warns( UserWarning, match="view_clusters can only be used in a Jupyter Notebook environment.", @@ -2985,7 +2981,7 @@ def test_view_clusters(mocker, capsys): # Assert the function returns None when not in a notebook environment assert result is None - mocker.patch("codeflare_sdk.cluster.widgets.is_notebook", return_value=True) + mocker.patch("codeflare_sdk.common.widgets.widgets.is_notebook", return_value=True) # Mock Kubernetes API responses mocker.patch("kubernetes.client.ApisApi.get_api_versions") @@ -3030,7 +3026,7 @@ def test_view_clusters(mocker, capsys): # Mock the _fetch_cluster_data function to return a test DataFrame mocker.patch( - "codeflare_sdk.cluster.widgets._fetch_cluster_data", return_value=test_df + "codeflare_sdk.common.widgets.widgets._fetch_cluster_data", return_value=test_df ) # Mock the Cluster class and related methods @@ -3048,7 +3044,7 @@ def test_view_clusters(mocker, capsys): ) as mock_display, patch( "IPython.display.HTML" ), patch( - "codeflare_sdk.cluster.widgets.Javascript" + "codeflare_sdk.common.widgets.widgets.Javascript" ) as mock_javascript: 
# Create mock widget instances mock_toggle = MagicMock() From 3385e348c76005d2fbb949283742a133ceb6bbb0 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 2 Oct 2024 19:22:58 +0100 Subject: [PATCH 361/496] Convert UI widgets to its own class --- src/codeflare_sdk/common/widgets/widgets.py | 382 +++++++++----------- 1 file changed, 179 insertions(+), 203 deletions(-) diff --git a/src/codeflare_sdk/common/widgets/widgets.py b/src/codeflare_sdk/common/widgets/widgets.py index 64e5dea9..c65a1721 100644 --- a/src/codeflare_sdk/common/widgets/widgets.py +++ b/src/codeflare_sdk/common/widgets/widgets.py @@ -34,6 +34,180 @@ get_api_client, ) +class RayClusterManagerWidgets: + def __init__(self, ray_clusters_df: pd.DataFrame, namespace: str = None): + from ...ray.cluster.cluster import get_current_namespace + + # Data + self.ray_clusters_df = ray_clusters_df + self.namespace = get_current_namespace() if not namespace else namespace + self.raycluster_data_output = widgets.Output() + self.user_output = widgets.Output() + self.url_output = widgets.Output() + + # Widgets + self.classification_widget = widgets.ToggleButtons( + options=ray_clusters_df["Name"].tolist(), + value=ray_clusters_df["Name"].tolist()[0], + description="Select an existing cluster:", + ) + self.delete_button = widgets.Button( + description="Delete Cluster", + icon="trash", + tooltip="Delete the selected cluster", + ) + self.list_jobs_button = widgets.Button( + description="View Jobs", + icon="suitcase", + tooltip="Open the Ray Job Dashboard", + ) + self.ray_dashboard_button = widgets.Button( + description="Open Ray Dashboard", + icon="dashboard", + tooltip="Open the Ray Dashboard in a new tab", + layout=widgets.Layout(width="auto"), + ) + + # Set up interactions + self._initialize_callbacks() + self._trigger_initial_display() + + def _initialize_callbacks(self): + # Observe cluster selection + self.classification_widget.observe( + lambda selection_change: self._on_cluster_click(selection_change), + names="value", + ) + # Set up button clicks + self.delete_button.on_click(lambda b: self._on_delete_button_click(b)) + self.list_jobs_button.on_click(lambda b: self._on_list_jobs_button_click(b)) + self.ray_dashboard_button.on_click( + lambda b: self._on_ray_dashboard_button_click(b) + ) + + def _trigger_initial_display(self): + # Trigger display with initial cluster value + initial_value = self.classification_widget.value + self._on_cluster_click({"new": initial_value}) + + def _on_cluster_click(self, selection_change): + """ + _on_cluster_click handles the event when a cluster is selected from the toggle buttons, updating the output with cluster details. + """ + new_value = selection_change["new"] + self.raycluster_data_output.clear_output() + ray_clusters_df = _fetch_cluster_data(self.namespace) + self.classification_widget.options = ray_clusters_df["Name"].tolist() + with self.raycluster_data_output: + display( + HTML( + ray_clusters_df[ray_clusters_df["Name"] == new_value][ + [ + "Name", + "Namespace", + "Num Workers", + "Head GPUs", + "Head CPU Req~Lim", + "Head Memory Req~Lim", + "Worker GPUs", + "Worker CPU Req~Lim", + "Worker Memory Req~Lim", + "status", + ] + ].to_html(escape=False, index=False, border=2) + ) + ) + + def _on_delete_button_click(self, b): + """ + _on_delete_button_click handles the event when the Delete Button is clicked, deleting the selected cluster. 
+ """ + cluster_name = self.classification_widget.value + namespace = self.ray_clusters_df[ + self.ray_clusters_df["Name"] == self.classification_widget.value + ]["Namespace"].values[0] + + _delete_cluster(cluster_name, namespace) + + with self.user_output: + self.user_output.clear_output() + print( + f"Cluster {cluster_name} in the {namespace} namespace was deleted successfully." + ) + + # Refresh the dataframe + new_df = _fetch_cluster_data(namespace) + if new_df.empty: + self.classification_widget.close() + self.delete_button.close() + self.list_jobs_button.close() + self.ray_dashboard_button.close() + with self.raycluster_data_output: + self.raycluster_data_output.clear_output() + print(f"No clusters found in the {namespace} namespace.") + else: + self.classification_widget.options = new_df["Name"].tolist() + + def _on_list_jobs_button_click(self, b): + """ + _on_list_jobs_button_click handles the event when the View Jobs button is clicked, opening the Ray Jobs Dashboard in a new tab + """ + from codeflare_sdk import Cluster + + cluster_name = self.classification_widget.value + namespace = self.ray_clusters_df[ + self.ray_clusters_df["Name"] == self.classification_widget.value + ]["Namespace"].values[0] + + # Suppress from Cluster Object initialisation widgets and outputs + with widgets.Output(), contextlib.redirect_stdout( + io.StringIO() + ), contextlib.redirect_stderr(io.StringIO()): + cluster = Cluster(ClusterConfiguration(cluster_name, namespace)) + dashboard_url = cluster.cluster_dashboard_uri() + + with self.user_output: + self.user_output.clear_output() + print( + f"Opening Ray Jobs Dashboard for {cluster_name} cluster:\n{dashboard_url}/#/jobs" + ) + with self.url_output: + display(Javascript(f'window.open("{dashboard_url}/#/jobs", "_blank");')) + + def _on_ray_dashboard_button_click(self, b): + """ + _on_ray_dashboard_button_click handles the event when the Open Ray Dashboard button is clicked, opening the Ray Dashboard in a new tab + """ + from codeflare_sdk import Cluster + + cluster_name = self.classification_widget.value + namespace = self.ray_clusters_df[ + self.ray_clusters_df["Name"] == self.classification_widget.value + ]["Namespace"].values[0] + + # Suppress from Cluster Object initialisation widgets and outputs + with widgets.Output(), contextlib.redirect_stdout( + io.StringIO() + ), contextlib.redirect_stderr(io.StringIO()): + cluster = Cluster(ClusterConfiguration(cluster_name, namespace)) + dashboard_url = cluster.cluster_dashboard_uri() + + with self.user_output: + self.user_output.clear_output() + print(f"Opening Ray Dashboard for {cluster_name} cluster:\n{dashboard_url}") + with self.url_output: + display(Javascript(f'window.open("{dashboard_url}", "_blank");')) + + def display_widgets(self): + display(widgets.VBox([self.classification_widget, self.raycluster_data_output])) + display( + widgets.HBox( + [self.delete_button, self.list_jobs_button, self.ray_dashboard_button] + ), + self.url_output, + self.user_output, + ) + def cluster_up_down_buttons( cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", @@ -122,216 +296,18 @@ def view_clusters(namespace: str = None): if not namespace: namespace = get_current_namespace() - user_output = widgets.Output() - raycluster_data_output = widgets.Output() - url_output = widgets.Output() - ray_clusters_df = _fetch_cluster_data(namespace) if ray_clusters_df.empty: print(f"No clusters found in the {namespace} namespace.") return - classification_widget = widgets.ToggleButtons( - options=ray_clusters_df["Name"].tolist(), - 
value=ray_clusters_df["Name"].tolist()[0], - description="Select an existing cluster:", - ) - # Setting the initial value to trigger the event handler to display the cluster details. - initial_value = classification_widget.value - _on_cluster_click( - {"new": initial_value}, raycluster_data_output, namespace, classification_widget - ) - classification_widget.observe( - lambda selection_change: _on_cluster_click( - selection_change, raycluster_data_output, namespace, classification_widget - ), - names="value", - ) - - # UI table buttons - delete_button = widgets.Button( - description="Delete Cluster", - icon="trash", - tooltip="Delete the selected cluster", - ) - delete_button.on_click( - lambda b: _on_delete_button_click( - b, - classification_widget, - ray_clusters_df, - raycluster_data_output, - user_output, - delete_button, - list_jobs_button, - ray_dashboard_button, - ) - ) - - list_jobs_button = widgets.Button( - description="View Jobs", icon="suitcase", tooltip="Open the Ray Job Dashboard" - ) - list_jobs_button.on_click( - lambda b: _on_list_jobs_button_click( - b, classification_widget, ray_clusters_df, user_output, url_output - ) - ) - - ray_dashboard_button = widgets.Button( - description="Open Ray Dashboard", - icon="dashboard", - tooltip="Open the Ray Dashboard in a new tab", - layout=widgets.Layout(width="auto"), - ) - ray_dashboard_button.on_click( - lambda b: _on_ray_dashboard_button_click( - b, classification_widget, ray_clusters_df, user_output, url_output - ) - ) - - display(widgets.VBox([classification_widget, raycluster_data_output])) - display( - widgets.HBox([delete_button, list_jobs_button, ray_dashboard_button]), - url_output, - user_output, + # Initialize the RayClusterManagerWidgets class + ray_cluster_manager = RayClusterManagerWidgets( + ray_clusters_df=ray_clusters_df, namespace=namespace ) - -def _on_cluster_click( - selection_change, - raycluster_data_output: widgets.Output, - namespace: str, - classification_widget: widgets.ToggleButtons, -): - """ - _on_cluster_click handles the event when a cluster is selected from the toggle buttons, updating the output with cluster details. - """ - new_value = selection_change["new"] - raycluster_data_output.clear_output() - ray_clusters_df = _fetch_cluster_data(namespace) - classification_widget.options = ray_clusters_df["Name"].tolist() - with raycluster_data_output: - display( - HTML( - ray_clusters_df[ray_clusters_df["Name"] == new_value][ - [ - "Name", - "Namespace", - "Num Workers", - "Head GPUs", - "Head CPU Req~Lim", - "Head Memory Req~Lim", - "Worker GPUs", - "Worker CPU Req~Lim", - "Worker Memory Req~Lim", - "status", - ] - ].to_html(escape=False, index=False, border=2) - ) - ) - - -def _on_delete_button_click( - b, - classification_widget: widgets.ToggleButtons, - ray_clusters_df: pd.DataFrame, - raycluster_data_output: widgets.Output, - user_output: widgets.Output, - delete_button: widgets.Button, - list_jobs_button: widgets.Button, - ray_dashboard_button: widgets.Button, -): - """ - _on_delete_button_click handles the event when the Delete Button is clicked, deleting the selected cluster. - """ - cluster_name = classification_widget.value - namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][ - "Namespace" - ].values[0] - - _delete_cluster(cluster_name, namespace) - - with user_output: - user_output.clear_output() - print( - f"Cluster {cluster_name} in the {namespace} namespace was deleted successfully." 
- ) - - # Refresh the dataframe - new_df = _fetch_cluster_data(namespace) - if new_df.empty: - classification_widget.close() - delete_button.close() - list_jobs_button.close() - ray_dashboard_button.close() - with raycluster_data_output: - raycluster_data_output.clear_output() - print(f"No clusters found in the {namespace} namespace.") - else: - classification_widget.options = new_df["Name"].tolist() - - -def _on_ray_dashboard_button_click( - b, - classification_widget: widgets.ToggleButtons, - ray_clusters_df: pd.DataFrame, - user_output: widgets.Output, - url_output: widgets.Output, -): - """ - _on_ray_dashboard_button_click handles the event when the Open Ray Dashboard button is clicked, opening the Ray Dashboard in a new tab - """ - from codeflare_sdk import Cluster - - cluster_name = classification_widget.value - namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][ - "Namespace" - ].values[0] - - # Suppress from Cluster Object initialisation widgets and outputs - with widgets.Output(), contextlib.redirect_stdout( - io.StringIO() - ), contextlib.redirect_stderr(io.StringIO()): - cluster = Cluster(ClusterConfiguration(cluster_name, namespace)) - dashboard_url = cluster.cluster_dashboard_uri() - - with user_output: - user_output.clear_output() - print(f"Opening Ray Dashboard for {cluster_name} cluster:\n{dashboard_url}") - with url_output: - display(Javascript(f'window.open("{dashboard_url}", "_blank");')) - - -def _on_list_jobs_button_click( - b, - classification_widget: widgets.ToggleButtons, - ray_clusters_df: pd.DataFrame, - user_output: widgets.Output, - url_output: widgets.Output, -): - """ - _on_list_jobs_button_click handles the event when the View Jobs button is clicked, opening the Ray Jobs Dashboard in a new tab - """ - from codeflare_sdk import Cluster - - cluster_name = classification_widget.value - namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][ - "Namespace" - ].values[0] - - # Suppress from Cluster Object initialisation widgets and outputs - with widgets.Output(), contextlib.redirect_stdout( - io.StringIO() - ), contextlib.redirect_stderr(io.StringIO()): - cluster = Cluster(ClusterConfiguration(cluster_name, namespace)) - dashboard_url = cluster.cluster_dashboard_uri() - - with user_output: - user_output.clear_output() - print( - f"Opening Ray Jobs Dashboard for {cluster_name} cluster:\n{dashboard_url}/#/jobs" - ) - with url_output: - display(Javascript(f'window.open("{dashboard_url}/#/jobs", "_blank");')) + # Display the UI components + ray_cluster_manager.display_widgets() def _delete_cluster( From 693a019db3c2c79dda7abeb302df8368b74f4022 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 4 Oct 2024 11:16:24 +0100 Subject: [PATCH 362/496] Enhance widgets unit tests and increase coverage --- src/codeflare_sdk/common/widgets/widgets.py | 18 +- tests/unit_test.py | 347 ++++++++++++++------ 2 files changed, 254 insertions(+), 111 deletions(-) diff --git a/src/codeflare_sdk/common/widgets/widgets.py b/src/codeflare_sdk/common/widgets/widgets.py index c65a1721..cf7bccf9 100644 --- a/src/codeflare_sdk/common/widgets/widgets.py +++ b/src/codeflare_sdk/common/widgets/widgets.py @@ -34,6 +34,7 @@ get_api_client, ) + class RayClusterManagerWidgets: def __init__(self, ray_clusters_df: pd.DataFrame, namespace: str = None): from ...ray.cluster.cluster import get_current_namespace @@ -137,6 +138,7 @@ def _on_delete_button_click(self, b): # Refresh the dataframe new_df = _fetch_cluster_data(namespace) + 
self.ray_clusters_df = new_df if new_df.empty: self.classification_widget.close() self.delete_button.close() @@ -387,15 +389,19 @@ def _fetch_cluster_data(namespace): namespaces = [item.namespace for item in rayclusters] num_workers = [item.num_workers for item in rayclusters] head_extended_resources = [ - f"{list(item.head_extended_resources.keys())[0]}: {list(item.head_extended_resources.values())[0]}" - if item.head_extended_resources - else "0" + ( + f"{list(item.head_extended_resources.keys())[0]}: {list(item.head_extended_resources.values())[0]}" + if item.head_extended_resources + else "0" + ) for item in rayclusters ] worker_extended_resources = [ - f"{list(item.worker_extended_resources.keys())[0]}: {list(item.worker_extended_resources.values())[0]}" - if item.worker_extended_resources - else "0" + ( + f"{list(item.worker_extended_resources.keys())[0]}: {list(item.worker_extended_resources.values())[0]}" + if item.worker_extended_resources + else "0" + ) for item in rayclusters ] head_cpu_requests = [ diff --git a/tests/unit_test.py b/tests/unit_test.py index fae4865f..4c103e01 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -19,6 +19,7 @@ import os import re import uuid +from io import StringIO from codeflare_sdk.ray.cluster import cluster @@ -2970,146 +2971,282 @@ def test_is_notebook_true(): def test_view_clusters(mocker, capsys): - from kubernetes.client.rest import ApiException - - mocker.patch("codeflare_sdk.common.widgets.widgets.is_notebook", return_value=False) + # If is not a notebook environment, a warning should be raised with pytest.warns( UserWarning, match="view_clusters can only be used in a Jupyter Notebook environment.", ): - result = cf_widgets.view_clusters(namespace="default") + result = cf_widgets.view_clusters("default") + # Assert the function returns None when not in a notebook environment assert result is None + # Prepare to run view_clusters when notebook environment is detected mocker.patch("codeflare_sdk.common.widgets.widgets.is_notebook", return_value=True) + mock_get_current_namespace = mocker.patch( + "codeflare_sdk.ray.cluster.cluster.get_current_namespace", + return_value="default", + ) + namespace = mock_get_current_namespace.return_value + + # Assert the function returns None when no clusters are found + mock_fetch_cluster_data = mocker.patch( + "codeflare_sdk.common.widgets.widgets._fetch_cluster_data", + return_value=pd.DataFrame(), + ) + result = cf_widgets.view_clusters() + captured = capsys.readouterr() + assert mock_fetch_cluster_data.return_value.empty + assert "No clusters found in the default namespace." 
in captured.out + assert result is None + + # Prepare to run view_clusters with a test DataFrame + mock_fetch_cluster_data = mocker.patch( + "codeflare_sdk.common.widgets.widgets._fetch_cluster_data", + return_value=pd.DataFrame( + { + "Name": ["test-cluster"], + "Namespace": ["default"], + "Num Workers": ["1"], + "Head GPUs": ["0"], + "Worker GPUs": ["0"], + "Head CPU Req~Lim": ["1~1"], + "Head Memory Req~Lim": ["1Gi~1Gi"], + "Worker CPU Req~Lim": ["1~1"], + "Worker Memory Req~Lim": ["1Gi~1Gi"], + "status": ['Ready ✓'], + } + ), + ) + # Create a RayClusterManagerWidgets instance + ray_cluster_manager_instance = cf_widgets.RayClusterManagerWidgets( + ray_clusters_df=mock_fetch_cluster_data.return_value, namespace=namespace + ) + # Patch the constructor of RayClusterManagerWidgets to return our initialized instance + mock_constructor = mocker.patch( + "codeflare_sdk.common.widgets.widgets.RayClusterManagerWidgets", + return_value=ray_cluster_manager_instance, + ) + + # Use a spy to track calls to display_widgets without replacing it + spy_display_widgets = mocker.spy(ray_cluster_manager_instance, "display_widgets") - # Mock Kubernetes API responses + cf_widgets.view_clusters() + + mock_constructor.assert_called_once_with( + ray_clusters_df=mock_fetch_cluster_data.return_value, namespace=namespace + ) + + spy_display_widgets.assert_called_once() + + +def test_delete_cluster(mocker, capsys): + name = "test-cluster" + namespace = "default" + + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch("kubernetes.client.ApisApi.get_api_versions") + + mock_ray_cluster = MagicMock() mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value={"items": []}, + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + side_effect=[ + mock_ray_cluster, + client.ApiException(status=404), + client.ApiException(status=404), + mock_ray_cluster, + ], ) + + # In this scenario, the RayCluster exists and the AppWrapper does not. mocker.patch( "codeflare_sdk.ray.cluster.cluster._check_aw_exists", return_value=False ) + mock_delete_rc = mocker.patch( + "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object" + ) + cf_widgets._delete_cluster(name, namespace) - # Return empty dataframe when no clusters are found - mocker.patch("codeflare_sdk.ray.cluster.cluster.list_all_clusters", return_value=[]) + mock_delete_rc.assert_called_once_with( + group="ray.io", + version="v1", + namespace=namespace, + plural="rayclusters", + name=name, + ) + + # In this scenario, the AppWrapper exists and the RayCluster does not mocker.patch( - "codeflare_sdk.ray.cluster.cluster.get_current_namespace", - return_value="default", + "codeflare_sdk.ray.cluster.cluster._check_aw_exists", return_value=True ) - df = cf_widgets._fetch_cluster_data(namespace="default") - assert df.empty + mock_delete_aw = mocker.patch( + "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object" + ) + cf_widgets._delete_cluster(name, namespace) - cf_widgets.view_clusters() - captured = capsys.readouterr() - assert f"No clusters found in the default namespace." in captured.out + mock_delete_aw.assert_called_once_with( + group="workload.codeflare.dev", + version="v1beta2", + namespace=namespace, + plural="appwrappers", + name=name, + ) + + # In this scenario, the deletion of the resource times out. + with pytest.raises( + TimeoutError, match=f"Timeout waiting for {name} to be deleted." 
+ ): + cf_widgets._delete_cluster(name, namespace, 1) - # Assert the function returns None - assert result is None - test_df = pd.DataFrame( +def test_ray_cluster_manager_widgets_init(mocker, capsys): + namespace = "default" + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + test_ray_clusters_df = pd.DataFrame( { - "Name": ["test-cluster"], - "Namespace": ["default"], - "Num Workers": ["1"], - "Head GPUs": ["0"], - "Worker GPUs": ["0"], - "Head CPU Req~Lim": ["1~1"], - "Head Memory Req~Lim": ["1Gi~1Gi"], - "Worker CPU Req~Lim": ["1~1"], - "Worker Memory Req~Lim": ["1Gi~1Gi"], - "status": ['Ready ✓'], + "Name": ["test-cluster-1", "test-cluster-2"], + "Namespace": [namespace, namespace], + "Num Workers": ["1", "2"], + "Head GPUs": ["0", "0"], + "Worker GPUs": ["0", "0"], + "Head CPU Req~Lim": ["1~1", "1~1"], + "Head Memory Req~Lim": ["1Gi~1Gi", "1Gi~1Gi"], + "Worker CPU Req~Lim": ["1~1", "1~1"], + "Worker Memory Req~Lim": ["1Gi~1Gi", "1Gi~1Gi"], + "status": [ + 'Ready ✓', + 'Ready ✓', + ], } ) - - # Mock the _fetch_cluster_data function to return a test DataFrame + mock_fetch_cluster_data = mocker.patch( + "codeflare_sdk.common.widgets.widgets._fetch_cluster_data", + return_value=test_ray_clusters_df, + ) mocker.patch( - "codeflare_sdk.common.widgets.widgets._fetch_cluster_data", return_value=test_df + "codeflare_sdk.ray.cluster.cluster.get_current_namespace", + return_value=namespace, + ) + mock_delete_cluster = mocker.patch( + "codeflare_sdk.common.widgets.widgets._delete_cluster" ) - # Mock the Cluster class and related methods - mocker.patch("codeflare_sdk.ray.cluster.Cluster") - mocker.patch("codeflare_sdk.ray.cluster.ClusterConfiguration") + # # Mock ToggleButtons + mock_toggle_buttons = mocker.patch("ipywidgets.ToggleButtons") + mock_button = mocker.patch("ipywidgets.Button") + mock_output = mocker.patch("ipywidgets.Output") - with patch("ipywidgets.ToggleButtons") as MockToggleButtons, patch( - "ipywidgets.Button" - ) as MockButton, patch("ipywidgets.Output") as MockOutput, patch( - "ipywidgets.HBox" - ), patch( - "ipywidgets.VBox" - ), patch( - "IPython.display.display" - ) as mock_display, patch( - "IPython.display.HTML" - ), patch( - "codeflare_sdk.common.widgets.widgets.Javascript" - ) as mock_javascript: - # Create mock widget instances - mock_toggle = MagicMock() - mock_delete_button = MagicMock() - mock_list_jobs_button = MagicMock() - mock_ray_dashboard_button = MagicMock() - mock_output = MagicMock() - - # Set the return values for the mocked widgets - MockToggleButtons.return_value = mock_toggle - MockButton.side_effect = [ - mock_delete_button, - mock_list_jobs_button, - mock_ray_dashboard_button, - ] - MockOutput.return_value = mock_output + # Initialize the RayClusterManagerWidgets instance + ray_cluster_manager_instance = cf_widgets.RayClusterManagerWidgets( + ray_clusters_df=test_ray_clusters_df, namespace=namespace + ) - # Call the function under test - cf_widgets.view_clusters() + # Assertions for DataFrame and attributes + assert ray_cluster_manager_instance.ray_clusters_df.equals( + test_ray_clusters_df + ), "ray_clusters_df attribute does not match the input DataFrame" + assert ( + ray_cluster_manager_instance.namespace == namespace + ), f"Expected namespace to be '{namespace}', but got '{ray_cluster_manager_instance.namespace}'" + assert ( + 
ray_cluster_manager_instance.classification_widget.options + == test_ray_clusters_df["Name"].tolist() + ), "classification_widget options do not match the input DataFrame" - # Simulate selecting a cluster - mock_toggle.value = "test-cluster" - selection_change = {"new": "test-cluster"} - cf_widgets._on_cluster_click( - selection_change, mock_output, "default", mock_toggle - ) + # Assertions for widgets + mock_toggle_buttons.assert_called_once_with( + options=test_ray_clusters_df["Name"].tolist(), + value=test_ray_clusters_df["Name"].tolist()[0], + description="Select an existing cluster:", + ) + assert ( + ray_cluster_manager_instance.classification_widget + == mock_toggle_buttons.return_value + ), "classification_widget is not set correctly" + assert ( + ray_cluster_manager_instance.delete_button == mock_button.return_value + ), "delete_button is not set correctly" + assert ( + ray_cluster_manager_instance.list_jobs_button == mock_button.return_value + ), "list_jobs_button is not set correctly" + assert ( + ray_cluster_manager_instance.ray_dashboard_button == mock_button.return_value + ), "ray_dashboard_button is not set correctly" + assert ( + ray_cluster_manager_instance.raycluster_data_output == mock_output.return_value + ), "raycluster_data_output is not set correctly" + assert ( + ray_cluster_manager_instance.user_output == mock_output.return_value + ), "user_output is not set correctly" + assert ( + ray_cluster_manager_instance.url_output == mock_output.return_value + ), "url_output is not set correctly" - # Assert that the toggle options are set correctly - mock_toggle.observe.assert_called() + ### Test button click events + mock_delete_button = MagicMock() + mock_list_jobs_button = MagicMock() + mock_ray_dashboard_button = MagicMock() - # Simulate clicking the list jobs button - cf_widgets._on_list_jobs_button_click( - None, mock_toggle, test_df, mock_output, mock_output - ) - mock_javascript.assert_called_once() + mock_javascript = mocker.patch("codeflare_sdk.common.widgets.widgets.Javascript") + ray_cluster_manager_instance.url_output = MagicMock() - # Simulate clicking the Ray dashboard button - cf_widgets._on_ray_dashboard_button_click( - None, mock_toggle, test_df, mock_output, mock_output - ) - mock_javascript.call_count = 2 + mock_dashboard_uri = mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.cluster_dashboard_uri", + return_value="https://ray-dashboard-test-cluster-1-ns.apps.cluster.awsroute.org", + ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object", - ) - mock_response = mocker.MagicMock() - mock_response.status = 404 - mock_exception = ApiException(http_resp=mock_response) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", - side_effect=mock_exception, - ) + # Simulate clicking the list jobs button + ray_cluster_manager_instance.classification_widget.value = "test-cluster-1" + ray_cluster_manager_instance._on_list_jobs_button_click(mock_list_jobs_button) - # Simulate clicking the delete button - cf_widgets._on_delete_button_click( - None, - mock_toggle, - test_df, - mock_output, - mock_output, - mock_delete_button, - mock_list_jobs_button, - mock_ray_dashboard_button, - ) - MockButton.call_count = 3 + captured = capsys.readouterr() + assert ( + f"Opening Ray Jobs Dashboard for test-cluster-1 cluster:\n{mock_dashboard_uri.return_value}/#/jobs" + in captured.out + ) + mock_javascript.assert_called_with( + f'window.open("{mock_dashboard_uri.return_value}/#/jobs", "_blank");' + ) + + # 
Simulate clicking the Ray dashboard button + ray_cluster_manager_instance.classification_widget.value = "test-cluster-1" + ray_cluster_manager_instance._on_ray_dashboard_button_click( + mock_ray_dashboard_button + ) + + captured = capsys.readouterr() + assert ( + f"Opening Ray Dashboard for test-cluster-1 cluster:\n{mock_dashboard_uri.return_value}" + in captured.out + ) + mock_javascript.assert_called_with( + f'window.open("{mock_dashboard_uri.return_value}", "_blank");' + ) + + # Simulate clicking the delete button + ray_cluster_manager_instance.classification_widget.value = "test-cluster-1" + ray_cluster_manager_instance._on_delete_button_click(mock_delete_button) + mock_delete_cluster.assert_called_with("test-cluster-1", namespace) + + mock_fetch_cluster_data.return_value = pd.DataFrame() + ray_cluster_manager_instance.classification_widget.value = "test-cluster-2" + ray_cluster_manager_instance._on_delete_button_click(mock_delete_button) + mock_delete_cluster.assert_called_with("test-cluster-2", namespace) + + # Assert on deletion that the dataframe is empty + assert ( + ray_cluster_manager_instance.ray_clusters_df.empty + ), "Expected DataFrame to be empty after deletion" + + captured = capsys.readouterr() + assert ( + f"Cluster test-cluster-1 in the {namespace} namespace was deleted successfully." + in captured.out + ) def test_fetch_cluster_data(mocker): From 455bea294b1835d17b2c0470a897ed2577c94389 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 4 Oct 2024 12:43:09 +0100 Subject: [PATCH 363/496] Add comments to widgets class and functions --- src/codeflare_sdk/common/widgets/widgets.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/codeflare_sdk/common/widgets/widgets.py b/src/codeflare_sdk/common/widgets/widgets.py index cf7bccf9..8a13a4d4 100644 --- a/src/codeflare_sdk/common/widgets/widgets.py +++ b/src/codeflare_sdk/common/widgets/widgets.py @@ -36,6 +36,12 @@ class RayClusterManagerWidgets: + """ + The RayClusterManagerWidgets class is responsible for initialising the ToggleButtons, Button, and Output widgets. + It also handles the user interactions and displays the cluster details. + Used when calling the view_clusters function. + """ + def __init__(self, ray_clusters_df: pd.DataFrame, namespace: str = None): from ...ray.cluster.cluster import get_current_namespace @@ -74,6 +80,10 @@ def __init__(self, ray_clusters_df: pd.DataFrame, namespace: str = None): self._trigger_initial_display() def _initialize_callbacks(self): + """ + Called upon RayClusterManagerWidgets initialisation. + Sets up event handlers and callbacks for UI interactions. + """ # Observe cluster selection self.classification_widget.observe( lambda selection_change: self._on_cluster_click(selection_change), @@ -87,6 +97,10 @@ def _initialize_callbacks(self): ) def _trigger_initial_display(self): + """ + Called upon RayClusterManagerWidgets initialisation. + Triggers an initial display update with the current cluster value. 
+ """ # Trigger display with initial cluster value initial_value = self.classification_widget.value self._on_cluster_click({"new": initial_value}) From 5cbe8a23447a144b6b7ead78af5576ef4706dbc6 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 4 Oct 2024 15:25:23 +0100 Subject: [PATCH 364/496] Remove unused imports and re-organise in unit_test.py --- tests/unit_test.py | 104 ++++++++++++++++++++------------------------- 1 file changed, 46 insertions(+), 58 deletions(-) diff --git a/tests/unit_test.py b/tests/unit_test.py index 4c103e01..1f11643b 100644 --- a/tests/unit_test.py +++ b/tests/unit_test.py @@ -1,4 +1,4 @@ -# Copyright 2024 IBM, Red Hat +# Copyright 2022 IBM, Red Hat # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,86 +13,73 @@ # limitations under the License. -from pathlib import Path -import sys import filecmp import os import re +import sys import uuid -from io import StringIO - -from codeflare_sdk.ray.cluster import cluster +from pathlib import Path parent = Path(__file__).resolve().parents[1] aw_dir = os.path.expanduser("~/.codeflare/resources/") sys.path.append(str(parent) + "/src") -from kubernetes import client, config, dynamic +from unittest.mock import MagicMock, patch + +import openshift +import pandas as pd +import pytest +import ray +import yaml +from kubernetes import client, config +from pytest_mock import MockerFixture +from ray.job_submission import JobSubmissionClient + +import codeflare_sdk.common.widgets.widgets as cf_widgets +from codeflare_sdk.common.kubernetes_cluster import ( + Authentication, + KubeConfigFileAuthentication, + TokenAuthentication, + config_check, +) +from codeflare_sdk.common.utils.generate_cert import ( + export_env, + generate_ca_cert, + generate_tls_cert, +) from codeflare_sdk.ray.appwrapper.awload import AWManager +from codeflare_sdk.ray.appwrapper.status import AppWrapper, AppWrapperStatus +from codeflare_sdk.ray.client.ray_jobs import RayJobClient from codeflare_sdk.ray.cluster.cluster import ( Cluster, ClusterConfiguration, + _app_wrapper_status, + _copy_to_ray, _map_to_ray_cluster, + _ray_cluster_status, + get_cluster, list_all_clusters, list_all_queued, - _copy_to_ray, - get_cluster, - _app_wrapper_status, - _ray_cluster_status, -) -from codeflare_sdk.common.kubernetes_cluster import ( - TokenAuthentication, - Authentication, - KubeConfigFileAuthentication, - config_check, ) +from codeflare_sdk.ray.cluster.generate_yaml import gen_names, is_openshift_cluster from codeflare_sdk.ray.cluster.pretty_print import ( - print_no_resources_found, print_app_wrappers_status, print_cluster_status, print_clusters, -) -from codeflare_sdk.ray.appwrapper.status import ( - AppWrapper, - AppWrapperStatus, + print_no_resources_found, ) from codeflare_sdk.ray.cluster.status import ( + CodeFlareClusterStatus, RayCluster, RayClusterStatus, - CodeFlareClusterStatus, ) -from codeflare_sdk.common.utils.generate_cert import ( - generate_ca_cert, - generate_tls_cert, - export_env, -) - from tests.unit_test_support import ( - createClusterWithConfig, createClusterConfig, + createClusterWithConfig, createClusterWrongType, get_package_and_version, ) -import codeflare_sdk.common.kubernetes_cluster.kube_api_helpers -from codeflare_sdk.ray.cluster.generate_yaml import ( - gen_names, - is_openshift_cluster, -) - -import codeflare_sdk.common.widgets.widgets as cf_widgets -import pandas as pd - -import openshift -from openshift.selector import Selector -import ray 
-import pytest -import yaml -from unittest.mock import MagicMock, patch -from pytest_mock import MockerFixture -from ray.job_submission import JobSubmissionClient -from codeflare_sdk.ray.client.ray_jobs import RayJobClient - # For mocking openshift client results fake_res = openshift.Result("fake") @@ -156,7 +143,7 @@ def test_token_auth_creation(): assert token_auth.skip_tls == False assert token_auth.ca_cert_path == f"{parent}/tests/auth-test.crt" - except Exception as e: + except Exception: assert 0 == 1 @@ -204,7 +191,7 @@ def test_config_check_no_config_file(mocker): mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.config_path", None) mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) - with pytest.raises(PermissionError) as e: + with pytest.raises(PermissionError): config_check() @@ -282,7 +269,7 @@ def test_config_creation(): def test_config_creation_wrong_type(): with pytest.raises(TypeError): - config = createClusterWrongType() + createClusterWrongType() def test_cluster_creation(mocker): @@ -890,7 +877,7 @@ def test_ray_job_wrapping(mocker): def test_print_no_resources(capsys): try: print_no_resources_found() - except: + except Exception: assert 1 == 0 captured = capsys.readouterr() assert captured.out == ( @@ -903,7 +890,7 @@ def test_print_no_resources(capsys): def test_print_no_cluster(capsys): try: print_cluster_status(None) - except: + except Exception: assert 1 == 0 captured = capsys.readouterr() assert captured.out == ( @@ -924,7 +911,7 @@ def test_print_appwrappers(capsys): ) try: print_app_wrappers_status([aw1, aw2]) - except: + except Exception: assert 1 == 0 captured = capsys.readouterr() assert captured.out == ( @@ -997,7 +984,7 @@ def test_ray_details(mocker, capsys): print_clusters([ray1, ray2]) print_cluster_status(ray1) print_cluster_status(ray2) - except: + except Exception: assert 0 == 1 captured = capsys.readouterr() assert captured.out == ( @@ -2602,13 +2589,14 @@ def test_AWManager_submit_remove(mocker, capsys): assert testaw.submitted == False -from cryptography.x509 import load_pem_x509_certificate import base64 + from cryptography.hazmat.primitives.serialization import ( - load_pem_private_key, Encoding, PublicFormat, + load_pem_private_key, ) +from cryptography.x509 import load_pem_x509_certificate def test_generate_ca_cert(): From 8537c2d8d067be7d66e37b1e5c27752bbe8724a6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Fri, 4 Oct 2024 14:57:55 +0000 Subject: [PATCH 365/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index c1490035..607d3de4 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 90% - 90% + 91% + 91% From 0d445fe8dbf40049b9f9bad24fe0d7e2647c6b1e Mon Sep 17 00:00:00 2001 From: Varsha Prasad Narsing Date: Mon, 30 Sep 2024 14:58:50 -0700 Subject: [PATCH 366/496] Add codecov for unit test coverage --- .github/workflows/unit-tests.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 5d12791b..bb010e2c 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -29,3 +29,8 @@ jobs: coverage run -m --source=src pytest -v tests/unit_test.py coverage=$(coverage report -m | tail -1 | tail -c 4 | head -c 2) if (( $coverage < 90 )); then echo "Coverage failed at ${coverage}%"; exit 1; else echo "Coverage passed, ${coverage}%"; fi + continue-on-error: true + - name: Upload to 
Codecov + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} From 73b8cb750a6ef60c35010ae43ac330c21393f88c Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 7 Oct 2024 10:28:19 +0100 Subject: [PATCH 367/496] fix: skip_tls authentication functionality Signed-off-by: Bobbins228 --- src/codeflare_sdk/common/kubernetes_cluster/auth.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/codeflare_sdk/common/kubernetes_cluster/auth.py b/src/codeflare_sdk/common/kubernetes_cluster/auth.py index 2525d2e3..e6146093 100644 --- a/src/codeflare_sdk/common/kubernetes_cluster/auth.py +++ b/src/codeflare_sdk/common/kubernetes_cluster/auth.py @@ -109,14 +109,15 @@ def login(self) -> str: configuration.host = self.server configuration.api_key["authorization"] = self.token - api_client = client.ApiClient(configuration) - if not self.skip_tls: - _client_with_cert(api_client, self.ca_cert_path) - else: + if self.skip_tls: urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) print("Insecure request warnings have been disabled") configuration.verify_ssl = False + api_client = client.ApiClient(configuration) + if not self.skip_tls: + _client_with_cert(api_client, self.ca_cert_path) + client.AuthenticationApi(api_client).get_api_group() config_path = None return "Logged into %s" % self.server From bff13a5eede423ac2fcc3f68a4ce3638eaf105fa Mon Sep 17 00:00:00 2001 From: Kevin Date: Tue, 8 Oct 2024 16:11:04 -0400 Subject: [PATCH 368/496] bugfix: int being interpreted directly to bytes not GB Signed-off-by: Kevin --- src/codeflare_sdk/ray/cluster/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index 3c59d593..a9fe4408 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -120,9 +120,9 @@ def __post_init__(self): ) self._validate_types() + self._memory_to_resource() self._memory_to_string() self._str_mem_no_unit_add_GB() - self._memory_to_resource() self._cpu_to_resource() self._gpu_to_resource() self._combine_extended_resource_mapping() From 7aa79e86c3a43bd7e7a1f1f1c374003e16356ef3 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 8 Oct 2024 16:27:59 +0100 Subject: [PATCH 369/496] refactor: codeflare sdk unit tests sort unit tests into individual files and made minor enhancements --- pyproject.toml | 2 + .../common/kubernetes_cluster/test_auth.py | 162 + src/codeflare_sdk/common/kueue/test_kueue.py | 137 + .../common/utils/test_generate_cert.py | 114 + .../common/utils/unit_test_support.py | 383 ++ .../common/widgets/test_widgets.py | 469 +++ .../ray/appwrapper/test_awload.py | 88 + .../ray/appwrapper/test_status.py | 104 + src/codeflare_sdk/ray/client/test_ray_jobs.py | 173 + src/codeflare_sdk/ray/cluster/test_cluster.py | 610 +++ src/codeflare_sdk/ray/cluster/test_config.py | 170 + .../ray/cluster/test_generate_yaml.py | 34 + .../ray/cluster/test_pretty_print.py | 208 + src/codeflare_sdk/ray/cluster/test_status.py | 114 + tests/demo_test.py | 62 - tests/func_test.py | 21 - .../appwrapper}/test-case-bad.yaml | 0 .../kueue/aw_kueue.yaml} | 16 +- .../kueue/ray_cluster_kueue.yaml | 157 + .../ray/default-appwrapper.yaml | 155 + .../ray/default-ray-cluster.yaml} | 30 +- .../ray/unit-test-all-params.yaml} | 54 +- .../support_clusters/test-aw-a.yaml} | 16 +- .../support_clusters/test-aw-b.yaml | 157 + .../support_clusters/test-rc-a.yaml} | 34 +- .../support_clusters/test-rc-b.yaml | 148 + 
tests/unit_test.py | 3350 ----------------- tests/unit_test_support.py | 60 - 28 files changed, 3455 insertions(+), 3573 deletions(-) create mode 100644 src/codeflare_sdk/common/kubernetes_cluster/test_auth.py create mode 100644 src/codeflare_sdk/common/kueue/test_kueue.py create mode 100644 src/codeflare_sdk/common/utils/test_generate_cert.py create mode 100644 src/codeflare_sdk/common/utils/unit_test_support.py create mode 100644 src/codeflare_sdk/common/widgets/test_widgets.py create mode 100644 src/codeflare_sdk/ray/appwrapper/test_awload.py create mode 100644 src/codeflare_sdk/ray/appwrapper/test_status.py create mode 100644 src/codeflare_sdk/ray/client/test_ray_jobs.py create mode 100644 src/codeflare_sdk/ray/cluster/test_cluster.py create mode 100644 src/codeflare_sdk/ray/cluster/test_config.py create mode 100644 src/codeflare_sdk/ray/cluster/test_generate_yaml.py create mode 100644 src/codeflare_sdk/ray/cluster/test_pretty_print.py create mode 100644 src/codeflare_sdk/ray/cluster/test_status.py delete mode 100644 tests/demo_test.py delete mode 100644 tests/func_test.py rename tests/{ => test_cluster_yamls/appwrapper}/test-case-bad.yaml (100%) rename tests/{test-case.yaml => test_cluster_yamls/kueue/aw_kueue.yaml} (93%) create mode 100644 tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml create mode 100644 tests/test_cluster_yamls/ray/default-appwrapper.yaml rename tests/{test-case-no-kueue-no-aw.yaml => test_cluster_yamls/ray/default-ray-cluster.yaml} (88%) rename tests/{test-case-custom-image.yaml => test_cluster_yamls/ray/unit-test-all-params.yaml} (83%) rename tests/{test-default-appwrapper.yaml => test_cluster_yamls/support_clusters/test-aw-a.yaml} (95%) create mode 100644 tests/test_cluster_yamls/support_clusters/test-aw-b.yaml rename tests/{test-case-no-mcad.yamls => test_cluster_yamls/support_clusters/test-rc-a.yaml} (86%) create mode 100644 tests/test_cluster_yamls/support_clusters/test-rc-b.yaml delete mode 100644 tests/unit_test.py delete mode 100644 tests/unit_test_support.py diff --git a/pyproject.toml b/pyproject.toml index 7417116d..37eb17a4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,3 +57,5 @@ markers = [ "nvidia_gpu" ] addopts = "--timeout=900" +testpaths = ["src/codeflare_sdk"] +collect_ignore = ["src/codeflare_sdk/common/utils/unit_test_support.py"] diff --git a/src/codeflare_sdk/common/kubernetes_cluster/test_auth.py b/src/codeflare_sdk/common/kubernetes_cluster/test_auth.py new file mode 100644 index 00000000..be9e90f5 --- /dev/null +++ b/src/codeflare_sdk/common/kubernetes_cluster/test_auth.py @@ -0,0 +1,162 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
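For orientation alongside the new authentication tests introduced here (and the skip_tls fix a few patches above), the following is a minimal illustrative sketch of the token-based login flow they exercise. It is an aside rather than patch content; the token, server URL, and CA bundle path are placeholders.

    # Hedged sketch only: the token, server, and CA path below are placeholders.
    from codeflare_sdk.common.kubernetes_cluster import TokenAuthentication

    auth = TokenAuthentication(
        token="sha256~example-token",             # placeholder token
        server="https://api.example.com:6443",    # placeholder API server
        skip_tls=False,                           # keep TLS verification enabled
        ca_cert_path="/etc/pki/tls/custom-certs/ca-bundle.crt",  # optional custom CA bundle
    )
    auth.login()   # returns "Logged into https://api.example.com:6443" on success
    # ... run SDK operations against the cluster ...
    auth.logout()  # returns "Successfully logged out of https://api.example.com:6443"
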
+ +from codeflare_sdk.common.kubernetes_cluster import ( + Authentication, + KubeConfigFileAuthentication, + TokenAuthentication, + config_check, +) +from kubernetes import client, config +import os +from pathlib import Path +import pytest + +parent = Path(__file__).resolve().parents[4] # project directory + + +def test_token_auth_creation(): + token_auth = TokenAuthentication(token="token", server="server") + assert token_auth.token == "token" + assert token_auth.server == "server" + assert token_auth.skip_tls == False + assert token_auth.ca_cert_path == None + + token_auth = TokenAuthentication(token="token", server="server", skip_tls=True) + assert token_auth.token == "token" + assert token_auth.server == "server" + assert token_auth.skip_tls == True + assert token_auth.ca_cert_path == None + + os.environ["CF_SDK_CA_CERT_PATH"] = "/etc/pki/tls/custom-certs/ca-bundle.crt" + token_auth = TokenAuthentication(token="token", server="server", skip_tls=False) + assert token_auth.token == "token" + assert token_auth.server == "server" + assert token_auth.skip_tls == False + assert token_auth.ca_cert_path == "/etc/pki/tls/custom-certs/ca-bundle.crt" + os.environ.pop("CF_SDK_CA_CERT_PATH") + + token_auth = TokenAuthentication( + token="token", + server="server", + skip_tls=False, + ca_cert_path=f"{parent}/tests/auth-test.crt", + ) + assert token_auth.token == "token" + assert token_auth.server == "server" + assert token_auth.skip_tls == False + assert token_auth.ca_cert_path == f"{parent}/tests/auth-test.crt" + + +def test_token_auth_login_logout(mocker): + mocker.patch.object(client, "ApiClient") + + token_auth = TokenAuthentication( + token="testtoken", server="testserver:6443", skip_tls=False, ca_cert_path=None + ) + assert token_auth.login() == ("Logged into testserver:6443") + assert token_auth.logout() == ("Successfully logged out of testserver:6443") + + +def test_token_auth_login_tls(mocker): + mocker.patch.object(client, "ApiClient") + + token_auth = TokenAuthentication( + token="testtoken", server="testserver:6443", skip_tls=True, ca_cert_path=None + ) + assert token_auth.login() == ("Logged into testserver:6443") + token_auth = TokenAuthentication( + token="testtoken", server="testserver:6443", skip_tls=False, ca_cert_path=None + ) + assert token_auth.login() == ("Logged into testserver:6443") + token_auth = TokenAuthentication( + token="testtoken", + server="testserver:6443", + skip_tls=False, + ca_cert_path=f"{parent}/tests/auth-test.crt", + ) + assert token_auth.login() == ("Logged into testserver:6443") + + os.environ["CF_SDK_CA_CERT_PATH"] = f"{parent}/tests/auth-test.crt" + token_auth = TokenAuthentication( + token="testtoken", + server="testserver:6443", + skip_tls=False, + ) + assert token_auth.login() == ("Logged into testserver:6443") + + +def test_config_check_no_config_file(mocker): + mocker.patch("os.path.expanduser", return_value="/mock/home/directory") + mocker.patch("os.path.isfile", return_value=False) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.config_path", None) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) + + with pytest.raises(PermissionError): + config_check() + + +def test_config_check_with_incluster_config(mocker): + mocker.patch("os.path.expanduser", return_value="/mock/home/directory") + mocker.patch("os.path.isfile", return_value=False) + mocker.patch.dict(os.environ, {"KUBERNETES_PORT": "number"}) + mocker.patch("kubernetes.config.load_incluster_config", side_effect=None) + 
mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.config_path", None) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) + + result = config_check() + assert result == None + + +def test_config_check_with_existing_config_file(mocker): + mocker.patch("os.path.expanduser", return_value="/mock/home/directory") + mocker.patch("os.path.isfile", return_value=True) + mocker.patch("kubernetes.config.load_kube_config", side_effect=None) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.config_path", None) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) + + result = config_check() + assert result == None + + +def test_config_check_with_config_path_and_no_api_client(mocker): + mocker.patch( + "codeflare_sdk.common.kubernetes_cluster.auth.config_path", "/mock/config/path" + ) + mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) + result = config_check() + assert result == "/mock/config/path" + + +def test_load_kube_config(mocker): + mocker.patch.object(config, "load_kube_config") + kube_config_auth = KubeConfigFileAuthentication( + kube_config_path="/path/to/your/config" + ) + response = kube_config_auth.load_kube_config() + + assert ( + response + == "Loaded user config file at path %s" % kube_config_auth.kube_config_path + ) + + kube_config_auth = KubeConfigFileAuthentication(kube_config_path=None) + response = kube_config_auth.load_kube_config() + assert response == "Please specify a config file path" + + +def test_auth_coverage(): + abstract = Authentication() + abstract.login() + abstract.logout() diff --git a/src/codeflare_sdk/common/kueue/test_kueue.py b/src/codeflare_sdk/common/kueue/test_kueue.py new file mode 100644 index 00000000..a4e984c3 --- /dev/null +++ b/src/codeflare_sdk/common/kueue/test_kueue.py @@ -0,0 +1,137 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
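The kubeconfig-based path covered just above follows the same pattern; a brief sketch, with the config file path as a placeholder rather than a value from these patches.

    # Hedged sketch only: the kubeconfig path is a placeholder.
    from codeflare_sdk.common.kubernetes_cluster import (
        KubeConfigFileAuthentication,
        config_check,
    )

    kube_auth = KubeConfigFileAuthentication(kube_config_path="/path/to/kubeconfig")
    kube_auth.load_kube_config()  # "Loaded user config file at path /path/to/kubeconfig"

    # config_check() falls back to in-cluster configuration or a local kubeconfig,
    # and raises PermissionError when no credentials can be found.
    config_check()
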
+from ..utils.unit_test_support import get_local_queue, createClusterConfig +from unittest.mock import patch +from codeflare_sdk.ray.cluster.cluster import Cluster, ClusterConfiguration +import yaml +import os +import filecmp +from pathlib import Path + +parent = Path(__file__).resolve().parents[4] # project directory +aw_dir = os.path.expanduser("~/.codeflare/resources/") + + +def test_none_local_queue(mocker): + mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") + config = ClusterConfiguration(name="unit-test-aw-kueue", namespace="ns") + config.name = "unit-test-aw-kueue" + config.local_queue = None + + cluster = Cluster(config) + assert cluster.config.local_queue == None + + +def test_cluster_creation_no_aw_local_queue(mocker): + # With written resources + # Create Ray Cluster with local queue specified + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + config = createClusterConfig() + config.name = "unit-test-cluster-kueue" + config.write_to_file = True + config.local_queue = "local-queue-default" + cluster = Cluster(config) + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-kueue.yaml" + assert cluster.app_wrapper_name == "unit-test-cluster-kueue" + assert filecmp.cmp( + f"{aw_dir}unit-test-cluster-kueue.yaml", + f"{parent}/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml", + shallow=True, + ) + + # With resources loaded in memory, no Local Queue specified. + config = createClusterConfig() + config.name = "unit-test-cluster-kueue" + config.write_to_file = False + cluster = Cluster(config) + + test_rc = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) + with open(f"{parent}/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml") as f: + expected_rc = yaml.load(f, Loader=yaml.FullLoader) + assert test_rc == expected_rc + + +def test_aw_creation_local_queue(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + config = createClusterConfig() + config.name = "unit-test-aw-kueue" + config.appwrapper = True + config.write_to_file = True + config.local_queue = "local-queue-default" + cluster = Cluster(config) + assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-aw-kueue.yaml" + assert cluster.app_wrapper_name == "unit-test-aw-kueue" + assert filecmp.cmp( + f"{aw_dir}unit-test-aw-kueue.yaml", + f"{parent}/tests/test_cluster_yamls/kueue/aw_kueue.yaml", + shallow=True, + ) + + # With resources loaded in memory, no Local Queue specified. 
+ config = createClusterConfig() + config.name = "unit-test-aw-kueue" + config.appwrapper = True + config.write_to_file = False + cluster = Cluster(config) + + test_rc = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) + with open(f"{parent}/tests/test_cluster_yamls/kueue/aw_kueue.yaml") as f: + expected_rc = yaml.load(f, Loader=yaml.FullLoader) + assert test_rc == expected_rc + + +def test_get_local_queue_exists_fail(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + config = createClusterConfig() + config.name = "unit-test-aw-kueue" + config.appwrapper = True + config.write_to_file = True + config.local_queue = "local_queue_doesn't_exist" + try: + Cluster(config) + except ValueError as e: + assert ( + str(e) + == "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" + ) + + +# Make sure to always keep this function last +def test_cleanup(): + os.remove(f"{aw_dir}unit-test-cluster-kueue.yaml") + os.remove(f"{aw_dir}unit-test-aw-kueue.yaml") diff --git a/src/codeflare_sdk/common/utils/test_generate_cert.py b/src/codeflare_sdk/common/utils/test_generate_cert.py new file mode 100644 index 00000000..b4439c20 --- /dev/null +++ b/src/codeflare_sdk/common/utils/test_generate_cert.py @@ -0,0 +1,114 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
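As a companion to the Kueue tests above, a minimal sketch of the flow they exercise. The cluster name, namespace, resource sizes, and queue name are placeholders, and the snippet assumes an authenticated session against a cluster with Kueue installed.

    # Hedged sketch only: names, namespace, sizes, and queue are placeholders.
    from codeflare_sdk.ray.cluster.cluster import Cluster, ClusterConfiguration

    config = ClusterConfiguration(
        name="example-cluster",      # placeholder name
        namespace="example-ns",      # placeholder namespace
        num_workers=2,
        worker_cpu_requests=3,
        worker_cpu_limits=4,
        worker_memory_requests=5,    # plain integers are intended as GB (see the ordering bugfix above)
        worker_memory_limits=6,
        appwrapper=True,             # wrap the RayCluster in an AppWrapper
        write_to_file=False,         # keep the generated YAML in memory
    )
    config.local_queue = "local-queue-default"  # must match an existing LocalQueue in this namespace

    cluster = Cluster(config)        # raises ValueError if the LocalQueue cannot be found
    print(cluster.app_wrapper_yaml)  # in-memory YAML string when write_to_file=False
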
+ +import base64 + +from cryptography.hazmat.primitives.serialization import ( + Encoding, + PublicFormat, + load_pem_private_key, +) +from cryptography.x509 import load_pem_x509_certificate +import os +from codeflare_sdk.common.utils.generate_cert import ( + export_env, + generate_ca_cert, + generate_tls_cert, +) +from kubernetes import client + + +def test_generate_ca_cert(): + """ + test the function codeflare_sdk.common.utils.generate_ca_cert generates the correct outputs + """ + key, certificate = generate_ca_cert() + cert = load_pem_x509_certificate(base64.b64decode(certificate)) + private_pub_key_bytes = ( + load_pem_private_key(base64.b64decode(key), password=None) + .public_key() + .public_bytes(Encoding.PEM, PublicFormat.SubjectPublicKeyInfo) + ) + cert_pub_key_bytes = cert.public_key().public_bytes( + Encoding.PEM, PublicFormat.SubjectPublicKeyInfo + ) + assert type(key) == str + assert type(certificate) == str + # Veirfy ca.cert is self signed + assert cert.verify_directly_issued_by(cert) == None + # Verify cert has the public key bytes from the private key + assert cert_pub_key_bytes == private_pub_key_bytes + + +def secret_ca_retreival(secret_name, namespace): + ca_private_key_bytes, ca_cert = generate_ca_cert() + data = {"ca.crt": ca_cert, "ca.key": ca_private_key_bytes} + assert secret_name == "ca-secret-cluster" + assert namespace == "namespace" + return client.models.V1Secret(data=data) + + +def test_generate_tls_cert(mocker): + """ + test the function codeflare_sdk.common.utils.generate_ca_cert generates the correct outputs + """ + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "codeflare_sdk.common.utils.generate_cert.get_secret_name", + return_value="ca-secret-cluster", + ) + mocker.patch( + "kubernetes.client.CoreV1Api.read_namespaced_secret", + side_effect=secret_ca_retreival, + ) + + generate_tls_cert("cluster", "namespace") + assert os.path.exists("tls-cluster-namespace") + assert os.path.exists(os.path.join("tls-cluster-namespace", "ca.crt")) + assert os.path.exists(os.path.join("tls-cluster-namespace", "tls.crt")) + assert os.path.exists(os.path.join("tls-cluster-namespace", "tls.key")) + + # verify the that the signed tls.crt is issued by the ca_cert (root cert) + with open(os.path.join("tls-cluster-namespace", "tls.crt"), "r") as f: + tls_cert = load_pem_x509_certificate(f.read().encode("utf-8")) + with open(os.path.join("tls-cluster-namespace", "ca.crt"), "r") as f: + root_cert = load_pem_x509_certificate(f.read().encode("utf-8")) + assert tls_cert.verify_directly_issued_by(root_cert) == None + + +def test_export_env(): + """ + test the function codeflare_sdk.common.utils.generate_ca_cert.export_ev generates the correct outputs + """ + tls_dir = "cluster" + ns = "namespace" + export_env(tls_dir, ns) + assert os.environ["RAY_USE_TLS"] == "1" + assert os.environ["RAY_TLS_SERVER_CERT"] == os.path.join( + os.getcwd(), f"tls-{tls_dir}-{ns}", "tls.crt" + ) + assert os.environ["RAY_TLS_SERVER_KEY"] == os.path.join( + os.getcwd(), f"tls-{tls_dir}-{ns}", "tls.key" + ) + assert os.environ["RAY_TLS_CA_CERT"] == os.path.join( + os.getcwd(), f"tls-{tls_dir}-{ns}", "ca.crt" + ) + + +# Make sure to always keep this function last +def test_cleanup(): + os.remove("tls-cluster-namespace/ca.crt") + os.remove("tls-cluster-namespace/tls.crt") + os.remove("tls-cluster-namespace/tls.key") + os.rmdir("tls-cluster-namespace") diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py 
b/src/codeflare_sdk/common/utils/unit_test_support.py new file mode 100644 index 00000000..61a16260 --- /dev/null +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -0,0 +1,383 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from codeflare_sdk.ray.cluster.cluster import ( + Cluster, + ClusterConfiguration, +) +import os +import yaml +from pathlib import Path +from kubernetes import client + +parent = Path(__file__).resolve().parents[4] # project directory +aw_dir = os.path.expanduser("~/.codeflare/resources/") + + +def createClusterConfig(): + config = ClusterConfiguration( + name="unit-test-cluster", + namespace="ns", + num_workers=2, + worker_cpu_requests=3, + worker_cpu_limits=4, + worker_memory_requests=5, + worker_memory_limits=6, + appwrapper=True, + write_to_file=False, + ) + return config + + +def createClusterWithConfig(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) + cluster = Cluster(createClusterConfig()) + return cluster + + +def createClusterWrongType(): + config = ClusterConfiguration( + name="unit-test-cluster", + namespace="ns", + num_workers=2, + worker_cpu_requests=[], + worker_cpu_limits=4, + worker_memory_requests=5, + worker_memory_limits=6, + worker_extended_resource_requests={"nvidia.com/gpu": 7}, + appwrapper=True, + machine_types=[True, False], + image_pull_secrets=["unit-test-pull-secret"], + image="quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", + write_to_file=True, + labels={1: 1}, + ) + return config + + +def get_package_and_version(package_name, requirements_file_path): + with open(requirements_file_path, "r") as file: + for line in file: + if line.strip().startswith(f"{package_name}=="): + return line.strip() + return None + + +def get_local_queue(group, version, namespace, plural): + assert group == "kueue.x-k8s.io" + assert version == "v1beta1" + assert namespace == "ns" + assert plural == "localqueues" + local_queues = { + "apiVersion": "kueue.x-k8s.io/v1beta1", + "items": [ + { + "apiVersion": "kueue.x-k8s.io/v1beta1", + "kind": "LocalQueue", + "metadata": { + "annotations": {"kueue.x-k8s.io/default-queue": "true"}, + "name": "local-queue-default", + "namespace": "ns", + }, + "spec": {"clusterQueue": "cluster-queue"}, + }, + { + "apiVersion": "kueue.x-k8s.io/v1beta1", + "kind": "LocalQueue", + "metadata": { + "name": "team-a-queue", + "namespace": "ns", + }, + "spec": {"clusterQueue": "team-a-queue"}, + }, + ], + "kind": "LocalQueueList", + "metadata": {"continue": "", "resourceVersion": "2266811"}, + } + return local_queues + + +def arg_check_aw_apply_effect(group, version, namespace, plural, body, *args): + assert group == "workload.codeflare.dev" + assert version == "v1beta2" + assert namespace == "ns" + assert plural == "appwrappers" + with open(f"{aw_dir}test.yaml") as f: + aw 
= yaml.load(f, Loader=yaml.FullLoader) + assert body == aw + assert args == tuple() + + +def arg_check_aw_del_effect(group, version, namespace, plural, name, *args): + assert group == "workload.codeflare.dev" + assert version == "v1beta2" + assert namespace == "ns" + assert plural == "appwrappers" + assert name == "test" + assert args == tuple() + + +def get_cluster_object(file_a, file_b): + with open(file_a) as f: + cluster_a = yaml.load(f, Loader=yaml.FullLoader) + with open(file_b) as f: + cluster_b = yaml.load(f, Loader=yaml.FullLoader) + + return cluster_a, cluster_b + + +def get_ray_obj(group, version, namespace, plural): + # To be used for mocking list_namespaced_custom_object for Ray Clusters + rc_a_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml" + rc_b_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml" + rc_a, rc_b = get_cluster_object(rc_a_path, rc_b_path) + + rc_list = {"items": [rc_a, rc_b]} + return rc_list + + +def get_ray_obj_with_status(group, version, namespace, plural): + # To be used for mocking list_namespaced_custom_object for Ray Clusters with statuses + rc_a_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml" + rc_b_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml" + rc_a, rc_b = get_cluster_object(rc_a_path, rc_b_path) + + rc_a.update( + { + "status": { + "desiredWorkerReplicas": 1, + "endpoints": { + "client": "10001", + "dashboard": "8265", + "gcs": "6379", + "metrics": "8080", + }, + "head": {"serviceIP": "172.30.179.88"}, + "lastUpdateTime": "2024-03-05T09:55:37Z", + "maxWorkerReplicas": 1, + "minWorkerReplicas": 1, + "observedGeneration": 1, + "state": "ready", + }, + } + ) + rc_b.update( + { + "status": { + "availableWorkerReplicas": 2, + "desiredWorkerReplicas": 1, + "endpoints": { + "client": "10001", + "dashboard": "8265", + "gcs": "6379", + }, + "lastUpdateTime": "2023-02-22T16:26:16Z", + "maxWorkerReplicas": 1, + "minWorkerReplicas": 1, + "state": "suspended", + } + } + ) + + rc_list = {"items": [rc_a, rc_b]} + return rc_list + + +def get_aw_obj(group, version, namespace, plural): + # To be used for mocking list_namespaced_custom_object for AppWrappers + aw_a_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml" + aw_b_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml" + aw_a, aw_b = get_cluster_object(aw_a_path, aw_b_path) + + aw_list = {"items": [aw_a, aw_b]} + return aw_list + + +def get_aw_obj_with_status(group, version, namespace, plural): + # To be used for mocking list_namespaced_custom_object for AppWrappers with statuses + aw_a_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml" + aw_b_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml" + aw_a, aw_b = get_cluster_object(aw_a_path, aw_b_path) + + aw_a.update( + { + "status": { + "phase": "Running", + }, + } + ) + aw_b.update( + { + "status": { + "phase": "Suspended", + }, + } + ) + + aw_list = {"items": [aw_a, aw_b]} + return aw_list + + +def get_named_aw(group, version, namespace, plural, name): + aws = get_aw_obj("workload.codeflare.dev", "v1beta2", "ns", "appwrappers") + return aws["items"][0] + + +def arg_check_del_effect(group, version, namespace, plural, name, *args): + assert namespace == "ns" + assert args == tuple() + if plural == "appwrappers": + assert group == "workload.codeflare.dev" + assert version == "v1beta2" + assert name == "unit-test-cluster" + elif plural == "rayclusters": + assert 
group == "ray.io" + assert version == "v1" + assert name == "unit-test-cluster-ray" + elif plural == "ingresses": + assert group == "networking.k8s.io" + assert version == "v1" + assert name == "ray-dashboard-unit-test-cluster-ray" + + +def arg_check_apply_effect(group, version, namespace, plural, body, *args): + assert namespace == "ns" + assert args == tuple() + if plural == "appwrappers": + assert group == "workload.codeflare.dev" + assert version == "v1beta2" + elif plural == "rayclusters": + assert group == "ray.io" + assert version == "v1" + elif plural == "ingresses": + assert group == "networking.k8s.io" + assert version == "v1" + elif plural == "routes": + assert group == "route.openshift.io" + assert version == "v1" + else: + assert 1 == 0 + + +def get_obj_none(group, version, namespace, plural): + return {"items": []} + + +def route_list_retrieval(group, version, namespace, plural): + assert group == "route.openshift.io" + assert version == "v1" + assert namespace == "ns" + assert plural == "routes" + return { + "kind": "RouteList", + "apiVersion": "route.openshift.io/v1", + "metadata": {"resourceVersion": "6072398"}, + "items": [ + { + "metadata": { + "name": "ray-dashboard-quicktest", + "namespace": "ns", + }, + "spec": { + "host": "ray-dashboard-quicktest-opendatahub.apps.cluster.awsroute.org", + "to": { + "kind": "Service", + "name": "quicktest-head-svc", + "weight": 100, + }, + "port": {"targetPort": "dashboard"}, + "tls": {"termination": "edge"}, + }, + }, + { + "metadata": { + "name": "rayclient-quicktest", + "namespace": "ns", + }, + "spec": { + "host": "rayclient-quicktest-opendatahub.apps.cluster.awsroute.org", + "to": { + "kind": "Service", + "name": "quicktest-head-svc", + "weight": 100, + }, + "port": {"targetPort": "client"}, + "tls": {"termination": "passthrough"}, + }, + }, + ], + } + + +def ingress_retrieval( + cluster_name="unit-test-cluster", client_ing: bool = False, annotations: dict = None +): + dashboard_ingress = mocked_ingress(8265, cluster_name, annotations) + if client_ing: + client_ingress = mocked_ingress( + 10001, cluster_name=cluster_name, annotations=annotations + ) + mock_ingress_list = client.V1IngressList( + items=[client_ingress, dashboard_ingress] + ) + else: + mock_ingress_list = client.V1IngressList(items=[dashboard_ingress]) + + return mock_ingress_list + + +def mocked_ingress(port, cluster_name="unit-test-cluster", annotations: dict = None): + labels = {"ingress-owner": cluster_name} + if port == 10001: + name = f"rayclient-{cluster_name}" + else: + name = f"ray-dashboard-{cluster_name}" + mock_ingress = client.V1Ingress( + metadata=client.V1ObjectMeta( + name=name, + annotations=annotations, + labels=labels, + owner_references=[ + client.V1OwnerReference( + api_version="v1", kind="Ingress", name=cluster_name, uid="unique-id" + ) + ], + ), + spec=client.V1IngressSpec( + rules=[ + client.V1IngressRule( + host=f"{name}-ns.apps.cluster.awsroute.org", + http=client.V1HTTPIngressRuleValue( + paths=[ + client.V1HTTPIngressPath( + path_type="Prefix", + path="/", + backend=client.V1IngressBackend( + service=client.V1IngressServiceBackend( + name="head-svc-test", + port=client.V1ServiceBackendPort(number=port), + ) + ), + ) + ] + ), + ) + ], + ), + ) + return mock_ingress diff --git a/src/codeflare_sdk/common/widgets/test_widgets.py b/src/codeflare_sdk/common/widgets/test_widgets.py new file mode 100644 index 00000000..e01b9193 --- /dev/null +++ b/src/codeflare_sdk/common/widgets/test_widgets.py @@ -0,0 +1,469 @@ +# Copyright 2024 IBM, Red Hat +# 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import codeflare_sdk.common.widgets.widgets as cf_widgets +import pandas as pd +from unittest.mock import MagicMock, patch +from ..utils.unit_test_support import get_local_queue, createClusterConfig +from codeflare_sdk.ray.cluster.cluster import Cluster +from codeflare_sdk.ray.cluster.status import ( + RayCluster, + RayClusterStatus, +) +import pytest +from kubernetes import client + + +@patch.dict( + "os.environ", {"JPY_SESSION_NAME": "example-test"} +) # Mock Jupyter environment variable +def test_cluster_up_down_buttons(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + cluster = Cluster(createClusterConfig()) + + with patch("ipywidgets.Button") as MockButton, patch( + "ipywidgets.Checkbox" + ) as MockCheckbox, patch("ipywidgets.Output"), patch("ipywidgets.HBox"), patch( + "ipywidgets.VBox" + ), patch.object( + cluster, "up" + ) as mock_up, patch.object( + cluster, "down" + ) as mock_down, patch.object( + cluster, "wait_ready" + ) as mock_wait_ready: + # Create mock button & CheckBox instances + mock_up_button = MagicMock() + mock_down_button = MagicMock() + mock_wait_ready_check_box = MagicMock() + + # Ensure the mock Button class returns the mock button instances in sequence + MockCheckbox.side_effect = [mock_wait_ready_check_box] + MockButton.side_effect = [mock_up_button, mock_down_button] + + # Call the method under test + cf_widgets.cluster_up_down_buttons(cluster) + + # Simulate checkbox being checked or unchecked + mock_wait_ready_check_box.value = True # Simulate checkbox being checked + + # Simulate the button clicks by calling the mock on_click handlers + mock_up_button.on_click.call_args[0][0](None) # Simulate clicking "Cluster Up" + mock_down_button.on_click.call_args[0][0]( + None + ) # Simulate clicking "Cluster Down" + + # Check if the `up` and `down` methods were called + mock_wait_ready.assert_called_once() + mock_up.assert_called_once() + mock_down.assert_called_once() + + +@patch.dict("os.environ", {}, clear=True) # Mock environment with no variables +def test_is_notebook_false(): + assert cf_widgets.is_notebook() is False + + +@patch.dict( + "os.environ", {"JPY_SESSION_NAME": "example-test"} +) # Mock Jupyter environment variable +def test_is_notebook_true(): + assert cf_widgets.is_notebook() is True + + +def test_view_clusters(mocker, capsys): + # If it is not a notebook environment, a warning should be raised + with pytest.warns( + UserWarning, + match="view_clusters can only be used in a Jupyter Notebook environment.", + ): + result = cf_widgets.view_clusters("default") + + # Assert the function returns None when not in a notebook environment + assert result is None + + # Prepare to
run view_clusters when notebook environment is detected + mocker.patch("codeflare_sdk.common.widgets.widgets.is_notebook", return_value=True) + mock_get_current_namespace = mocker.patch( + "codeflare_sdk.ray.cluster.cluster.get_current_namespace", + return_value="default", + ) + namespace = mock_get_current_namespace.return_value + + # Assert the function returns None when no clusters are found + mock_fetch_cluster_data = mocker.patch( + "codeflare_sdk.common.widgets.widgets._fetch_cluster_data", + return_value=pd.DataFrame(), + ) + result = cf_widgets.view_clusters() + captured = capsys.readouterr() + assert mock_fetch_cluster_data.return_value.empty + assert "No clusters found in the default namespace." in captured.out + assert result is None + + # Prepare to run view_clusters with a test DataFrame + mock_fetch_cluster_data = mocker.patch( + "codeflare_sdk.common.widgets.widgets._fetch_cluster_data", + return_value=pd.DataFrame( + { + "Name": ["test-cluster"], + "Namespace": ["default"], + "Num Workers": ["1"], + "Head GPUs": ["0"], + "Worker GPUs": ["0"], + "Head CPU Req~Lim": ["1~1"], + "Head Memory Req~Lim": ["1Gi~1Gi"], + "Worker CPU Req~Lim": ["1~1"], + "Worker Memory Req~Lim": ["1Gi~1Gi"], + "status": ['Ready ✓'], + } + ), + ) + # Create a RayClusterManagerWidgets instance + ray_cluster_manager_instance = cf_widgets.RayClusterManagerWidgets( + ray_clusters_df=mock_fetch_cluster_data.return_value, namespace=namespace + ) + # Patch the constructor of RayClusterManagerWidgets to return our initialized instance + mock_constructor = mocker.patch( + "codeflare_sdk.common.widgets.widgets.RayClusterManagerWidgets", + return_value=ray_cluster_manager_instance, + ) + + # Use a spy to track calls to display_widgets without replacing it + spy_display_widgets = mocker.spy(ray_cluster_manager_instance, "display_widgets") + + cf_widgets.view_clusters() + + mock_constructor.assert_called_once_with( + ray_clusters_df=mock_fetch_cluster_data.return_value, namespace=namespace + ) + + spy_display_widgets.assert_called_once() + + +def test_delete_cluster(mocker, capsys): + name = "test-cluster" + namespace = "default" + + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + + mock_ray_cluster = MagicMock() + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + side_effect=[ + mock_ray_cluster, + client.ApiException(status=404), + client.ApiException(status=404), + mock_ray_cluster, + ], + ) + + # In this scenario, the RayCluster exists and the AppWrapper does not. + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._check_aw_exists", return_value=False + ) + mock_delete_rc = mocker.patch( + "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object" + ) + cf_widgets._delete_cluster(name, namespace) + + mock_delete_rc.assert_called_once_with( + group="ray.io", + version="v1", + namespace=namespace, + plural="rayclusters", + name=name, + ) + + # In this scenario, the AppWrapper exists and the RayCluster does not + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._check_aw_exists", return_value=True + ) + mock_delete_aw = mocker.patch( + "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object" + ) + cf_widgets._delete_cluster(name, namespace) + + mock_delete_aw.assert_called_once_with( + group="workload.codeflare.dev", + version="v1beta2", + namespace=namespace, + plural="appwrappers", + name=name, + ) + + # In this scenario, the deletion of the resource times out. 
+ with pytest.raises( + TimeoutError, match=f"Timeout waiting for {name} to be deleted." + ): + cf_widgets._delete_cluster(name, namespace, 1) + + +def test_ray_cluster_manager_widgets_init(mocker, capsys): + namespace = "default" + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + test_ray_clusters_df = pd.DataFrame( + { + "Name": ["test-cluster-1", "test-cluster-2"], + "Namespace": [namespace, namespace], + "Num Workers": ["1", "2"], + "Head GPUs": ["0", "0"], + "Worker GPUs": ["0", "0"], + "Head CPU Req~Lim": ["1~1", "1~1"], + "Head Memory Req~Lim": ["1Gi~1Gi", "1Gi~1Gi"], + "Worker CPU Req~Lim": ["1~1", "1~1"], + "Worker Memory Req~Lim": ["1Gi~1Gi", "1Gi~1Gi"], + "status": [ + 'Ready ✓', + 'Ready ✓', + ], + } + ) + mock_fetch_cluster_data = mocker.patch( + "codeflare_sdk.common.widgets.widgets._fetch_cluster_data", + return_value=test_ray_clusters_df, + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.get_current_namespace", + return_value=namespace, + ) + mock_delete_cluster = mocker.patch( + "codeflare_sdk.common.widgets.widgets._delete_cluster" + ) + + # # Mock ToggleButtons + mock_toggle_buttons = mocker.patch("ipywidgets.ToggleButtons") + mock_button = mocker.patch("ipywidgets.Button") + mock_output = mocker.patch("ipywidgets.Output") + + # Initialize the RayClusterManagerWidgets instance + ray_cluster_manager_instance = cf_widgets.RayClusterManagerWidgets( + ray_clusters_df=test_ray_clusters_df, namespace=namespace + ) + + # Assertions for DataFrame and attributes + assert ray_cluster_manager_instance.ray_clusters_df.equals( + test_ray_clusters_df + ), "ray_clusters_df attribute does not match the input DataFrame" + assert ( + ray_cluster_manager_instance.namespace == namespace + ), f"Expected namespace to be '{namespace}', but got '{ray_cluster_manager_instance.namespace}'" + assert ( + ray_cluster_manager_instance.classification_widget.options + == test_ray_clusters_df["Name"].tolist() + ), "classification_widget options do not match the input DataFrame" + + # Assertions for widgets + mock_toggle_buttons.assert_called_once_with( + options=test_ray_clusters_df["Name"].tolist(), + value=test_ray_clusters_df["Name"].tolist()[0], + description="Select an existing cluster:", + ) + assert ( + ray_cluster_manager_instance.classification_widget + == mock_toggle_buttons.return_value + ), "classification_widget is not set correctly" + assert ( + ray_cluster_manager_instance.delete_button == mock_button.return_value + ), "delete_button is not set correctly" + assert ( + ray_cluster_manager_instance.list_jobs_button == mock_button.return_value + ), "list_jobs_button is not set correctly" + assert ( + ray_cluster_manager_instance.ray_dashboard_button == mock_button.return_value + ), "ray_dashboard_button is not set correctly" + assert ( + ray_cluster_manager_instance.raycluster_data_output == mock_output.return_value + ), "raycluster_data_output is not set correctly" + assert ( + ray_cluster_manager_instance.user_output == mock_output.return_value + ), "user_output is not set correctly" + assert ( + ray_cluster_manager_instance.url_output == mock_output.return_value + ), "url_output is not set correctly" + + ### Test button click events + mock_delete_button = MagicMock() + mock_list_jobs_button = MagicMock() + mock_ray_dashboard_button = MagicMock() + + mock_javascript = 
mocker.patch("codeflare_sdk.common.widgets.widgets.Javascript") + ray_cluster_manager_instance.url_output = MagicMock() + + mock_dashboard_uri = mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.cluster_dashboard_uri", + return_value="https://ray-dashboard-test-cluster-1-ns.apps.cluster.awsroute.org", + ) + + # Simulate clicking the list jobs button + ray_cluster_manager_instance.classification_widget.value = "test-cluster-1" + ray_cluster_manager_instance._on_list_jobs_button_click(mock_list_jobs_button) + + captured = capsys.readouterr() + assert ( + f"Opening Ray Jobs Dashboard for test-cluster-1 cluster:\n{mock_dashboard_uri.return_value}/#/jobs" + in captured.out + ) + mock_javascript.assert_called_with( + f'window.open("{mock_dashboard_uri.return_value}/#/jobs", "_blank");' + ) + + # Simulate clicking the Ray dashboard button + ray_cluster_manager_instance.classification_widget.value = "test-cluster-1" + ray_cluster_manager_instance._on_ray_dashboard_button_click( + mock_ray_dashboard_button + ) + + captured = capsys.readouterr() + assert ( + f"Opening Ray Dashboard for test-cluster-1 cluster:\n{mock_dashboard_uri.return_value}" + in captured.out + ) + mock_javascript.assert_called_with( + f'window.open("{mock_dashboard_uri.return_value}", "_blank");' + ) + + # Simulate clicking the delete button + ray_cluster_manager_instance.classification_widget.value = "test-cluster-1" + ray_cluster_manager_instance._on_delete_button_click(mock_delete_button) + mock_delete_cluster.assert_called_with("test-cluster-1", namespace) + + mock_fetch_cluster_data.return_value = pd.DataFrame() + ray_cluster_manager_instance.classification_widget.value = "test-cluster-2" + ray_cluster_manager_instance._on_delete_button_click(mock_delete_button) + mock_delete_cluster.assert_called_with("test-cluster-2", namespace) + + # Assert on deletion that the dataframe is empty + assert ( + ray_cluster_manager_instance.ray_clusters_df.empty + ), "Expected DataFrame to be empty after deletion" + + captured = capsys.readouterr() + assert ( + f"Cluster test-cluster-1 in the {namespace} namespace was deleted successfully." 
+ in captured.out + ) + + +def test_fetch_cluster_data(mocker): + # Return empty dataframe when no clusters are found + mocker.patch("codeflare_sdk.ray.cluster.cluster.list_all_clusters", return_value=[]) + df = cf_widgets._fetch_cluster_data(namespace="default") + assert df.empty + + # Create mock RayCluster objects + mock_raycluster1 = MagicMock(spec=RayCluster) + mock_raycluster1.name = "test-cluster-1" + mock_raycluster1.namespace = "default" + mock_raycluster1.num_workers = 1 + mock_raycluster1.head_extended_resources = {"nvidia.com/gpu": "1"} + mock_raycluster1.worker_extended_resources = {"nvidia.com/gpu": "2"} + mock_raycluster1.head_cpu_requests = "500m" + mock_raycluster1.head_cpu_limits = "1000m" + mock_raycluster1.head_mem_requests = "1Gi" + mock_raycluster1.head_mem_limits = "2Gi" + mock_raycluster1.worker_cpu_requests = "1000m" + mock_raycluster1.worker_cpu_limits = "2000m" + mock_raycluster1.worker_mem_requests = "2Gi" + mock_raycluster1.worker_mem_limits = "4Gi" + mock_raycluster1.status = MagicMock() + mock_raycluster1.status.name = "READY" + mock_raycluster1.status = RayClusterStatus.READY + + mock_raycluster2 = MagicMock(spec=RayCluster) + mock_raycluster2.name = "test-cluster-2" + mock_raycluster2.namespace = "default" + mock_raycluster2.num_workers = 2 + mock_raycluster2.head_extended_resources = {} + mock_raycluster2.worker_extended_resources = {} + mock_raycluster2.head_cpu_requests = None + mock_raycluster2.head_cpu_limits = None + mock_raycluster2.head_mem_requests = None + mock_raycluster2.head_mem_limits = None + mock_raycluster2.worker_cpu_requests = None + mock_raycluster2.worker_cpu_limits = None + mock_raycluster2.worker_mem_requests = None + mock_raycluster2.worker_mem_limits = None + mock_raycluster2.status = MagicMock() + mock_raycluster2.status.name = "SUSPENDED" + mock_raycluster2.status = RayClusterStatus.SUSPENDED + + with patch( + "codeflare_sdk.ray.cluster.cluster.list_all_clusters", + return_value=[mock_raycluster1, mock_raycluster2], + ): + # Call the function under test + df = cf_widgets._fetch_cluster_data(namespace="default") + + # Expected DataFrame + expected_data = { + "Name": ["test-cluster-1", "test-cluster-2"], + "Namespace": ["default", "default"], + "Num Workers": [1, 2], + "Head GPUs": ["nvidia.com/gpu: 1", "0"], + "Worker GPUs": ["nvidia.com/gpu: 2", "0"], + "Head CPU Req~Lim": ["500m~1000m", "0~0"], + "Head Memory Req~Lim": ["1Gi~2Gi", "0~0"], + "Worker CPU Req~Lim": ["1000m~2000m", "0~0"], + "Worker Memory Req~Lim": ["2Gi~4Gi", "0~0"], + "status": [ + 'Ready ✓', + 'Suspended ❄️', + ], + } + + expected_df = pd.DataFrame(expected_data) + + # Assert that the DataFrame matches expected + pd.testing.assert_frame_equal( + df.reset_index(drop=True), expected_df.reset_index(drop=True) + ) + + +def test_format_status(): + # Test each possible status + test_cases = [ + (RayClusterStatus.READY, 'Ready ✓'), + ( + RayClusterStatus.SUSPENDED, + 'Suspended ❄️', + ), + (RayClusterStatus.FAILED, 'Failed ✗'), + (RayClusterStatus.UNHEALTHY, 'Unhealthy'), + (RayClusterStatus.UNKNOWN, 'Unknown'), + ] + + for status, expected_output in test_cases: + assert ( + cf_widgets._format_status(status) == expected_output + ), f"Failed for status: {status}" + + # Test an unrecognized status + unrecognized_status = "NotAStatus" + assert ( + cf_widgets._format_status(unrecognized_status) == "NotAStatus" + ), "Failed for unrecognized status" diff --git a/src/codeflare_sdk/ray/appwrapper/test_awload.py b/src/codeflare_sdk/ray/appwrapper/test_awload.py new file mode 
100644 index 00000000..6909394b --- /dev/null +++ b/src/codeflare_sdk/ray/appwrapper/test_awload.py @@ -0,0 +1,88 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from codeflare_sdk.common.utils.unit_test_support import ( + arg_check_aw_apply_effect, + arg_check_aw_del_effect, +) +from codeflare_sdk.ray.appwrapper import AWManager +from codeflare_sdk.ray.cluster import Cluster, ClusterConfiguration +import os +from pathlib import Path + +parent = Path(__file__).resolve().parents[4] # project directory +aw_dir = os.path.expanduser("~/.codeflare/resources/") + + +def test_AWManager_creation(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") + # Create test.yaml + Cluster( + ClusterConfiguration( + name="test", + namespace="ns", + write_to_file=True, + appwrapper=True, + ) + ) + + testaw = AWManager(f"{aw_dir}test.yaml") + assert testaw.name == "test" + assert testaw.namespace == "ns" + assert testaw.submitted == False + try: + testaw = AWManager("fake") + except Exception as e: + assert type(e) == FileNotFoundError + assert str(e) == "[Errno 2] No such file or directory: 'fake'" + try: + testaw = AWManager( + f"{parent}/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml" + ) + except Exception as e: + assert type(e) == ValueError + assert ( + str(e) + == f"{parent}/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml is not a correctly formatted AppWrapper yaml" + ) + + +def test_AWManager_submit_remove(mocker, capsys): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + testaw = AWManager(f"{aw_dir}test.yaml") + testaw.remove() + captured = capsys.readouterr() + assert ( + captured.out + == "AppWrapper not submitted by this manager yet, nothing to remove\n" + ) + assert testaw.submitted == False + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.create_namespaced_custom_object", + side_effect=arg_check_aw_apply_effect, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object", + side_effect=arg_check_aw_del_effect, + ) + testaw.submit() + assert testaw.submitted == True + testaw.remove() + assert testaw.submitted == False + + +# Make sure to always keep this function last +def test_cleanup(): + os.remove(f"{aw_dir}test.yaml") diff --git a/src/codeflare_sdk/ray/appwrapper/test_status.py b/src/codeflare_sdk/ray/appwrapper/test_status.py new file mode 100644 index 00000000..8c693767 --- /dev/null +++ b/src/codeflare_sdk/ray/appwrapper/test_status.py @@ -0,0 +1,104 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from codeflare_sdk.ray.cluster.cluster import ( + _app_wrapper_status, + Cluster, + ClusterConfiguration, +) +from codeflare_sdk.ray.appwrapper import AppWrapper, AppWrapperStatus +from codeflare_sdk.ray.cluster.status import CodeFlareClusterStatus +import os + +aw_dir = os.path.expanduser("~/.codeflare/resources/") + + +def test_cluster_status(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "codeflare_sdk.common.kueue.kueue.local_queue_exists", + return_value="true", + ) + fake_aw = AppWrapper("test", AppWrapperStatus.FAILED) + + cf = Cluster( + ClusterConfiguration( + name="test", + namespace="ns", + write_to_file=True, + appwrapper=True, + local_queue="local_default_queue", + ) + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._app_wrapper_status", return_value=None + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=None + ) + status, ready = cf.status() + assert status == CodeFlareClusterStatus.UNKNOWN + assert ready == False + + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._app_wrapper_status", return_value=fake_aw + ) + status, ready = cf.status() + assert status == CodeFlareClusterStatus.FAILED + assert ready == False + + fake_aw.status = AppWrapperStatus.SUSPENDED + status, ready = cf.status() + assert status == CodeFlareClusterStatus.QUEUED + assert ready == False + + fake_aw.status = AppWrapperStatus.RESUMING + status, ready = cf.status() + assert status == CodeFlareClusterStatus.STARTING + assert ready == False + + fake_aw.status = AppWrapperStatus.RESETTING + status, ready = cf.status() + assert status == CodeFlareClusterStatus.STARTING + assert ready == False + + fake_aw.status = AppWrapperStatus.RUNNING + status, ready = cf.status() + assert status == CodeFlareClusterStatus.UNKNOWN + assert ready == False + + +def aw_status_fields(group, version, namespace, plural, *args): + assert group == "workload.codeflare.dev" + assert version == "v1beta2" + assert namespace == "test-ns" + assert plural == "appwrappers" + assert args == tuple() + return {"items": []} + + +def test_aw_status(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=aw_status_fields, + ) + aw = _app_wrapper_status("test-aw", "test-ns") + assert aw == None + + +# Make sure to always keep this function last +def test_cleanup(): + os.remove(f"{aw_dir}test.yaml") diff --git a/src/codeflare_sdk/ray/client/test_ray_jobs.py b/src/codeflare_sdk/ray/client/test_ray_jobs.py new file mode 100644 index 00000000..cbb27aa7 --- /dev/null +++ b/src/codeflare_sdk/ray/client/test_ray_jobs.py @@ -0,0 +1,173 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ray.job_submission import JobSubmissionClient +from codeflare_sdk.ray.client.ray_jobs import RayJobClient +from codeflare_sdk.common.utils.unit_test_support import get_package_and_version +import pytest + + +# rjc == RayJobClient +@pytest.fixture +def ray_job_client(mocker): + # Creating a fixture to instantiate RayJobClient with a mocked JobSubmissionClient + mocker.patch.object(JobSubmissionClient, "__init__", return_value=None) + return RayJobClient( + "https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" + ) + + +def test_rjc_submit_job(ray_job_client, mocker): + mocked_submit_job = mocker.patch.object( + JobSubmissionClient, "submit_job", return_value="mocked_submission_id" + ) + submission_id = ray_job_client.submit_job(entrypoint={"pip": ["numpy"]}) + + mocked_submit_job.assert_called_once_with( + entrypoint={"pip": ["numpy"]}, + job_id=None, + runtime_env=None, + metadata=None, + submission_id=None, + entrypoint_num_cpus=None, + entrypoint_num_gpus=None, + entrypoint_memory=None, + entrypoint_resources=None, + ) + + assert submission_id == "mocked_submission_id" + + +def test_rjc_delete_job(ray_job_client, mocker): + # Case return True + mocked_delete_job_True = mocker.patch.object( + JobSubmissionClient, "delete_job", return_value=True + ) + result = ray_job_client.delete_job(job_id="mocked_job_id") + + mocked_delete_job_True.assert_called_once_with(job_id="mocked_job_id") + assert result == (True, "Successfully deleted Job mocked_job_id") + + # Case return False + mocked_delete_job_False = mocker.patch.object( + JobSubmissionClient, "delete_job", return_value=(False) + ) + result = ray_job_client.delete_job(job_id="mocked_job_id") + + mocked_delete_job_False.assert_called_once_with(job_id="mocked_job_id") + assert result == (False, "Failed to delete Job mocked_job_id") + + +def test_rjc_stop_job(ray_job_client, mocker): + # Case return True + mocked_stop_job_True = mocker.patch.object( + JobSubmissionClient, "stop_job", return_value=(True) + ) + result = ray_job_client.stop_job(job_id="mocked_job_id") + + mocked_stop_job_True.assert_called_once_with(job_id="mocked_job_id") + assert result == (True, "Successfully stopped Job mocked_job_id") + + # Case return False + mocked_stop_job_False = mocker.patch.object( + JobSubmissionClient, "stop_job", return_value=(False) + ) + result = ray_job_client.stop_job(job_id="mocked_job_id") + + mocked_stop_job_False.assert_called_once_with(job_id="mocked_job_id") + assert result == ( + False, + "Failed to stop Job, mocked_job_id could have already completed.", + ) + + +def test_rjc_address(ray_job_client, mocker): + mocked_rjc_address = mocker.patch.object( + JobSubmissionClient, + "get_address", + return_value="https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org", + ) + address = ray_job_client.get_address() + + mocked_rjc_address.assert_called_once() + assert ( + address + == "https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" + ) + + +def test_rjc_get_job_logs(ray_job_client, mocker): + mocked_rjc_get_job_logs = mocker.patch.object( + JobSubmissionClient, "get_job_logs", 
return_value="Logs" + ) + logs = ray_job_client.get_job_logs(job_id="mocked_job_id") + + mocked_rjc_get_job_logs.assert_called_once_with(job_id="mocked_job_id") + assert logs == "Logs" + + +def test_rjc_get_job_info(ray_job_client, mocker): + job_details_example = "JobDetails(type=, job_id=None, submission_id='mocked_submission_id', driver_info=None, status=, entrypoint='python test.py', message='Job has not started yet. It may be waiting for the runtime environment to be set up.', error_type=None, start_time=1701271760641, end_time=None, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_67de6f0e60d43b19.zip', 'pip': {'packages': ['numpy'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address=None, driver_node_id=None)" + mocked_rjc_get_job_info = mocker.patch.object( + JobSubmissionClient, "get_job_info", return_value=job_details_example + ) + job_details = ray_job_client.get_job_info(job_id="mocked_job_id") + + mocked_rjc_get_job_info.assert_called_once_with(job_id="mocked_job_id") + assert job_details == job_details_example + + +def test_rjc_get_job_status(ray_job_client, mocker): + job_status_example = "" + mocked_rjc_get_job_status = mocker.patch.object( + JobSubmissionClient, "get_job_status", return_value=job_status_example + ) + job_status = ray_job_client.get_job_status(job_id="mocked_job_id") + + mocked_rjc_get_job_status.assert_called_once_with(job_id="mocked_job_id") + assert job_status == job_status_example + + +def test_rjc_tail_job_logs(ray_job_client, mocker): + logs_example = [ + "Job started...", + "Processing input data...", + "Finalizing results...", + "Job completed successfully.", + ] + mocked_rjc_tail_job_logs = mocker.patch.object( + JobSubmissionClient, "tail_job_logs", return_value=logs_example + ) + job_tail_job_logs = ray_job_client.tail_job_logs(job_id="mocked_job_id") + + mocked_rjc_tail_job_logs.assert_called_once_with(job_id="mocked_job_id") + assert job_tail_job_logs == logs_example + + +def test_rjc_list_jobs(ray_job_client, mocker): + requirements_path = "tests/e2e/mnist_pip_requirements.txt" + pytorch_lightning = get_package_and_version("pytorch_lightning", requirements_path) + torchmetrics = get_package_and_version("torchmetrics", requirements_path) + torchvision = get_package_and_version("torchvision", requirements_path) + jobs_list = [ + f"JobDetails(type=, job_id=None, submission_id='raysubmit_4k2NYS1YbRXYPZCM', driver_info=None, status=, entrypoint='python mnist.py', message='Job finished successfully.', error_type=None, start_time=1701352132585, end_time=1701352192002, metadata={{}}, runtime_env={{'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {{'packages': ['{pytorch_lightning}', 'ray_lightning', '{torchmetrics}', '{torchvision}'], 'pip_check': False}}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", + f"JobDetails(type=, job_id=None, submission_id='raysubmit_iRuwU8vdkbUZZGvT', driver_info=None, status=, entrypoint='python mnist.py', message='Job was intentionally stopped.', error_type=None, start_time=1701353096163, end_time=1701353097733, metadata={{}}, runtime_env={{'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {{'packages': ['{pytorch_lightning}', 'ray_lightning', '{torchmetrics}', '{torchvision}'], 'pip_check': False}}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}}, 
driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", + ] + mocked_rjc_list_jobs = mocker.patch.object( + JobSubmissionClient, "list_jobs", return_value=jobs_list + ) + job_list_jobs = ray_job_client.list_jobs() + + mocked_rjc_list_jobs.assert_called_once() + assert job_list_jobs == jobs_list diff --git a/src/codeflare_sdk/ray/cluster/test_cluster.py b/src/codeflare_sdk/ray/cluster/test_cluster.py new file mode 100644 index 00000000..20438bbe --- /dev/null +++ b/src/codeflare_sdk/ray/cluster/test_cluster.py @@ -0,0 +1,610 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from codeflare_sdk.ray.cluster.cluster import ( + Cluster, + ClusterConfiguration, + get_cluster, + list_all_queued, +) +from codeflare_sdk.common.utils.unit_test_support import ( + createClusterWithConfig, + arg_check_del_effect, + ingress_retrieval, + arg_check_apply_effect, + get_local_queue, + createClusterConfig, + route_list_retrieval, + get_ray_obj, + get_aw_obj, + get_named_aw, + get_obj_none, + get_ray_obj_with_status, + get_aw_obj_with_status, +) +from codeflare_sdk.ray.cluster.generate_yaml import ( + is_openshift_cluster, + is_kind_cluster, +) +from pathlib import Path +from unittest.mock import MagicMock +from kubernetes import client +import os + +parent = Path(__file__).resolve().parents[4] # project directory +expected_clusters_dir = f"{parent}/tests/test_cluster_yamls" +aw_dir = os.path.expanduser("~/.codeflare/resources/") + + +def test_cluster_up_down(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.create_namespaced_custom_object", + side_effect=arg_check_apply_effect, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object", + side_effect=arg_check_del_effect, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", + return_value={"items": []}, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + cluster = cluster = createClusterWithConfig(mocker) + cluster.up() + cluster.down() + + +def test_cluster_up_down_no_mcad(mocker): + mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + mocker.patch( + 
"kubernetes.client.CustomObjectsApi.create_namespaced_custom_object", + side_effect=arg_check_apply_effect, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object", + side_effect=arg_check_del_effect, + ) + mocker.patch( + "kubernetes.client.CoreV1Api.create_namespaced_secret", + ) + mocker.patch( + "kubernetes.client.CoreV1Api.delete_namespaced_secret", + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", + return_value={"items": []}, + ) + config = createClusterConfig() + config.name = "unit-test-cluster-ray" + config.appwrapper = False + cluster = Cluster(config) + cluster.up() + cluster.down() + + +def test_cluster_uris(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._get_ingress_domain", + return_value="apps.cluster.awsroute.org", + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + cluster = cluster = createClusterWithConfig(mocker) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval( + cluster_name="unit-test-cluster", + annotations={"route.openshift.io/termination": "passthrough"}, + ), + ) + assert ( + cluster.cluster_dashboard_uri() + == "https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" + ) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval(), + ) + assert cluster.cluster_uri() == "ray://unit-test-cluster-head-svc.ns.svc:10001" + assert ( + cluster.cluster_dashboard_uri() + == "http://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" + ) + cluster.config.name = "fake" + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + ) + assert ( + cluster.cluster_dashboard_uri() + == "Dashboard not available yet, have you run cluster.up()?" 
+ ) + + +def test_ray_job_wrapping(mocker): + import ray + + def ray_addr(self, *args): + return self._address + + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + cluster = cluster = createClusterWithConfig(mocker) + mocker.patch( + "ray.job_submission.JobSubmissionClient._check_connection_and_version_with_url", + return_value="None", + ) + mock_res = mocker.patch.object( + ray.job_submission.JobSubmissionClient, "list_jobs", autospec=True + ) + mock_res.side_effect = ray_addr + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval(), + ) + assert cluster.list_jobs() == cluster.cluster_dashboard_uri() + + mock_res = mocker.patch.object( + ray.job_submission.JobSubmissionClient, "get_job_status", autospec=True + ) + mock_res.side_effect = ray_addr + assert cluster.job_status("fake_id") == cluster.cluster_dashboard_uri() + + mock_res = mocker.patch.object( + ray.job_submission.JobSubmissionClient, "get_job_logs", autospec=True + ) + mock_res.side_effect = ray_addr + assert cluster.job_logs("fake_id") == cluster.cluster_dashboard_uri() + + +def test_local_client_url(mocker): + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": ""}}, + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._get_ingress_domain", + return_value="rayclient-unit-test-cluster-localinter-ns.apps.cluster.awsroute.org", + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.create_app_wrapper", + return_value="unit-test-cluster-localinter.yaml", + ) + + cluster_config = ClusterConfiguration( + name="unit-test-cluster-localinter", + namespace="ns", + ) + cluster = Cluster(cluster_config) + assert ( + cluster.local_client_url() + == "ray://rayclient-unit-test-cluster-localinter-ns.apps.cluster.awsroute.org" + ) + + +""" +get_cluster tests +""" + + +def test_get_cluster_openshift(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + # Mock the client.ApisApi function to return a mock object + mock_api = MagicMock() + mock_api.get_api_versions.return_value.groups = [ + MagicMock(versions=[MagicMock(group_version="route.openshift.io/v1")]) + ] + mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) + mocker.patch( + "codeflare_sdk.common.kueue.kueue.local_queue_exists", + return_value="true", + ) + + assert is_openshift_cluster() + + def custom_side_effect(group, version, namespace, plural, **kwargs): + if plural == "routes": + return route_list_retrieval("route.openshift.io", "v1", "ns", "routes") + elif plural == "rayclusters": + return get_ray_obj("ray.io", "v1", "ns", "rayclusters") + elif plural == "appwrappers": + return get_aw_obj("workload.codeflare.dev", "v1beta2", "ns", "appwrappers") + elif plural == "localqueues": + return get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues") + + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", get_aw_obj + ) + + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=custom_side_effect, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + return_value=get_named_aw, + ) + 
mocker.patch( + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + side_effect=route_list_retrieval("route.openshift.io", "v1", "ns", "routes")[ + "items" + ], + ) + mocker.patch( + "codeflare_sdk.common.kueue.kueue.local_queue_exists", + return_value="true", + ) + + cluster = get_cluster( + "test-cluster-a", "ns" + ) # see tests/test_cluster_yamls/support_clusters + cluster_config = cluster.config + + assert cluster_config.name == "test-cluster-a" and cluster_config.namespace == "ns" + assert cluster_config.head_cpu_requests == 2 and cluster_config.head_cpu_limits == 2 + assert ( + cluster_config.head_memory_requests == "8G" + and cluster_config.head_memory_limits == "8G" + ) + assert ( + cluster_config.worker_cpu_requests == 1 + and cluster_config.worker_cpu_limits == 1 + ) + assert ( + cluster_config.worker_memory_requests == "2G" + and cluster_config.worker_memory_limits == "2G" + ) + assert cluster_config.num_workers == 1 + assert cluster_config.write_to_file == False + assert cluster_config.local_queue == "local_default_queue" + + +def test_get_cluster(mocker): + # test get_cluster for Kind Clusters + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=get_ray_obj, + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + side_effect=get_named_aw, + ) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), + ) + mocker.patch( + "codeflare_sdk.common.kueue.kueue.local_queue_exists", + return_value="true", + ) + cluster = get_cluster( + "test-cluster-a" + ) # see tests/test_cluster_yamls/support_clusters + cluster_config = cluster.config + + assert cluster_config.name == "test-cluster-a" and cluster_config.namespace == "ns" + assert cluster_config.head_cpu_requests == 2 and cluster_config.head_cpu_limits == 2 + assert ( + cluster_config.head_memory_requests == "8G" + and cluster_config.head_memory_limits == "8G" + ) + assert ( + cluster_config.worker_cpu_requests == 1 + and cluster_config.worker_cpu_limits == 1 + ) + assert ( + cluster_config.worker_memory_requests == "2G" + and cluster_config.worker_memory_limits == "2G" + ) + assert cluster_config.num_workers == 1 + assert cluster_config.write_to_file == False + assert cluster_config.local_queue == "local_default_queue" + + +def test_wait_ready(mocker, capsys): + from codeflare_sdk.ray.cluster.status import CodeFlareClusterStatus + + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + return_value=ingress_retrieval(), + ) + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._app_wrapper_status", return_value=None + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=None + ) + mocker.patch( + "codeflare_sdk.common.kueue.kueue.local_queue_exists", + return_value="true", + ) + mocker.patch.object( + client.CustomObjectsApi, + "list_namespaced_custom_object", + return_value={ + "items": [ + { + "metadata": {"name": "ray-dashboard-test"}, + "spec": {"host": "mocked-host"}, + } + ] + }, + ) + mock_response = mocker.Mock() + mock_response.status_code = 200 + mocker.patch("requests.get", return_value=mock_response) 
+ cf = Cluster( + ClusterConfiguration( + name="test", + namespace="ns", + write_to_file=False, + appwrapper=True, + local_queue="local-queue-default", + ) + ) + try: + cf.wait_ready(timeout=5) + assert 1 == 0 + except Exception as e: + assert type(e) == TimeoutError + + captured = capsys.readouterr() + assert ( + "WARNING: Current cluster status is unknown, have you run cluster.up yet?" + in captured.out + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.status", + return_value=(True, CodeFlareClusterStatus.READY), + ) + cf.wait_ready() + captured = capsys.readouterr() + assert ( + captured.out + == "Waiting for requested resources to be set up...\nRequested cluster is up and running!\nDashboard is ready!\n" + ) + cf.wait_ready(dashboard_check=False) + captured = capsys.readouterr() + assert ( + captured.out + == "Waiting for requested resources to be set up...\nRequested cluster is up and running!\n" + ) + + +def test_list_queue_appwrappers(mocker, capsys): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_obj_none( + "workload.codeflare.dev", "v1beta2", "ns", "appwrappers" + ), + ) + list_all_queued("ns", appwrapper=True) + captured = capsys.readouterr() + assert captured.out == ( + "╭──────────────────────────────────────────────────────────────────────────────╮\n" + "│ No resources found, have you run cluster.up() yet? │\n" + "╰──────────────────────────────────────────────────────────────────────────────╯\n" + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_aw_obj_with_status( + "workload.codeflare.dev", "v1beta2", "ns", "appwrappers" + ), + ) + list_all_queued("ns", appwrapper=True) + captured = capsys.readouterr() + print(captured.out) + assert captured.out == ( + "╭────────────────────────────────╮\n" + "│ 🚀 Cluster Queue Status 🚀 │\n" + "│ +----------------+-----------+ │\n" + "│ | Name | Status | │\n" + "│ +================+===========+ │\n" + "│ | test-cluster-a | running | │\n" + "│ | | | │\n" + "│ | test-cluster-b | suspended | │\n" + "│ | | | │\n" + "│ +----------------+-----------+ │\n" + "╰────────────────────────────────╯\n" + ) + + +def test_list_queue_rayclusters(mocker, capsys): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mock_api = MagicMock() + mock_api.get_api_versions.return_value.groups = [ + MagicMock(versions=[MagicMock(group_version="route.openshift.io/v1")]) + ] + mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) + + assert is_openshift_cluster() == True + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_obj_none("ray.io", "v1", "ns", "rayclusters"), + ) + + list_all_queued("ns") + captured = capsys.readouterr() + assert captured.out == ( + "╭──────────────────────────────────────────────────────────────────────────────╮\n" + "│ No resources found, have you run cluster.up() yet? 
│\n" + "╰──────────────────────────────────────────────────────────────────────────────╯\n" + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_ray_obj_with_status("ray.io", "v1", "ns", "rayclusters"), + ) + + list_all_queued("ns") + captured = capsys.readouterr() + # print(captured.out) -> useful for updating the test + assert captured.out == ( + "╭────────────────────────────────╮\n" + "│ 🚀 Cluster Queue Status 🚀 │\n" + "│ +----------------+-----------+ │\n" + "│ | Name | Status | │\n" + "│ +================+===========+ │\n" + "│ | test-cluster-a | ready | │\n" + "│ | | | │\n" + "│ | test-rc-b | suspended | │\n" + "│ | | | │\n" + "│ +----------------+-----------+ │\n" + "╰────────────────────────────────╯\n" + ) + + +def test_list_clusters(mocker, capsys): + from codeflare_sdk.ray.cluster.cluster import list_all_clusters + + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=get_obj_none, + ) + mocker.patch( + "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", + ) + list_all_clusters("ns") + captured = capsys.readouterr() + assert captured.out == ( + "╭──────────────────────────────────────────────────────────────────────────────╮\n" + "│ No resources found, have you run cluster.up() yet? │\n" + "╰──────────────────────────────────────────────────────────────────────────────╯\n" + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=get_ray_obj, + ) + list_all_clusters("ns") + captured = capsys.readouterr() + # print(captured.out) -> useful for updating the test + assert captured.out == ( + " 🚀 CodeFlare Cluster Details 🚀 \n" + " \n" + " ╭──────────────────────────────────────────────────────────────────╮ \n" + " │ Name │ \n" + " │ test-cluster-a Inactive ❌ │ \n" + " │ │ \n" + " │ URI: ray://test-cluster-a-head-svc.ns.svc:10001 │ \n" + " │ │ \n" + " │ Dashboard🔗 │ \n" + " │ │ \n" + " │ Cluster Resources │ \n" + " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" + " │ │ # Workers │ │ Memory CPU GPU │ │ \n" + " │ │ │ │ │ │ \n" + " │ │ 1 │ │ 2G~2G 1~1 0 │ │ \n" + " │ │ │ │ │ │ \n" + " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" + " ╰──────────────────────────────────────────────────────────────────╯ \n" + "╭───────────────────────────────────────────────────────────────╮\n" + "│ Name │\n" + "│ test-rc-b Inactive ❌ │\n" + "│ │\n" + "│ URI: ray://test-rc-b-head-svc.ns.svc:10001 │\n" + "│ │\n" + "│ Dashboard🔗 │\n" + "│ │\n" + "│ Cluster Resources │\n" + "│ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │\n" + "│ │ # Workers │ │ Memory CPU GPU │ │\n" + "│ │ │ │ │ │\n" + "│ │ 1 │ │ 2G~2G 1~1 0 │ │\n" + "│ │ │ │ │ │\n" + "│ ╰─────────────╯ ╰──────────────────────────────────────╯ │\n" + "╰───────────────────────────────────────────────────────────────╯\n" + ) + + +def test_map_to_ray_cluster(mocker): + from codeflare_sdk.ray.cluster.cluster import _map_to_ray_cluster + + mocker.patch("kubernetes.config.load_kube_config") + + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.is_openshift_cluster", return_value=True + ) + + mock_api_client = mocker.MagicMock(spec=client.ApiClient) + mocker.patch( + "codeflare_sdk.common.kubernetes_cluster.auth.get_api_client", + return_value=mock_api_client, + ) + + mock_routes = { + "items": [ + { + "apiVersion": 
"route.openshift.io/v1", + "kind": "Route", + "metadata": { + "name": "ray-dashboard-test-cluster-a", + "namespace": "ns", + }, + "spec": {"host": "ray-dashboard-test-cluster-a"}, + }, + ] + } + + def custom_side_effect(group, version, namespace, plural, **kwargs): + if plural == "routes": + return mock_routes + elif plural == "rayclusters": + return get_ray_obj("ray.io", "v1", "ns", "rayclusters") + + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=custom_side_effect, + ) + + rc = get_ray_obj("ray.io", "v1", "ns", "rayclusters")["items"][0] + rc_name = rc["metadata"]["name"] + rc_dashboard = f"http://ray-dashboard-{rc_name}" + + result = _map_to_ray_cluster(rc) + + assert result is not None + assert result.dashboard == rc_dashboard diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py new file mode 100644 index 00000000..f1ac5355 --- /dev/null +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -0,0 +1,170 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from codeflare_sdk.common.utils.unit_test_support import createClusterWrongType +from codeflare_sdk.ray.cluster.cluster import ClusterConfiguration, Cluster +from pathlib import Path +from unittest.mock import patch +import filecmp +import pytest +import yaml +import os + +parent = Path(__file__).resolve().parents[4] # project directory +expected_clusters_dir = f"{parent}/tests/test_cluster_yamls" +aw_dir = os.path.expanduser("~/.codeflare/resources/") + + +def test_default_cluster_creation(mocker): + # Create a Ray Cluster using the default config variables + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") + + cluster = Cluster( + ClusterConfiguration( + name="default-cluster", + namespace="ns", + ) + ) + + test_rc = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) + with open(f"{expected_clusters_dir}/ray/default-ray-cluster.yaml") as f: + expected_rc = yaml.load(f, Loader=yaml.FullLoader) + assert test_rc == expected_rc + + +def test_default_appwrapper_creation(mocker): + # Create an AppWrapper using the default config variables + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") + + cluster = Cluster( + ClusterConfiguration(name="default-appwrapper", namespace="ns", appwrapper=True) + ) + + test_aw = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) + with open(f"{expected_clusters_dir}/ray/default-appwrapper.yaml") as f: + expected_aw = yaml.load(f, Loader=yaml.FullLoader) + assert test_aw == expected_aw + + +@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) +def test_config_creation_all_parameters(mocker): + from codeflare_sdk.ray.cluster.config import DEFAULT_RESOURCE_MAPPING + + mocker.patch( + "codeflare_sdk.common.kueue.kueue.local_queue_exists", + return_value="true", + ) + 
extended_resource_mapping = DEFAULT_RESOURCE_MAPPING + extended_resource_mapping.update({"example.com/gpu": "GPU"}) + + config = ClusterConfiguration( + name="test-all-params", + namespace="ns", + head_info=["test1", "test2"], + head_cpu_requests=4, + head_cpu_limits=8, + head_memory_requests=12, + head_memory_limits=16, + head_extended_resource_requests={"nvidia.com/gpu": 1}, + machine_types={"gpu.small", "gpu.large"}, + worker_cpu_requests=4, + worker_cpu_limits=8, + num_workers=10, + worker_memory_requests=12, + worker_memory_limits=16, + template=f"{parent}/src/codeflare_sdk/ray/templates/base-template.yaml", + appwrapper=False, + envs={"key1": "value1", "key2": "value2"}, + image="example/ray:tag", + image_pull_secrets=["secret1", "secret2"], + write_to_file=True, + verify_tls=True, + labels={"key1": "value1", "key2": "value2"}, + worker_extended_resource_requests={"nvidia.com/gpu": 1}, + extended_resource_mapping=extended_resource_mapping, + overwrite_default_resource_mapping=True, + local_queue="local-queue-default", + ) + Cluster(config) + + assert config.name == "test-all-params" and config.namespace == "ns" + assert config.head_info == ["test1", "test2"] + assert config.head_cpu_requests == 4 + assert config.head_cpu_limits == 8 + assert config.head_memory_requests == "12G" + assert config.head_memory_limits == "16G" + assert config.head_extended_resource_requests == {"nvidia.com/gpu": 1} + assert config.machine_types == {"gpu.small", "gpu.large"} + assert config.worker_cpu_requests == 4 + assert config.worker_cpu_limits == 8 + assert config.num_workers == 10 + assert config.worker_memory_requests == "12G" + assert config.worker_memory_limits == "16G" + assert ( + config.template + == f"{parent}/src/codeflare_sdk/ray/templates/base-template.yaml" + ) + assert config.appwrapper == False + assert config.envs == {"key1": "value1", "key2": "value2"} + assert config.image == "example/ray:tag" + assert config.image_pull_secrets == ["secret1", "secret2"] + assert config.write_to_file == True + assert config.verify_tls == True + assert config.labels == {"key1": "value1", "key2": "value2"} + assert config.worker_extended_resource_requests == {"nvidia.com/gpu": 1} + assert config.extended_resource_mapping == extended_resource_mapping + assert config.overwrite_default_resource_mapping == True + assert config.local_queue == "local-queue-default" + + assert filecmp.cmp( + f"{aw_dir}test-all-params.yaml", + f"{expected_clusters_dir}/ray/unit-test-all-params.yaml", + shallow=True, + ) + + +def test_config_creation_wrong_type(): + with pytest.raises(TypeError): + createClusterWrongType() + + +def test_cluster_config_deprecation_conversion(mocker): + config = ClusterConfiguration( + name="test", + num_gpus=2, + head_gpus=1, + head_cpus=3, + head_memory=16, + min_memory=3, + max_memory=4, + min_cpus=1, + max_cpus=2, + ) + assert config.head_cpu_requests == 3 + assert config.head_cpu_limits == 3 + assert config.head_memory_requests == "16G" + assert config.head_memory_limits == "16G" + assert config.worker_extended_resource_requests == {"nvidia.com/gpu": 2} + assert config.head_extended_resource_requests == {"nvidia.com/gpu": 1} + assert config.worker_memory_requests == "3G" + assert config.worker_memory_limits == "4G" + assert config.worker_cpu_requests == 1 + assert config.worker_cpu_limits == 2 + + +# Make sure to always keep this function last +def test_cleanup(): + os.remove(f"{aw_dir}test-all-params.yaml") diff --git a/src/codeflare_sdk/ray/cluster/test_generate_yaml.py 
b/src/codeflare_sdk/ray/cluster/test_generate_yaml.py new file mode 100644 index 00000000..68c6aa89 --- /dev/null +++ b/src/codeflare_sdk/ray/cluster/test_generate_yaml.py @@ -0,0 +1,34 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from codeflare_sdk.ray.cluster.generate_yaml import gen_names +import uuid + + +def test_gen_names_with_name(mocker): + mocker.patch.object( + uuid, "uuid4", return_value=uuid.UUID("00000000-0000-0000-0000-000000000001") + ) + name = "myname" + appwrapper_name, cluster_name = gen_names(name) + assert appwrapper_name == name + assert cluster_name == name + + +def test_gen_names_without_name(mocker): + mocker.patch.object( + uuid, "uuid4", return_value=uuid.UUID("00000000-0000-0000-0000-000000000001") + ) + appwrapper_name, cluster_name = gen_names(None) + assert appwrapper_name.startswith("appwrapper-") + assert cluster_name.startswith("cluster-") diff --git a/src/codeflare_sdk/ray/cluster/test_pretty_print.py b/src/codeflare_sdk/ray/cluster/test_pretty_print.py new file mode 100644 index 00000000..b0da4201 --- /dev/null +++ b/src/codeflare_sdk/ray/cluster/test_pretty_print.py @@ -0,0 +1,208 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from codeflare_sdk.ray.cluster.pretty_print import ( + print_app_wrappers_status, + print_cluster_status, + print_clusters, + print_no_resources_found, +) +from codeflare_sdk.ray.appwrapper.status import AppWrapperStatus, AppWrapper +from codeflare_sdk.ray.cluster.status import ( + RayCluster, + RayClusterStatus, + CodeFlareClusterStatus, +) +from codeflare_sdk.ray.cluster.cluster import ( + Cluster, + ClusterConfiguration, + _copy_to_ray, +) + + +def test_print_no_resources(capsys): + try: + print_no_resources_found() + except Exception: + assert 1 == 0 + captured = capsys.readouterr() + assert captured.out == ( + "╭──────────────────────────────────────────────────────────────────────────────╮\n" + "│ No resources found, have you run cluster.up() yet? 
│\n" + "╰──────────────────────────────────────────────────────────────────────────────╯\n" + ) + + +def test_print_appwrappers(capsys): + aw1 = AppWrapper( + name="awtest1", + status=AppWrapperStatus.SUSPENDED, + ) + aw2 = AppWrapper( + name="awtest2", + status=AppWrapperStatus.RUNNING, + ) + try: + print_app_wrappers_status([aw1, aw2]) + except Exception: + assert 1 == 0 + captured = capsys.readouterr() + assert captured.out == ( + "╭─────────────────────────╮\n" + "│ 🚀 Cluster Queue │\n" + "│ Status 🚀 │\n" + "│ +---------+-----------+ │\n" + "│ | Name | Status | │\n" + "│ +=========+===========+ │\n" + "│ | awtest1 | suspended | │\n" + "│ | | | │\n" + "│ | awtest2 | running | │\n" + "│ | | | │\n" + "│ +---------+-----------+ │\n" + "╰─────────────────────────╯\n" + ) + + +def test_ray_details(mocker, capsys): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + ray1 = RayCluster( + name="raytest1", + status=RayClusterStatus.READY, + num_workers=1, + worker_mem_requests="2G", + worker_mem_limits="2G", + worker_cpu_requests=1, + worker_cpu_limits=1, + namespace="ns", + dashboard="fake-uri", + head_cpu_requests=2, + head_cpu_limits=2, + head_mem_requests=8, + head_mem_limits=8, + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.status", + return_value=(False, CodeFlareClusterStatus.UNKNOWN), + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.cluster_dashboard_uri", + return_value="", + ) + mocker.patch( + "codeflare_sdk.common.kueue.kueue.local_queue_exists", + return_value="true", + ) + cf = Cluster( + ClusterConfiguration( + name="raytest2", + namespace="ns", + appwrapper=True, + local_queue="local_default_queue", + ) + ) + captured = capsys.readouterr() + ray2 = _copy_to_ray(cf) + details = cf.details() + assert details == ray2 + assert ray2.name == "raytest2" + assert ray1.namespace == ray2.namespace + assert ray1.num_workers == ray2.num_workers + assert ray1.worker_mem_requests == ray2.worker_mem_requests + assert ray1.worker_mem_limits == ray2.worker_mem_limits + assert ray1.worker_cpu_requests == ray2.worker_cpu_requests + assert ray1.worker_cpu_limits == ray2.worker_cpu_limits + assert ray1.worker_extended_resources == ray2.worker_extended_resources + try: + print_clusters([ray1, ray2]) + print_cluster_status(ray1) + print_cluster_status(ray2) + except Exception: + assert 0 == 1 + captured = capsys.readouterr() + assert captured.out == ( + " 🚀 CodeFlare Cluster Details 🚀 \n" + " \n" + " ╭───────────────────────────────────────────────────────────────╮ \n" + " │ Name │ \n" + " │ raytest2 Inactive ❌ │ \n" + " │ │ \n" + " │ URI: ray://raytest2-head-svc.ns.svc:10001 │ \n" + " │ │ \n" + " │ Dashboard🔗 │ \n" + " │ │ \n" + " │ Cluster Resources │ \n" + " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" + " │ │ # Workers │ │ Memory CPU GPU │ │ \n" + " │ │ │ │ │ │ \n" + " │ │ 1 │ │ 2G~2G 1~1 0 │ │ \n" + " │ │ │ │ │ │ \n" + " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" + " ╰───────────────────────────────────────────────────────────────╯ \n" + " 🚀 CodeFlare Cluster Details 🚀 \n" + " \n" + " ╭───────────────────────────────────────────────────────────────╮ \n" + " │ Name │ \n" + " │ raytest1 Active ✅ │ \n" + " │ │ \n" + " │ URI: ray://raytest1-head-svc.ns.svc:10001 │ \n" + " │ │ \n" + " │ Dashboard🔗 │ \n" + " │ │ \n" + " │ Cluster Resources │ \n" + " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" + " │ │ # Workers │ │ Memory CPU GPU │ │ \n" + " │ │ │ │ │ │ \n" + " │ │ 1 │ │ 2G~2G 1~1 0 │ │ \n" + " │ │ 
│ │ │ │ \n" + " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" + " ╰───────────────────────────────────────────────────────────────╯ \n" + "╭───────────────────────────────────────────────────────────────╮\n" + "│ Name │\n" + "│ raytest2 Inactive ❌ │\n" + "│ │\n" + "│ URI: ray://raytest2-head-svc.ns.svc:10001 │\n" + "│ │\n" + "│ Dashboard🔗 │\n" + "│ │\n" + "│ Cluster Resources │\n" + "│ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │\n" + "│ │ # Workers │ │ Memory CPU GPU │ │\n" + "│ │ │ │ │ │\n" + "│ │ 1 │ │ 2G~2G 1~1 0 │ │\n" + "│ │ │ │ │ │\n" + "│ ╰─────────────╯ ╰──────────────────────────────────────╯ │\n" + "╰───────────────────────────────────────────────────────────────╯\n" + " 🚀 CodeFlare Cluster Status 🚀 \n" + " \n" + " ╭──────────────────────────────────────────────────────────╮ \n" + " │ Name │ \n" + " │ raytest1 Active ✅ │ \n" + " │ │ \n" + " │ URI: ray://raytest1-head-svc.ns.svc:10001 │ \n" + " │ │ \n" + " │ Dashboard🔗 │ \n" + " │ │ \n" + " ╰──────────────────────────────────────────────────────────╯ \n" + " 🚀 CodeFlare Cluster Status 🚀 \n" + " \n" + " ╭────────────────────────────────────────────────────────────╮ \n" + " │ Name │ \n" + " │ raytest2 Inactive ❌ │ \n" + " │ │ \n" + " │ URI: ray://raytest2-head-svc.ns.svc:10001 │ \n" + " │ │ \n" + " │ Dashboard🔗 │ \n" + " │ │ \n" + " ╰────────────────────────────────────────────────────────────╯ \n" + ) diff --git a/src/codeflare_sdk/ray/cluster/test_status.py b/src/codeflare_sdk/ray/cluster/test_status.py new file mode 100644 index 00000000..146d2190 --- /dev/null +++ b/src/codeflare_sdk/ray/cluster/test_status.py @@ -0,0 +1,114 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from codeflare_sdk.ray.cluster.cluster import ( + Cluster, + ClusterConfiguration, + _ray_cluster_status, +) +from codeflare_sdk.ray.cluster.status import ( + CodeFlareClusterStatus, + RayClusterStatus, + RayCluster, +) +import os + +aw_dir = os.path.expanduser("~/.codeflare/resources/") + + +def test_cluster_status(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "codeflare_sdk.common.kueue.kueue.local_queue_exists", + return_value="true", + ) + + fake_ray = RayCluster( + name="test", + status=RayClusterStatus.UNKNOWN, + num_workers=1, + worker_mem_requests=2, + worker_mem_limits=2, + worker_cpu_requests=1, + worker_cpu_limits=1, + namespace="ns", + dashboard="fake-uri", + head_cpu_requests=2, + head_cpu_limits=2, + head_mem_requests=8, + head_mem_limits=8, + ) + cf = Cluster( + ClusterConfiguration( + name="test", + namespace="ns", + write_to_file=True, + appwrapper=False, + local_queue="local_default_queue", + ) + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=None + ) + status, ready = cf.status() + assert status == CodeFlareClusterStatus.UNKNOWN + assert ready == False + + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=fake_ray + ) + + status, ready = cf.status() + assert status == CodeFlareClusterStatus.STARTING + assert ready == False + + fake_ray.status = RayClusterStatus.FAILED + status, ready = cf.status() + assert status == CodeFlareClusterStatus.FAILED + assert ready == False + + fake_ray.status = RayClusterStatus.UNHEALTHY + status, ready = cf.status() + assert status == CodeFlareClusterStatus.FAILED + assert ready == False + + fake_ray.status = RayClusterStatus.READY + status, ready = cf.status() + assert status == CodeFlareClusterStatus.READY + assert ready == True + + +def rc_status_fields(group, version, namespace, plural, *args): + assert group == "ray.io" + assert version == "v1" + assert namespace == "test-ns" + assert plural == "rayclusters" + assert args == tuple() + return {"items": []} + + +def test_rc_status(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + side_effect=rc_status_fields, + ) + rc = _ray_cluster_status("test-rc", "test-ns") + assert rc == None + + +# Make sure to always keep this function last +def test_cleanup(): + os.remove(f"{aw_dir}test.yaml") diff --git a/tests/demo_test.py b/tests/demo_test.py deleted file mode 100644 index b5453058..00000000 --- a/tests/demo_test.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2022 IBM, Red Hat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -These were the old tests used during initial demo building, and they will soon be fully deprecated. 
-""" - -from codeflare_sdk.ray.cluster.cluster import ( - list_all_clusters, - list_all_queued, - _app_wrapper_status, -) -from codeflare_sdk.ray.cluster.cluster import Cluster, ClusterConfiguration - -import time - -# FIXME - These tests currently assume OC logged in, and not self-contained unit/funcitonal tests - - -def test_cluster_up(): - cluster = Cluster(ClusterConfiguration(name="raycluster-autoscaler")) - cluster.up() - time.sleep(15) - - -def test_list_clusters(): - clusters = list_all_clusters() - - -def test_cluster_status(): - cluster = Cluster(ClusterConfiguration(name="raycluster-autoscaler")) - cluster.status() - - -def test_app_wrapper_status(): - print(_app_wrapper_status("raycluster-autoscaler")) - - -def test_cluster_down(): - cluster = Cluster(ClusterConfiguration(name="raycluster-autoscaler")) - cluster.down() - - -def test_no_resources_found(): - from codeflare_sdk.ray.cluster import pretty_print - - pretty_print.print_no_resources_found() - - -def test_list_app_wrappers(): - app_wrappers = list_all_queued() diff --git a/tests/func_test.py b/tests/func_test.py deleted file mode 100644 index 6b5799c3..00000000 --- a/tests/func_test.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2022 IBM, Red Hat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from pathlib import Path -import sys - -parent = Path(__file__).resolve().parents[1] -sys.path.append(str(parent) + "/src") - -# COMING SOON! 
diff --git a/tests/test-case-bad.yaml b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml similarity index 100% rename from tests/test-case-bad.yaml rename to tests/test_cluster_yamls/appwrapper/test-case-bad.yaml diff --git a/tests/test-case.yaml b/tests/test_cluster_yamls/kueue/aw_kueue.yaml similarity index 93% rename from tests/test-case.yaml rename to tests/test_cluster_yamls/kueue/aw_kueue.yaml index c03422cf..2c6d868a 100644 --- a/tests/test-case.yaml +++ b/tests/test_cluster_yamls/kueue/aw_kueue.yaml @@ -3,7 +3,7 @@ kind: AppWrapper metadata: labels: kueue.x-k8s.io/queue-name: local-queue-default - name: unit-test-cluster + name: unit-test-aw-kueue namespace: ns spec: components: @@ -13,7 +13,7 @@ spec: metadata: labels: controller-tools.k8s.io: '1.0' - name: unit-test-cluster + name: unit-test-aw-kueue namespace: ns spec: autoscalerOptions: @@ -76,8 +76,7 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: - - name: unit-test-pull-secret + imagePullSecrets: [] volumes: - configMap: items: @@ -95,12 +94,12 @@ spec: name: odh-ca-cert rayVersion: 2.35.0 workerGroupSpecs: - - groupName: small-group-unit-test-cluster + - groupName: small-group-unit-test-aw-kueue maxReplicas: 2 minReplicas: 2 rayStartParams: block: 'true' - num-gpus: '7' + num-gpus: '0' resources: '"{}"' replicas: 2 template: @@ -124,11 +123,9 @@ spec: limits: cpu: 4 memory: 6G - nvidia.com/gpu: 7 requests: cpu: 3 memory: 5G - nvidia.com/gpu: 7 volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -142,8 +139,7 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: - - name: unit-test-pull-secret + imagePullSecrets: [] volumes: - configMap: items: diff --git a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml new file mode 100644 index 00000000..0c4efb29 --- /dev/null +++ b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml @@ -0,0 +1,157 @@ +apiVersion: workload.codeflare.dev/v1beta2 +kind: AppWrapper +metadata: + labels: + kueue.x-k8s.io/queue-name: local-queue-default + name: unit-test-cluster-kueue + namespace: ns +spec: + components: + - template: + apiVersion: ray.io/v1 + kind: RayCluster + metadata: + labels: + controller-tools.k8s.io: '1.0' + name: unit-test-cluster-kueue + namespace: ns + spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + enableIngress: false + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + resources: '"{}"' + serviceType: ClusterIP + template: + spec: + containers: + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + requests: + cpu: 2 + memory: 8G + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: 
odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + rayVersion: 2.35.0 + workerGroupSpecs: + - groupName: small-group-unit-test-cluster-kueue + maxReplicas: 2 + minReplicas: 2 + rayStartParams: + block: 'true' + num-gpus: '0' + resources: '"{}"' + replicas: 2 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 4 + memory: 6G + requests: + cpu: 3 + memory: 5G + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert diff --git a/tests/test_cluster_yamls/ray/default-appwrapper.yaml b/tests/test_cluster_yamls/ray/default-appwrapper.yaml new file mode 100644 index 00000000..60152c1e --- /dev/null +++ b/tests/test_cluster_yamls/ray/default-appwrapper.yaml @@ -0,0 +1,155 @@ +apiVersion: workload.codeflare.dev/v1beta2 +kind: AppWrapper +metadata: + name: default-appwrapper + namespace: ns +spec: + components: + - template: + apiVersion: ray.io/v1 + kind: RayCluster + metadata: + labels: + controller-tools.k8s.io: '1.0' + name: default-appwrapper + namespace: ns + spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + enableIngress: false + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + resources: '"{}"' + serviceType: ClusterIP + template: + spec: + containers: + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + requests: + cpu: 2 + memory: 8G + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: 
odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + rayVersion: 2.35.0 + workerGroupSpecs: + - groupName: small-group-default-appwrapper + maxReplicas: 1 + minReplicas: 1 + rayStartParams: + block: 'true' + num-gpus: '0' + resources: '"{}"' + replicas: 1 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 1 + memory: 2G + requests: + cpu: 1 + memory: 2G + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert diff --git a/tests/test-case-no-kueue-no-aw.yaml b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml similarity index 88% rename from tests/test-case-no-kueue-no-aw.yaml rename to tests/test_cluster_yamls/ray/default-ray-cluster.yaml index ea90a275..7a3329b6 100644 --- a/tests/test-case-no-kueue-no-aw.yaml +++ b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml @@ -1,11 +1,9 @@ apiVersion: ray.io/v1 kind: RayCluster metadata: - annotations: - app.kubernetes.io/managed-by: test-prefix labels: controller-tools.k8s.io: '1.0' - name: unit-test-no-kueue + name: default-cluster namespace: ns spec: autoscalerOptions: @@ -68,8 +66,7 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: - - name: unit-test-pull-secret + imagePullSecrets: [] volumes: - configMap: items: @@ -87,14 +84,14 @@ spec: name: odh-ca-cert rayVersion: 2.35.0 workerGroupSpecs: - - groupName: small-group-unit-test-no-kueue - maxReplicas: 2 - minReplicas: 2 + - groupName: small-group-default-cluster + maxReplicas: 1 + minReplicas: 1 rayStartParams: block: 'true' - num-gpus: '7' + num-gpus: '0' resources: '"{}"' - replicas: 2 + replicas: 1 template: metadata: annotations: @@ -114,13 +111,11 @@ spec: name: machine-learning resources: limits: - cpu: 4 - memory: 6G - nvidia.com/gpu: 7 + cpu: 1 + memory: 2G requests: - cpu: 3 - memory: 5G - nvidia.com/gpu: 7 + cpu: 1 + memory: 2G volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -134,8 +129,7 @@ spec: - mountPath: 
/etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: - - name: unit-test-pull-secret + imagePullSecrets: [] volumes: - configMap: items: diff --git a/tests/test-case-custom-image.yaml b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml similarity index 83% rename from tests/test-case-custom-image.yaml rename to tests/test_cluster_yamls/ray/unit-test-all-params.yaml index d7e52507..eda7270f 100644 --- a/tests/test-case-custom-image.yaml +++ b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml @@ -5,10 +5,10 @@ metadata: app.kubernetes.io/managed-by: test-prefix labels: controller-tools.k8s.io: '1.0' + key1: value1 + key2: value2 kueue.x-k8s.io/queue-name: local-queue-default - testlabel: test - testlabel2: test - name: unit-test-cluster-custom-image + name: test-all-params namespace: ns spec: autoscalerOptions: @@ -28,13 +28,16 @@ spec: rayStartParams: block: 'true' dashboard-host: 0.0.0.0 - num-gpus: '0' + num-gpus: '1' resources: '"{}"' serviceType: ClusterIP template: spec: containers: - - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + - env: &id001 + key1: value1 + key2: value2 + image: example/ray:tag imagePullPolicy: Always lifecycle: preStop: @@ -53,11 +56,13 @@ spec: name: client resources: limits: - cpu: 2 - memory: 8G + cpu: 8 + memory: 16G + nvidia.com/gpu: 1 requests: - cpu: 2 - memory: 8G + cpu: 4 + memory: 12G + nvidia.com/gpu: 1 volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -72,7 +77,8 @@ spec: name: odh-ca-cert subPath: odh-ca-bundle.crt imagePullSecrets: - - name: unit-test-pull-secret + - name: secret1 + - name: secret2 volumes: - configMap: items: @@ -90,14 +96,14 @@ spec: name: odh-ca-cert rayVersion: 2.35.0 workerGroupSpecs: - - groupName: small-group-unit-test-cluster-custom-image - maxReplicas: 2 - minReplicas: 2 + - groupName: small-group-test-all-params + maxReplicas: 10 + minReplicas: 10 rayStartParams: block: 'true' - num-gpus: '7' + num-gpus: '1' resources: '"{}"' - replicas: 2 + replicas: 10 template: metadata: annotations: @@ -106,7 +112,8 @@ spec: key: value spec: containers: - - image: quay.io/project-codeflare/ray:2.20.0-py39-cu118 + - env: *id001 + image: example/ray:tag lifecycle: preStop: exec: @@ -117,13 +124,13 @@ spec: name: machine-learning resources: limits: - cpu: 4 - memory: 6G - nvidia.com/gpu: 7 + cpu: 8 + memory: 16G + nvidia.com/gpu: 1 requests: - cpu: 3 - memory: 5G - nvidia.com/gpu: 7 + cpu: 4 + memory: 12G + nvidia.com/gpu: 1 volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -138,7 +145,8 @@ spec: name: odh-ca-cert subPath: odh-ca-bundle.crt imagePullSecrets: - - name: unit-test-pull-secret + - name: secret1 + - name: secret2 volumes: - configMap: items: diff --git a/tests/test-default-appwrapper.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml similarity index 95% rename from tests/test-default-appwrapper.yaml rename to tests/test_cluster_yamls/support_clusters/test-aw-a.yaml index 0780a46e..9b8a647f 100644 --- a/tests/test-default-appwrapper.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml @@ -2,9 +2,9 @@ apiVersion: workload.codeflare.dev/v1beta2 kind: AppWrapper metadata: labels: - kueue.x-k8s.io/queue-name: local-queue-default - name: unit-test-default-cluster - namespace: opendatahub + kueue.x-k8s.io/queue-name: local_default_queue + name: test-cluster-a + namespace: ns spec: components: - template: @@ -13,8 +13,8 @@ spec: metadata: 
labels: controller-tools.k8s.io: '1.0' - name: unit-test-default-cluster - namespace: opendatahub + name: test-cluster-a + namespace: ns spec: autoscalerOptions: idleTimeoutSeconds: 60 @@ -38,7 +38,6 @@ spec: serviceType: ClusterIP template: spec: - imagePullSecrets: [] containers: - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 imagePullPolicy: Always @@ -77,6 +76,7 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt + imagePullSecrets: [] volumes: - configMap: items: @@ -94,7 +94,7 @@ spec: name: odh-ca-cert rayVersion: 2.35.0 workerGroupSpecs: - - groupName: small-group-unit-test-default-cluster + - groupName: small-group-test-cluster-a maxReplicas: 1 minReplicas: 1 rayStartParams: @@ -109,7 +109,6 @@ spec: labels: key: value spec: - imagePullSecrets: [] containers: - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 lifecycle: @@ -140,6 +139,7 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt + imagePullSecrets: [] volumes: - configMap: items: diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml new file mode 100644 index 00000000..763eb5c2 --- /dev/null +++ b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml @@ -0,0 +1,157 @@ +apiVersion: workload.codeflare.dev/v1beta2 +kind: AppWrapper +metadata: + labels: + kueue.x-k8s.io/queue-name: local_default_queue + name: test-cluster-b + namespace: ns +spec: + components: + - template: + apiVersion: ray.io/v1 + kind: RayCluster + metadata: + labels: + controller-tools.k8s.io: '1.0' + name: test-cluster-b + namespace: ns + spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + enableIngress: false + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + resources: '"{}"' + serviceType: ClusterIP + template: + spec: + containers: + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + requests: + cpu: 2 + memory: 8G + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + rayVersion: 2.35.0 + workerGroupSpecs: + - groupName: small-group-test-cluster-b + maxReplicas: 1 + minReplicas: 1 + rayStartParams: + block: 
'true' + num-gpus: '0' + resources: '"{}"' + replicas: 1 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 1 + memory: 2G + requests: + cpu: 1 + memory: 2G + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert diff --git a/tests/test-case-no-mcad.yamls b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml similarity index 86% rename from tests/test-case-no-mcad.yamls rename to tests/test_cluster_yamls/support_clusters/test-rc-a.yaml index 36ce8e26..f12ffde0 100644 --- a/tests/test-case-no-mcad.yamls +++ b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml @@ -1,14 +1,10 @@ apiVersion: ray.io/v1 kind: RayCluster metadata: - annotations: - app.kubernetes.io/managed-by: test-prefix labels: controller-tools.k8s.io: '1.0' - kueue.x-k8s.io/queue-name: local-queue-default - testlabel: test - testlabel2: test - name: unit-test-cluster-ray + kueue.x-k8s.io/queue-name: local_default_queue + name: test-cluster-a namespace: ns spec: autoscalerOptions: @@ -71,8 +67,7 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: - - name: unit-test-pull-secret + imagePullSecrets: [] volumes: - configMap: items: @@ -90,14 +85,14 @@ spec: name: odh-ca-cert rayVersion: 2.35.0 workerGroupSpecs: - - groupName: small-group-unit-test-cluster-ray - maxReplicas: 2 - minReplicas: 2 + - groupName: small-group-test-cluster-a + maxReplicas: 1 + minReplicas: 1 rayStartParams: block: 'true' - num-gpus: '7' + num-gpus: '0' resources: '"{}"' - replicas: 2 + replicas: 1 template: metadata: annotations: @@ -117,13 +112,11 @@ spec: name: machine-learning resources: limits: - cpu: 4 - memory: 6G - nvidia.com/gpu: 7 + cpu: 1 + memory: 2G requests: - cpu: 3 - memory: 5G - nvidia.com/gpu: 7 + cpu: 1 + memory: 2G volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert @@ -137,8 +130,7 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: - - name: unit-test-pull-secret + imagePullSecrets: [] volumes: - configMap: items: diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml new file mode 100644 index 00000000..1d41e365 --- /dev/null +++ b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml @@ -0,0 +1,148 @@ +apiVersion: ray.io/v1 +kind: RayCluster +metadata: + labels: + controller-tools.k8s.io: '1.0' + kueue.x-k8s.io/queue-name: local_default_queue + name: test-rc-b + namespace: 
ns +spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + imagePullPolicy: Always + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + enableIngress: false + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '0' + resources: '"{}"' + serviceType: ClusterIP + template: + spec: + containers: + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 2 + memory: 8G + requests: + cpu: 2 + memory: 8G + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + rayVersion: 2.35.0 + workerGroupSpecs: + - groupName: small-group-test-rc-b + maxReplicas: 1 + minReplicas: 1 + rayStartParams: + block: 'true' + num-gpus: '0' + resources: '"{}"' + replicas: 1 + template: + metadata: + annotations: + key: value + labels: + key: value + spec: + containers: + - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 1 + memory: 2G + requests: + cpu: 1 + memory: 2G + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: [] + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert diff --git a/tests/unit_test.py b/tests/unit_test.py deleted file mode 100644 index 1f11643b..00000000 --- a/tests/unit_test.py +++ /dev/null @@ -1,3350 +0,0 @@ -# Copyright 2022 IBM, Red Hat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import filecmp -import os -import re -import sys -import uuid -from pathlib import Path - -parent = Path(__file__).resolve().parents[1] -aw_dir = os.path.expanduser("~/.codeflare/resources/") -sys.path.append(str(parent) + "/src") - -from unittest.mock import MagicMock, patch - -import openshift -import pandas as pd -import pytest -import ray -import yaml -from kubernetes import client, config -from pytest_mock import MockerFixture -from ray.job_submission import JobSubmissionClient - -import codeflare_sdk.common.widgets.widgets as cf_widgets -from codeflare_sdk.common.kubernetes_cluster import ( - Authentication, - KubeConfigFileAuthentication, - TokenAuthentication, - config_check, -) -from codeflare_sdk.common.utils.generate_cert import ( - export_env, - generate_ca_cert, - generate_tls_cert, -) -from codeflare_sdk.ray.appwrapper.awload import AWManager -from codeflare_sdk.ray.appwrapper.status import AppWrapper, AppWrapperStatus -from codeflare_sdk.ray.client.ray_jobs import RayJobClient -from codeflare_sdk.ray.cluster.cluster import ( - Cluster, - ClusterConfiguration, - _app_wrapper_status, - _copy_to_ray, - _map_to_ray_cluster, - _ray_cluster_status, - get_cluster, - list_all_clusters, - list_all_queued, -) -from codeflare_sdk.ray.cluster.generate_yaml import gen_names, is_openshift_cluster -from codeflare_sdk.ray.cluster.pretty_print import ( - print_app_wrappers_status, - print_cluster_status, - print_clusters, - print_no_resources_found, -) -from codeflare_sdk.ray.cluster.status import ( - CodeFlareClusterStatus, - RayCluster, - RayClusterStatus, -) -from tests.unit_test_support import ( - createClusterConfig, - createClusterWithConfig, - createClusterWrongType, - get_package_and_version, -) - -# For mocking openshift client results -fake_res = openshift.Result("fake") - - -def mock_routes_api(mocker): - mocker.patch.object( - "_route_api_getter", - return_value=MagicMock( - resources=MagicMock( - get=MagicMock( - return_value=MagicMock( - create=MagicMock(), - replace=MagicMock(), - delete=MagicMock(), - ) - ) - ) - ), - ) - - -def arg_side_effect(*args): - fake_res.high_level_operation = args - return fake_res - - -def att_side_effect(self): - return self.high_level_operation - - -def test_token_auth_creation(): - try: - token_auth = TokenAuthentication(token="token", server="server") - assert token_auth.token == "token" - assert token_auth.server == "server" - assert token_auth.skip_tls == False - assert token_auth.ca_cert_path == None - - token_auth = TokenAuthentication(token="token", server="server", skip_tls=True) - assert token_auth.token == "token" - assert token_auth.server == "server" - assert token_auth.skip_tls == True - assert token_auth.ca_cert_path == None - - os.environ["CF_SDK_CA_CERT_PATH"] = "/etc/pki/tls/custom-certs/ca-bundle.crt" - token_auth = TokenAuthentication(token="token", server="server", skip_tls=False) - assert token_auth.token == "token" - assert token_auth.server == "server" - assert token_auth.skip_tls == False - assert token_auth.ca_cert_path == "/etc/pki/tls/custom-certs/ca-bundle.crt" - os.environ.pop("CF_SDK_CA_CERT_PATH") - - 
token_auth = TokenAuthentication( - token="token", - server="server", - skip_tls=False, - ca_cert_path=f"{parent}/tests/auth-test.crt", - ) - assert token_auth.token == "token" - assert token_auth.server == "server" - assert token_auth.skip_tls == False - assert token_auth.ca_cert_path == f"{parent}/tests/auth-test.crt" - - except Exception: - assert 0 == 1 - - -def test_token_auth_login_logout(mocker): - mocker.patch.object(client, "ApiClient") - - token_auth = TokenAuthentication( - token="testtoken", server="testserver:6443", skip_tls=False, ca_cert_path=None - ) - assert token_auth.login() == ("Logged into testserver:6443") - assert token_auth.logout() == ("Successfully logged out of testserver:6443") - - -def test_token_auth_login_tls(mocker): - mocker.patch.object(client, "ApiClient") - - token_auth = TokenAuthentication( - token="testtoken", server="testserver:6443", skip_tls=True, ca_cert_path=None - ) - assert token_auth.login() == ("Logged into testserver:6443") - token_auth = TokenAuthentication( - token="testtoken", server="testserver:6443", skip_tls=False, ca_cert_path=None - ) - assert token_auth.login() == ("Logged into testserver:6443") - token_auth = TokenAuthentication( - token="testtoken", - server="testserver:6443", - skip_tls=False, - ca_cert_path=f"{parent}/tests/auth-test.crt", - ) - assert token_auth.login() == ("Logged into testserver:6443") - - os.environ["CF_SDK_CA_CERT_PATH"] = f"{parent}/tests/auth-test.crt" - token_auth = TokenAuthentication( - token="testtoken", - server="testserver:6443", - skip_tls=False, - ) - assert token_auth.login() == ("Logged into testserver:6443") - - -def test_config_check_no_config_file(mocker): - mocker.patch("os.path.expanduser", return_value="/mock/home/directory") - mocker.patch("os.path.isfile", return_value=False) - mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.config_path", None) - mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) - - with pytest.raises(PermissionError): - config_check() - - -def test_config_check_with_incluster_config(mocker): - mocker.patch("os.path.expanduser", return_value="/mock/home/directory") - mocker.patch("os.path.isfile", return_value=False) - mocker.patch.dict(os.environ, {"KUBERNETES_PORT": "number"}) - mocker.patch("kubernetes.config.load_incluster_config", side_effect=None) - mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.config_path", None) - mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) - - result = config_check() - assert result == None - - -def test_config_check_with_existing_config_file(mocker): - mocker.patch("os.path.expanduser", return_value="/mock/home/directory") - mocker.patch("os.path.isfile", return_value=True) - mocker.patch("kubernetes.config.load_kube_config", side_effect=None) - mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.config_path", None) - mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) - - result = config_check() - assert result == None - - -def test_config_check_with_config_path_and_no_api_client(mocker): - mocker.patch( - "codeflare_sdk.common.kubernetes_cluster.auth.config_path", "/mock/config/path" - ) - mocker.patch("codeflare_sdk.common.kubernetes_cluster.auth.api_client", None) - result = config_check() - assert result == "/mock/config/path" - - -def test_load_kube_config(mocker): - mocker.patch.object(config, "load_kube_config") - kube_config_auth = KubeConfigFileAuthentication( - kube_config_path="/path/to/your/config" - ) - response = 
kube_config_auth.load_kube_config() - - assert ( - response - == "Loaded user config file at path %s" % kube_config_auth.kube_config_path - ) - - kube_config_auth = KubeConfigFileAuthentication(kube_config_path=None) - response = kube_config_auth.load_kube_config() - assert response == "Please specify a config file path" - - -def test_auth_coverage(): - abstract = Authentication() - abstract.login() - abstract.logout() - - -def test_config_creation(): - config = createClusterConfig() - - assert config.name == "unit-test-cluster" and config.namespace == "ns" - assert config.num_workers == 2 - assert config.worker_cpu_requests == 3 and config.worker_cpu_limits == 4 - assert config.worker_memory_requests == "5G" and config.worker_memory_limits == "6G" - assert config.worker_extended_resource_requests == {"nvidia.com/gpu": 7} - assert ( - config.template - == f"{parent}/src/codeflare_sdk/ray/templates/base-template.yaml" - ) - assert config.machine_types == ["cpu.small", "gpu.large"] - assert config.image_pull_secrets == ["unit-test-pull-secret"] - assert config.appwrapper == True - - -def test_config_creation_wrong_type(): - with pytest.raises(TypeError): - createClusterWrongType() - - -def test_cluster_creation(mocker): - # Create AppWrapper containing a Ray Cluster with no local queue specified - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - cluster = createClusterWithConfig(mocker) - assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster.yaml" - assert cluster.app_wrapper_name == "unit-test-cluster" - assert filecmp.cmp( - f"{aw_dir}unit-test-cluster.yaml", - f"{parent}/tests/test-case.yaml", - shallow=True, - ) - - -@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) -def test_cluster_no_kueue_no_aw(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, - ) - mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") - config = createClusterConfig() - config.appwrapper = False - config.name = "unit-test-no-kueue" - config.write_to_file = True - cluster = Cluster(config) - assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-no-kueue.yaml" - assert cluster.config.local_queue == None - assert filecmp.cmp( - f"{aw_dir}unit-test-no-kueue.yaml", - f"{parent}/tests/test-case-no-kueue-no-aw.yaml", - shallow=True, - ) - - -def get_local_queue(group, version, namespace, plural): - assert group == "kueue.x-k8s.io" - assert version == "v1beta1" - assert namespace == "ns" - assert plural == "localqueues" - local_queues = { - "apiVersion": "kueue.x-k8s.io/v1beta1", - "items": [ - { - "apiVersion": "kueue.x-k8s.io/v1beta1", - "kind": "LocalQueue", - "metadata": { - "annotations": {"kueue.x-k8s.io/default-queue": "true"}, - "name": "local-queue-default", - "namespace": "ns", - }, - "spec": {"clusterQueue": "cluster-queue"}, - }, - { - "apiVersion": "kueue.x-k8s.io/v1beta1", - "kind": "LocalQueue", - "metadata": { - "name": "team-a-queue", - "namespace": "ns", - }, - "spec": {"clusterQueue": "team-a-queue"}, - }, - ], - "kind": "LocalQueueList", - "metadata": {"continue": "", "resourceVersion": "2266811"}, - } - return local_queues - - -@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) -def 
test_cluster_creation_no_mcad(mocker): - # Create Ray Cluster with no local queue specified - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - - config = createClusterConfig() - config.name = "unit-test-cluster-ray" - config.write_to_file = True - config.labels = {"testlabel": "test", "testlabel2": "test"} - config.appwrapper = False - cluster = Cluster(config) - - assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" - assert cluster.app_wrapper_name == "unit-test-cluster-ray" - assert filecmp.cmp( - f"{aw_dir}unit-test-cluster-ray.yaml", - f"{parent}/tests/test-case-no-mcad.yamls", - shallow=True, - ) - - -@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) -def test_cluster_creation_no_mcad_local_queue(mocker): - # With written resources - # Create Ray Cluster with local queue specified - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - config = createClusterConfig() - config.name = "unit-test-cluster-ray" - config.appwrapper = False - config.write_to_file = True - config.local_queue = "local-queue-default" - config.labels = {"testlabel": "test", "testlabel2": "test"} - cluster = Cluster(config) - assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" - assert cluster.app_wrapper_name == "unit-test-cluster-ray" - assert filecmp.cmp( - f"{aw_dir}unit-test-cluster-ray.yaml", - f"{parent}/tests/test-case-no-mcad.yamls", - shallow=True, - ) - # With resources loaded in memory - config = ClusterConfiguration( - name="unit-test-cluster-ray", - namespace="ns", - num_workers=2, - worker_cpu_requests=3, - worker_cpu_limits=4, - worker_memory_requests=5, - worker_memory_limits=6, - worker_extended_resource_requests={"nvidia.com/gpu": 7}, - machine_types=["cpu.small", "gpu.large"], - image_pull_secrets=["unit-test-pull-secret"], - write_to_file=True, - appwrapper=False, - local_queue="local-queue-default", - labels={"testlabel": "test", "testlabel2": "test"}, - ) - cluster = Cluster(config) - assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-ray.yaml" - assert cluster.app_wrapper_name == "unit-test-cluster-ray" - assert filecmp.cmp( - f"{aw_dir}unit-test-cluster-ray.yaml", - f"{parent}/tests/test-case-no-mcad.yamls", - shallow=True, - ) - - -def test_default_cluster_creation(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "codeflare_sdk.ray.cluster.cluster.get_current_namespace", - return_value="opendatahub", - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - default_config = ClusterConfiguration( - name="unit-test-default-cluster", - appwrapper=True, - ) - cluster = Cluster(default_config) - test_aw = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) - - with open( - 
f"{parent}/tests/test-default-appwrapper.yaml", - ) as f: - default_aw = yaml.load(f, Loader=yaml.FullLoader) - assert test_aw == default_aw - - assert cluster.app_wrapper_name == "unit-test-default-cluster" - assert cluster.config.namespace == "opendatahub" - - -@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) -def test_cluster_creation_with_custom_image(mocker): - # With written resources - # Create Ray Cluster with local queue specified - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - config = createClusterConfig() - config.name = "unit-test-cluster-custom-image" - config.appwrapper = False - config.image = "quay.io/project-codeflare/ray:2.20.0-py39-cu118" - config.local_queue = "local-queue-default" - config.labels = {"testlabel": "test", "testlabel2": "test"} - cluster = Cluster(config) - assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-custom-image.yaml" - assert cluster.app_wrapper_name == "unit-test-cluster-custom-image" - assert filecmp.cmp( - f"{aw_dir}unit-test-cluster-custom-image.yaml", - f"{parent}/tests/test-case-custom-image.yaml", - shallow=True, - ) - # With resources loaded in memory - config = ClusterConfiguration( - name="unit-test-cluster-custom-image", - namespace="ns", - num_workers=2, - worker_cpu_requests=3, - worker_cpu_limits=4, - worker_memory_requests=5, - worker_memory_limits=6, - worker_extended_resource_requests={"nvidia.com/gpu": 7}, - machine_types=["cpu.small", "gpu.large"], - image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/project-codeflare/ray:2.20.0-py39-cu118", - write_to_file=True, - appwrapper=False, - local_queue="local-queue-default", - labels={"testlabel": "test", "testlabel2": "test"}, - ) - cluster = Cluster(config) - assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-custom-image.yaml" - assert cluster.app_wrapper_name == "unit-test-cluster-custom-image" - assert filecmp.cmp( - f"{aw_dir}unit-test-cluster-custom-image.yaml", - f"{parent}/tests/test-case-custom-image.yaml", - shallow=True, - ) - - -def test_gen_names_with_name(mocker): - mocker.patch.object( - uuid, "uuid4", return_value=uuid.UUID("00000000-0000-0000-0000-000000000001") - ) - name = "myname" - appwrapper_name, cluster_name = gen_names(name) - assert appwrapper_name == name - assert cluster_name == name - - -def test_gen_names_without_name(mocker): - mocker.patch.object( - uuid, "uuid4", return_value=uuid.UUID("00000000-0000-0000-0000-000000000001") - ) - appwrapper_name, cluster_name = gen_names(None) - assert appwrapper_name.startswith("appwrapper-") - assert cluster_name.startswith("cluster-") - - -def arg_check_apply_effect(group, version, namespace, plural, body, *args): - assert namespace == "ns" - assert args == tuple() - if plural == "appwrappers": - assert group == "workload.codeflare.dev" - assert version == "v1beta2" - with open(f"{aw_dir}unit-test-cluster.yaml") as f: - aw = yaml.load(f, Loader=yaml.FullLoader) - assert body == aw - elif plural == "rayclusters": - assert group == "ray.io" - assert version == "v1" - with open(f"{aw_dir}unit-test-cluster-ray.yaml") as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) - for resource in yamls: - if resource["kind"] == "RayCluster": - 
assert body == resource - elif plural == "ingresses": - assert group == "networking.k8s.io" - assert version == "v1" - with open(f"{aw_dir}unit-test-cluster-ray.yaml") as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) - for resource in yamls: - if resource["kind"] == "Ingress": - assert body == resource - elif plural == "routes": - assert group == "route.openshift.io" - assert version == "v1" - with open(f"{aw_dir}unit-test-cluster-ray.yaml") as f: - yamls = yaml.load_all(f, Loader=yaml.FullLoader) - for resource in yamls: - if resource["kind"] == "Ingress": - assert body == resource - else: - assert 1 == 0 - - -def arg_check_del_effect(group, version, namespace, plural, name, *args): - assert namespace == "ns" - assert args == tuple() - if plural == "appwrappers": - assert group == "workload.codeflare.dev" - assert version == "v1beta2" - assert name == "unit-test-cluster" - elif plural == "rayclusters": - assert group == "ray.io" - assert version == "v1" - assert name == "unit-test-cluster-ray" - elif plural == "ingresses": - assert group == "networking.k8s.io" - assert version == "v1" - assert name == "ray-dashboard-unit-test-cluster-ray" - - -def test_cluster_up_down(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": ""}}, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.create_namespaced_custom_object", - side_effect=arg_check_apply_effect, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object", - side_effect=arg_check_del_effect, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", - return_value={"items": []}, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - cluster = cluster = createClusterWithConfig(mocker) - cluster.up() - cluster.down() - - -def test_cluster_up_down_no_mcad(mocker): - mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.create_namespaced_custom_object", - side_effect=arg_check_apply_effect, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object", - side_effect=arg_check_del_effect, - ) - mocker.patch( - "kubernetes.client.CoreV1Api.create_namespaced_secret", - ) - mocker.patch( - "kubernetes.client.CoreV1Api.delete_namespaced_secret", - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", - return_value={"items": []}, - ) - config = createClusterConfig() - config.name = "unit-test-cluster-ray" - config.appwrapper = False - cluster = Cluster(config) - cluster.up() - cluster.down() - - -def arg_check_list_effect(group, version, plural, name, *args): - assert group == "config.openshift.io" - assert version == "v1" - assert plural == "ingresses" - assert name == "cluster" - assert args == tuple() - return {"spec": 
{"domain": "test"}} - - -""" We need to fix get_current_namespace in order to reuse this test. -def test_get_ingress_domain(mocker): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - side_effect=arg_check_list_effect, - ) - domain = _get_ingress_domain() - assert domain == "test" -""" - - -def aw_status_fields(group, version, namespace, plural, *args): - assert group == "workload.codeflare.dev" - assert version == "v1beta2" - assert namespace == "test-ns" - assert plural == "appwrappers" - assert args == tuple() - return {"items": []} - - -def test_aw_status(mocker): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=aw_status_fields, - ) - aw = _app_wrapper_status("test-aw", "test-ns") - assert aw == None - - -def rc_status_fields(group, version, namespace, plural, *args): - assert group == "ray.io" - assert version == "v1" - assert namespace == "test-ns" - assert plural == "rayclusters" - assert args == tuple() - return {"items": []} - - -def test_rc_status(mocker): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=rc_status_fields, - ) - rc = _ray_cluster_status("test-rc", "test-ns") - assert rc == None - - -def test_cluster_uris(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "codeflare_sdk.ray.cluster.cluster._get_ingress_domain", - return_value="apps.cluster.awsroute.org", - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - cluster = cluster = createClusterWithConfig(mocker) - mocker.patch( - "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval( - cluster_name="unit-test-cluster", - annotations={"route.openshift.io/termination": "passthrough"}, - ), - ) - assert ( - cluster.cluster_dashboard_uri() - == "https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" - ) - mocker.patch( - "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval(), - ) - assert cluster.cluster_uri() == "ray://unit-test-cluster-head-svc.ns.svc:10001" - assert ( - cluster.cluster_dashboard_uri() - == "http://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" - ) - cluster.config.name = "fake" - mocker.patch( - "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - ) - assert ( - cluster.cluster_dashboard_uri() - == "Dashboard not available yet, have you run cluster.up()?" 
-    )
-
-
-def test_local_client_url(mocker):
-    mocker.patch(
-        "kubernetes.client.CustomObjectsApi.get_cluster_custom_object",
-        return_value={"spec": {"domain": ""}},
-    )
-    mocker.patch(
-        "codeflare_sdk.ray.cluster.cluster._get_ingress_domain",
-        return_value="rayclient-unit-test-cluster-localinter-ns.apps.cluster.awsroute.org",
-    )
-    mocker.patch(
-        "codeflare_sdk.ray.cluster.cluster.Cluster.create_app_wrapper",
-        return_value="unit-test-cluster-localinter.yaml",
-    )
-
-    cluster_config = ClusterConfiguration(
-        name="unit-test-cluster-localinter",
-        namespace="ns",
-        write_to_file=True,
-    )
-    cluster = Cluster(cluster_config)
-    assert (
-        cluster.local_client_url()
-        == "ray://rayclient-unit-test-cluster-localinter-ns.apps.cluster.awsroute.org"
-    )
-
-
-def ray_addr(self, *args):
-    return self._address
-
-
-def mocked_ingress(port, cluster_name="unit-test-cluster", annotations: dict = None):
-    labels = {"ingress-owner": cluster_name}
-    if port == 10001:
-        name = f"rayclient-{cluster_name}"
-    else:
-        name = f"ray-dashboard-{cluster_name}"
-    mock_ingress = client.V1Ingress(
-        metadata=client.V1ObjectMeta(
-            name=name,
-            annotations=annotations,
-            labels=labels,
-            owner_references=[
-                client.V1OwnerReference(
-                    api_version="v1", kind="Ingress", name=cluster_name, uid="unique-id"
-                )
-            ],
-        ),
-        spec=client.V1IngressSpec(
-            rules=[
-                client.V1IngressRule(
-                    host=f"{name}-ns.apps.cluster.awsroute.org",
-                    http=client.V1HTTPIngressRuleValue(
-                        paths=[
-                            client.V1HTTPIngressPath(
-                                path_type="Prefix",
-                                path="/",
-                                backend=client.V1IngressBackend(
-                                    service=client.V1IngressServiceBackend(
-                                        name="head-svc-test",
-                                        port=client.V1ServiceBackendPort(number=port),
-                                    )
-                                ),
-                            )
-                        ]
-                    ),
-                )
-            ],
-        ),
-    )
-    return mock_ingress
-
-
-def ingress_retrieval(
-    cluster_name="unit-test-cluster", client_ing: bool = False, annotations: dict = None
-):
-    dashboard_ingress = mocked_ingress(8265, cluster_name, annotations)
-    if client_ing:
-        client_ingress = mocked_ingress(
-            10001, cluster_name=cluster_name, annotations=annotations
-        )
-        mock_ingress_list = client.V1IngressList(
-            items=[client_ingress, dashboard_ingress]
-        )
-    else:
-        mock_ingress_list = client.V1IngressList(items=[dashboard_ingress])
-
-    return mock_ingress_list
-
-
-def test_ray_job_wrapping(mocker):
-    mocker.patch("kubernetes.client.ApisApi.get_api_versions")
-    mocker.patch(
-        "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object",
-        return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"),
-    )
-    cluster = cluster = createClusterWithConfig(mocker)
-    mocker.patch(
-        "ray.job_submission.JobSubmissionClient._check_connection_and_version_with_url",
-        return_value="None",
-    )
-    mock_res = mocker.patch.object(
-        ray.job_submission.JobSubmissionClient, "list_jobs", autospec=True
-    )
-    mock_res.side_effect = ray_addr
-    mocker.patch(
-        "kubernetes.client.CustomObjectsApi.get_cluster_custom_object",
-        return_value={"spec": {"domain": ""}},
-    )
-    mocker.patch(
-        "kubernetes.client.NetworkingV1Api.list_namespaced_ingress",
-        return_value=ingress_retrieval(),
-    )
-    assert cluster.list_jobs() == cluster.cluster_dashboard_uri()
-
-    mock_res = mocker.patch.object(
-        ray.job_submission.JobSubmissionClient, "get_job_status", autospec=True
-    )
-    mock_res.side_effect = ray_addr
-    assert cluster.job_status("fake_id") == cluster.cluster_dashboard_uri()
-
-    mock_res = mocker.patch.object(
-        ray.job_submission.JobSubmissionClient, "get_job_logs", autospec=True
-    )
-    mock_res.side_effect = ray_addr
-    assert 
cluster.job_logs("fake_id") == cluster.cluster_dashboard_uri() - - -def test_print_no_resources(capsys): - try: - print_no_resources_found() - except Exception: - assert 1 == 0 - captured = capsys.readouterr() - assert captured.out == ( - "╭──────────────────────────────────────────────────────────────────────────────╮\n" - "│ No resources found, have you run cluster.up() yet? │\n" - "╰──────────────────────────────────────────────────────────────────────────────╯\n" - ) - - -def test_print_no_cluster(capsys): - try: - print_cluster_status(None) - except Exception: - assert 1 == 0 - captured = capsys.readouterr() - assert captured.out == ( - "╭──────────────────────────────────────────────────────────────────────────────╮\n" - "│ No resources found, have you run cluster.up() yet? │\n" - "╰──────────────────────────────────────────────────────────────────────────────╯\n" - ) - - -def test_print_appwrappers(capsys): - aw1 = AppWrapper( - name="awtest1", - status=AppWrapperStatus.SUSPENDED, - ) - aw2 = AppWrapper( - name="awtest2", - status=AppWrapperStatus.RUNNING, - ) - try: - print_app_wrappers_status([aw1, aw2]) - except Exception: - assert 1 == 0 - captured = capsys.readouterr() - assert captured.out == ( - "╭─────────────────────────╮\n" - "│ 🚀 Cluster Queue │\n" - "│ Status 🚀 │\n" - "│ +---------+-----------+ │\n" - "│ | Name | Status | │\n" - "│ +=========+===========+ │\n" - "│ | awtest1 | suspended | │\n" - "│ | | | │\n" - "│ | awtest2 | running | │\n" - "│ | | | │\n" - "│ +---------+-----------+ │\n" - "╰─────────────────────────╯\n" - ) - - -def test_ray_details(mocker, capsys): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - ray1 = RayCluster( - name="raytest1", - status=RayClusterStatus.READY, - num_workers=1, - worker_mem_requests="2G", - worker_mem_limits="2G", - worker_cpu_requests=1, - worker_cpu_limits=1, - namespace="ns", - dashboard="fake-uri", - head_cpu_requests=2, - head_cpu_limits=2, - head_mem_requests=8, - head_mem_limits=8, - ) - mocker.patch( - "codeflare_sdk.ray.cluster.cluster.Cluster.status", - return_value=(False, CodeFlareClusterStatus.UNKNOWN), - ) - mocker.patch( - "codeflare_sdk.ray.cluster.cluster.Cluster.cluster_dashboard_uri", - return_value="", - ) - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) - cf = Cluster( - ClusterConfiguration( - name="raytest2", - namespace="ns", - write_to_file=True, - appwrapper=True, - local_queue="local_default_queue", - ) - ) - captured = capsys.readouterr() - ray2 = _copy_to_ray(cf) - details = cf.details() - assert details == ray2 - assert ray2.name == "raytest2" - assert ray1.namespace == ray2.namespace - assert ray1.num_workers == ray2.num_workers - assert ray1.worker_mem_requests == ray2.worker_mem_requests - assert ray1.worker_mem_limits == ray2.worker_mem_limits - assert ray1.worker_cpu_requests == ray2.worker_cpu_requests - assert ray1.worker_cpu_limits == ray2.worker_cpu_limits - assert ray1.worker_extended_resources == ray2.worker_extended_resources - try: - print_clusters([ray1, ray2]) - print_cluster_status(ray1) - print_cluster_status(ray2) - except Exception: - assert 0 == 1 - captured = capsys.readouterr() - assert captured.out == ( - " 🚀 CodeFlare Cluster Details 🚀 \n" - " \n" - " ╭───────────────────────────────────────────────────────────────╮ \n" - " │ Name │ \n" - " │ raytest2 Inactive ❌ │ \n" - " │ │ \n" - " │ URI: ray://raytest2-head-svc.ns.svc:10001 │ \n" - " │ │ \n" - " │ Dashboard🔗 │ \n" - " │ │ \n" - " │ Cluster Resources │ \n" - " 
│ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" - " │ │ # Workers │ │ Memory CPU GPU │ │ \n" - " │ │ │ │ │ │ \n" - " │ │ 1 │ │ 2G~2G 1~1 0 │ │ \n" - " │ │ │ │ │ │ \n" - " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" - " ╰───────────────────────────────────────────────────────────────╯ \n" - " 🚀 CodeFlare Cluster Details 🚀 \n" - " \n" - " ╭───────────────────────────────────────────────────────────────╮ \n" - " │ Name │ \n" - " │ raytest1 Active ✅ │ \n" - " │ │ \n" - " │ URI: ray://raytest1-head-svc.ns.svc:10001 │ \n" - " │ │ \n" - " │ Dashboard🔗 │ \n" - " │ │ \n" - " │ Cluster Resources │ \n" - " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" - " │ │ # Workers │ │ Memory CPU GPU │ │ \n" - " │ │ │ │ │ │ \n" - " │ │ 1 │ │ 2G~2G 1~1 0 │ │ \n" - " │ │ │ │ │ │ \n" - " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" - " ╰───────────────────────────────────────────────────────────────╯ \n" - "╭───────────────────────────────────────────────────────────────╮\n" - "│ Name │\n" - "│ raytest2 Inactive ❌ │\n" - "│ │\n" - "│ URI: ray://raytest2-head-svc.ns.svc:10001 │\n" - "│ │\n" - "│ Dashboard🔗 │\n" - "│ │\n" - "│ Cluster Resources │\n" - "│ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │\n" - "│ │ # Workers │ │ Memory CPU GPU │ │\n" - "│ │ │ │ │ │\n" - "│ │ 1 │ │ 2G~2G 1~1 0 │ │\n" - "│ │ │ │ │ │\n" - "│ ╰─────────────╯ ╰──────────────────────────────────────╯ │\n" - "╰───────────────────────────────────────────────────────────────╯\n" - " 🚀 CodeFlare Cluster Status 🚀 \n" - " \n" - " ╭──────────────────────────────────────────────────────────╮ \n" - " │ Name │ \n" - " │ raytest1 Active ✅ │ \n" - " │ │ \n" - " │ URI: ray://raytest1-head-svc.ns.svc:10001 │ \n" - " │ │ \n" - " │ Dashboard🔗 │ \n" - " │ │ \n" - " ╰──────────────────────────────────────────────────────────╯ \n" - " 🚀 CodeFlare Cluster Status 🚀 \n" - " \n" - " ╭────────────────────────────────────────────────────────────╮ \n" - " │ Name │ \n" - " │ raytest2 Inactive ❌ │ \n" - " │ │ \n" - " │ URI: ray://raytest2-head-svc.ns.svc:10001 │ \n" - " │ │ \n" - " │ Dashboard🔗 │ \n" - " │ │ \n" - " ╰────────────────────────────────────────────────────────────╯ \n" - ) - - -def act_side_effect_list(self): - print([self]) - self.out = str(self.high_level_operation) - return [self] - - -def get_obj_none(group, version, namespace, plural): - return {"items": []} - - -def get_ray_obj(group, version, namespace, plural, cls=None): - api_obj = { - "items": [ - { - "apiVersion": "ray.io/v1", - "kind": "RayCluster", - "metadata": { - "creationTimestamp": "2024-03-05T09:55:37Z", - "generation": 1, - "labels": { - "controller-tools.k8s.io": "1.0", - "resourceName": "quicktest", - "orderedinstance": "m4.xlarge_g4dn.xlarge", - "kueue.x-k8s.io/queue-name": "team-a-queue", - }, - "name": "quicktest", - "namespace": "ns", - "ownerReferences": [ - { - "apiVersion": "workload.codeflare.dev/v1beta2", - "blockOwnerDeletion": True, - "controller": True, - "kind": "AppWrapper", - "name": "quicktest", - "uid": "a29b1a7a-0992-4860-a8d5-a689a751a3e8", - } - ], - "resourceVersion": "5305674", - "uid": "820d065d-bf0c-4675-b951-d32ea496020e", - }, - "spec": { - "autoscalerOptions": { - "idleTimeoutSeconds": 60, - "imagePullPolicy": "Always", - "resources": { - "limits": {"cpu": "500m", "memory": "512Mi"}, - "requests": {"cpu": "500m", "memory": "512Mi"}, - }, - "upscalingMode": "Default", - }, - "enableInTreeAutoscaling": False, - "headGroupSpec": { - "rayStartParams": { - "block": "true", - 
"dashboard-host": "0.0.0.0", - "num-gpus": "0", - }, - "serviceType": "ClusterIP", - "template": { - "metadata": {}, - "spec": { - "containers": [ - { - "env": [ - { - "name": "MY_POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - }, - }, - {"name": "RAY_USE_TLS", "value": "0"}, - { - "name": "RAY_TLS_SERVER_CERT", - "value": "/home/ray/workspace/tls/server.crt", - }, - { - "name": "RAY_TLS_SERVER_KEY", - "value": "/home/ray/workspace/tls/server.key", - }, - { - "name": "RAY_TLS_CA_CERT", - "value": "/home/ray/workspace/tls/ca.crt", - }, - ], - "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "imagePullPolicy": "Always", - "lifecycle": { - "preStop": { - "exec": { - "command": [ - "/bin/sh", - "-c", - "ray stop", - ] - } - } - }, - "name": "ray-head", - "ports": [ - { - "containerPort": 6379, - "name": "gcs", - "protocol": "TCP", - }, - { - "containerPort": 8265, - "name": "dashboard", - "protocol": "TCP", - }, - { - "containerPort": 10001, - "name": "client", - "protocol": "TCP", - }, - ], - "resources": { - "limits": { - "cpu": 2, - "memory": "8G", - }, - "requests": { - "cpu": 2, - "memory": "8G", - }, - }, - "volumeMounts": [ - { - "mountPath": "/etc/pki/tls/certs/odh-trusted-ca-bundle.crt", - "name": "odh-trusted-ca-cert", - "subPath": "odh-trusted-ca-bundle.crt", - }, - { - "mountPath": "/etc/ssl/certs/odh-trusted-ca-bundle.crt", - "name": "odh-trusted-ca-cert", - "subPath": "odh-trusted-ca-bundle.crt", - }, - { - "mountPath": "/etc/pki/tls/certs/odh-ca-bundle.crt", - "name": "odh-ca-cert", - "subPath": "odh-ca-bundle.crt", - }, - { - "mountPath": "/etc/ssl/certs/odh-ca-bundle.crt", - "name": "odh-ca-cert", - "subPath": "odh-ca-bundle.crt", - }, - ], - } - ], - "volumes": [ - { - "configMap": { - "items": [ - { - "key": "ca-bundle.crt", - "path": "odh-trusted-ca-bundle.crt", - } - ], - "name": "odh-trusted-ca-bundle", - "optional": True, - }, - "name": "odh-trusted-ca-cert", - }, - { - "configMap": { - "items": [ - { - "key": "odh-ca-bundle.crt", - "path": "odh-ca-bundle.crt", - } - ], - "name": "odh-trusted-ca-bundle", - "optional": True, - }, - "name": "odh-ca-cert", - }, - ], - }, - }, - }, - "rayVersion": "2.35.0", - "workerGroupSpecs": [ - { - "groupName": "small-group-quicktest", - "maxReplicas": 1, - "minReplicas": 1, - "rayStartParams": { - "block": "true", - "num-gpus": "0", - }, - "replicas": 1, - "scaleStrategy": {}, - "template": { - "metadata": { - "annotations": {"key": "value"}, - "labels": {"key": "value"}, - }, - "spec": { - "containers": [ - { - "env": [ - { - "name": "MY_POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - }, - }, - {"name": "RAY_USE_TLS", "value": "0"}, - { - "name": "RAY_TLS_SERVER_CERT", - "value": "/home/ray/workspace/tls/server.crt", - }, - { - "name": "RAY_TLS_SERVER_KEY", - "value": "/home/ray/workspace/tls/server.key", - }, - { - "name": "RAY_TLS_CA_CERT", - "value": "/home/ray/workspace/tls/ca.crt", - }, - ], - "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "lifecycle": { - "preStop": { - "exec": { - "command": [ - "/bin/sh", - "-c", - "ray stop", - ] - } - } - }, - "name": "machine-learning", - "resources": { - "limits": { - "cpu": 1, - "memory": "2G", - }, - "requests": { - "cpu": 1, - "memory": "2G", - }, - }, - "volumeMounts": [ - { - "mountPath": "/etc/pki/tls/certs/odh-trusted-ca-bundle.crt", - "name": "odh-trusted-ca-cert", - "subPath": "odh-trusted-ca-bundle.crt", - }, - { 
- "mountPath": "/etc/ssl/certs/odh-trusted-ca-bundle.crt", - "name": "odh-trusted-ca-cert", - "subPath": "odh-trusted-ca-bundle.crt", - }, - { - "mountPath": "/etc/pki/tls/certs/odh-ca-bundle.crt", - "name": "odh-ca-cert", - "subPath": "odh-ca-bundle.crt", - }, - { - "mountPath": "/etc/ssl/certs/odh-ca-bundle.crt", - "name": "odh-ca-cert", - "subPath": "odh-ca-bundle.crt", - }, - ], - } - ], - "volumes": [ - { - "configMap": { - "items": [ - { - "key": "ca-bundle.crt", - "path": "odh-trusted-ca-bundle.crt", - } - ], - "name": "odh-trusted-ca-bundle", - "optional": True, - }, - "name": "odh-trusted-ca-cert", - }, - { - "configMap": { - "items": [ - { - "key": "odh-ca-bundle.crt", - "path": "odh-ca-bundle.crt", - } - ], - "name": "odh-trusted-ca-bundle", - "optional": True, - }, - "name": "odh-ca-cert", - }, - ], - }, - }, - } - ], - }, - "status": { - "desiredWorkerReplicas": 1, - "endpoints": { - "client": "10001", - "dashboard": "8265", - "gcs": "6379", - "metrics": "8080", - }, - "head": {"serviceIP": "172.30.179.88"}, - "lastUpdateTime": "2024-03-05T09:55:37Z", - "maxWorkerReplicas": 1, - "minWorkerReplicas": 1, - "observedGeneration": 1, - "state": "ready", - }, - }, - { - "apiVersion": "ray.io/v1", - "kind": "RayCluster", - "metadata": { - "creationTimestamp": "2023-02-22T16:26:07Z", - "generation": 1, - "labels": { - "controller-tools.k8s.io": "1.0", - "resourceName": "quicktest2", - "orderedinstance": "m4.xlarge_g4dn.xlarge", - }, - "name": "quicktest2", - "namespace": "ns", - "ownerReferences": [ - { - "apiVersion": "workload.codeflare.dev/v1beta2", - "blockOwnerDeletion": True, - "controller": True, - "kind": "AppWrapper", - "name": "quicktest2", - "uid": "6334fc1b-471e-4876-8e7b-0b2277679235", - } - ], - "resourceVersion": "9482407", - "uid": "44d45d1f-26c8-43e7-841f-831dbd8c1285", - }, - "spec": { - "autoscalerOptions": { - "idleTimeoutSeconds": 60, - "imagePullPolicy": "Always", - "resources": { - "limits": {"cpu": "500m", "memory": "512Mi"}, - "requests": {"cpu": "500m", "memory": "512Mi"}, - }, - "upscalingMode": "Default", - }, - "enableInTreeAutoscaling": False, - "headGroupSpec": { - "rayStartParams": { - "block": "true", - "dashboard-host": "0.0.0.0", - "num-gpus": "0", - }, - "serviceType": "ClusterIP", - "template": { - "spec": { - "containers": [ - { - "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "imagePullPolicy": "Always", - "lifecycle": { - "preStop": { - "exec": { - "command": [ - "/bin/sh", - "-c", - "ray stop", - ] - } - } - }, - "name": "ray-head", - "ports": [ - { - "containerPort": 6379, - "name": "gcs", - "protocol": "TCP", - }, - { - "containerPort": 8265, - "name": "dashboard", - "protocol": "TCP", - }, - { - "containerPort": 10001, - "name": "client", - "protocol": "TCP", - }, - ], - "resources": { - "limits": { - "cpu": 2, - "memory": "8G", - }, - "requests": { - "cpu": 2, - "memory": "8G", - }, - }, - } - ] - } - }, - }, - "rayVersion": "2.35.0", - "workerGroupSpecs": [ - { - "groupName": "small-group-quicktest2", - "maxReplicas": 1, - "minReplicas": 1, - "rayStartParams": { - "block": "true", - "num-gpus": "0", - }, - "replicas": 1, - "template": { - "metadata": { - "annotations": {"key": "value"}, - "labels": {"key": "value"}, - }, - "spec": { - "containers": [ - { - "env": [ - { - "name": "MY_POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - }, - } - ], - "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - 
"lifecycle": { - "preStop": { - "exec": { - "command": [ - "/bin/sh", - "-c", - "ray stop", - ] - } - } - }, - "name": "machine-learning", - "resources": { - "limits": { - "cpu": 1, - "memory": "2G", - }, - "requests": { - "cpu": 1, - "memory": "2G", - }, - }, - } - ], - }, - }, - } - ], - }, - "status": { - "availableWorkerReplicas": 2, - "desiredWorkerReplicas": 1, - "endpoints": { - "client": "10001", - "dashboard": "8265", - "gcs": "6379", - }, - "lastUpdateTime": "2023-02-22T16:26:16Z", - "maxWorkerReplicas": 1, - "minWorkerReplicas": 1, - "state": "suspended", - }, - }, - ] - } - return api_obj - - -def get_named_aw(group, version, namespace, plural, name): - aws = get_aw_obj("workload.codeflare.dev", "v1beta2", "ns", "appwrappers") - return aws["items"][0] - - -def get_aw_obj(group, version, namespace, plural): - api_obj1 = { - "items": [ - { - "apiVersion": "workload.codeflare.dev/v1beta2", - "kind": "AppWrapper", - "metadata": { - "name": "quicktest1", - "namespace": "ns", - }, - "spec": { - "components": [ - { - "template": { - "apiVersion": "ray.io/v1", - "kind": "RayCluster", - "metadata": { - "labels": { - "controller-tools.k8s.io": "1.0", - }, - "name": "quicktest1", - "namespace": "ns", - }, - "spec": { - "autoscalerOptions": { - "idleTimeoutSeconds": 60, - "imagePullPolicy": "Always", - "resources": { - "limits": { - "cpu": "500m", - "memory": "512Mi", - }, - "requests": { - "cpu": "500m", - "memory": "512Mi", - }, - }, - "upscalingMode": "Default", - }, - "enableInTreeAutoscaling": False, - "headGroupSpec": { - "rayStartParams": { - "block": "true", - "dashboard-host": "0.0.0.0", - "num-gpus": "0", - }, - "serviceType": "ClusterIP", - "template": { - "spec": { - "containers": [ - { - "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "imagePullPolicy": "Always", - "lifecycle": { - "preStop": { - "exec": { - "command": [ - "/bin/sh", - "-c", - "ray stop", - ] - } - } - }, - "name": "ray-head", - "ports": [ - { - "containerPort": 6379, - "name": "gcs", - }, - { - "containerPort": 8265, - "name": "dashboard", - }, - { - "containerPort": 10001, - "name": "client", - }, - ], - "resources": { - "limits": { - "cpu": 2, - "memory": "8G", - }, - "requests": { - "cpu": 2, - "memory": "8G", - }, - }, - } - ] - } - }, - }, - "rayVersion": "1.12.0", - "workerGroupSpecs": [ - { - "groupName": "small-group-quicktest", - "maxReplicas": 1, - "minReplicas": 1, - "rayStartParams": { - "block": "true", - "num-gpus": "0", - }, - "replicas": 1, - "template": { - "metadata": { - "annotations": {"key": "value"}, - "labels": {"key": "value"}, - }, - "spec": { - "containers": [ - { - "env": [ - { - "name": "MY_POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - }, - } - ], - "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "lifecycle": { - "preStop": { - "exec": { - "command": [ - "/bin/sh", - "-c", - "ray stop", - ] - } - } - }, - "name": "machine-learning", - "resources": { - "limits": { - "cpu": 1, - "memory": "2G", - }, - "requests": { - "cpu": 1, - "memory": "2G", - }, - }, - } - ], - }, - }, - } - ], - }, - }, - }, - { - "template": { - "apiVersion": "networking.k8s.io/v1", - "kind": "Ingress", - "metadata": { - "labels": { - "ingress-owner": "appwrapper-name", - }, - "name": "ray-dashboard-quicktest", - "namespace": "default", - }, - "spec": { - "ingressClassName": "nginx", - "rules": [ - { - "http": { - "paths": { - "backend": { - "service": { - "name": 
"quicktest-head-svc", - "port": {"number": 8265}, - }, - }, - "pathType": "Prefix", - "path": "/", - }, - }, - "host": "quicktest.awsroute.com", - } - ], - }, - }, - }, - ], - }, - "status": { - "phase": "Running", - }, - }, - { - "apiVersion": "workload.codeflare.dev/v1beta2", - "kind": "AppWrapper", - "metadata": { - "name": "quicktest2", - "namespace": "ns", - }, - "spec": { - "components": [ - { - "template": { - "apiVersion": "ray.io/v1", - "kind": "RayCluster", - "metadata": { - "labels": { - "controller-tools.k8s.io": "1.0", - }, - "name": "quicktest2", - "namespace": "ns", - }, - "spec": { - "autoscalerOptions": { - "idleTimeoutSeconds": 60, - "imagePullPolicy": "Always", - "resources": { - "limits": { - "cpu": "500m", - "memory": "512Mi", - }, - "requests": { - "cpu": "500m", - "memory": "512Mi", - }, - }, - "upscalingMode": "Default", - }, - "enableInTreeAutoscaling": False, - "headGroupSpec": { - "rayStartParams": { - "block": "true", - "dashboard-host": "0.0.0.0", - "num-gpus": "0", - }, - "serviceType": "ClusterIP", - "template": { - "spec": { - "containers": [ - { - "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "imagePullPolicy": "Always", - "lifecycle": { - "preStop": { - "exec": { - "command": [ - "/bin/sh", - "-c", - "ray stop", - ] - } - } - }, - "name": "ray-head", - "ports": [ - { - "containerPort": 6379, - "name": "gcs", - }, - { - "containerPort": 8265, - "name": "dashboard", - }, - { - "containerPort": 10001, - "name": "client", - }, - ], - "resources": { - "limits": { - "cpu": 2, - "memory": "8G", - }, - "requests": { - "cpu": 2, - "memory": "8G", - }, - }, - } - ] - } - }, - }, - "rayVersion": "2.35.0", - "workerGroupSpecs": [ - { - "groupName": "small-group-quicktest", - "maxReplicas": 1, - "minReplicas": 1, - "rayStartParams": { - "block": "true", - "num-gpus": "0", - }, - "replicas": 1, - "template": { - "metadata": { - "annotations": {"key": "value"}, - "labels": {"key": "value"}, - }, - "spec": { - "containers": [ - { - "env": [ - { - "name": "MY_POD_IP", - "valueFrom": { - "fieldRef": { - "fieldPath": "status.podIP" - } - }, - } - ], - "image": "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103", - "lifecycle": { - "preStop": { - "exec": { - "command": [ - "/bin/sh", - "-c", - "ray stop", - ] - } - } - }, - "name": "machine-learning", - "resources": { - "limits": { - "cpu": 1, - "memory": "2G", - }, - "requests": { - "cpu": 1, - "memory": "2G", - }, - }, - } - ], - }, - }, - } - ], - }, - }, - }, - { - "template": { - "apiVersion": "route.openshift.io/v1", - "kind": "Route", - "metadata": { - "labels": { - "odh-ray-cluster-service": "quicktest-head-svc" - }, - "name": "ray-dashboard-quicktest", - "namespace": "default", - }, - "spec": { - "port": {"targetPort": "dashboard"}, - "to": { - "kind": "Service", - "name": "quicktest-head-svc", - }, - }, - }, - }, - ], - }, - "status": { - "phase": "Suspended", - }, - }, - ] - } - return api_obj1 - - -def route_list_retrieval(group, version, namespace, plural): - assert group == "route.openshift.io" - assert version == "v1" - assert namespace == "ns" - assert plural == "routes" - return { - "kind": "RouteList", - "apiVersion": "route.openshift.io/v1", - "metadata": {"resourceVersion": "6072398"}, - "items": [ - { - "metadata": { - "name": "ray-dashboard-quicktest", - "namespace": "ns", - }, - "spec": { - "host": "ray-dashboard-quicktest-opendatahub.apps.cluster.awsroute.org", - "to": { - "kind": "Service", - "name": 
"quicktest-head-svc", - "weight": 100, - }, - "port": {"targetPort": "dashboard"}, - "tls": {"termination": "edge"}, - }, - }, - { - "metadata": { - "name": "rayclient-quicktest", - "namespace": "ns", - }, - "spec": { - "host": "rayclient-quicktest-opendatahub.apps.cluster.awsroute.org", - "to": { - "kind": "Service", - "name": "quicktest-head-svc", - "weight": 100, - }, - "port": {"targetPort": "client"}, - "tls": {"termination": "passthrough"}, - }, - }, - ], - } - - -def test_get_cluster_openshift(mocker): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - # Mock the client.ApisApi function to return a mock object - mock_api = MagicMock() - mock_api.get_api_versions.return_value.groups = [ - MagicMock(versions=[MagicMock(group_version="route.openshift.io/v1")]) - ] - mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) - - assert is_openshift_cluster() - - def custom_side_effect(group, version, namespace, plural, **kwargs): - if plural == "routes": - return route_list_retrieval("route.openshift.io", "v1", "ns", "routes") - elif plural == "rayclusters": - return get_ray_obj("ray.io", "v1", "ns", "rayclusters") - elif plural == "appwrappers": - return get_aw_obj("workload.codeflare.dev", "v1beta2", "ns", "appwrappers") - elif plural == "localqueues": - return get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues") - - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", get_aw_obj - ) - - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=custom_side_effect, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", - return_value=get_named_aw, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", - side_effect=route_list_retrieval("route.openshift.io", "v1", "ns", "routes")[ - "items" - ], - ) - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) - - cluster = get_cluster("quicktest") - cluster_config = cluster.config - assert cluster_config.name == "quicktest" and cluster_config.namespace == "ns" - assert ( - "m4.xlarge" in cluster_config.machine_types - and "g4dn.xlarge" in cluster_config.machine_types - ) - assert ( - cluster_config.worker_cpu_requests == 1 - and cluster_config.worker_cpu_limits == 1 - ) - assert ( - cluster_config.worker_memory_requests == "2G" - and cluster_config.worker_memory_limits == "2G" - ) - assert cluster_config.worker_extended_resource_requests == {} - assert ( - cluster_config.image - == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" - ) - assert cluster_config.num_workers == 1 - - -def test_get_cluster(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=get_ray_obj, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", - side_effect=get_named_aw, - ) - mocker.patch( - "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), - ) - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) - cluster = get_cluster("quicktest") - 
cluster_config = cluster.config - assert cluster_config.name == "quicktest" and cluster_config.namespace == "ns" - assert ( - "m4.xlarge" in cluster_config.machine_types - and "g4dn.xlarge" in cluster_config.machine_types - ) - assert ( - cluster_config.worker_cpu_requests == 1 - and cluster_config.worker_cpu_limits == 1 - ) - assert ( - cluster_config.worker_memory_requests == "2G" - and cluster_config.worker_memory_limits == "2G" - ) - assert cluster_config.worker_extended_resource_requests == {} - assert ( - cluster_config.image - == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" - ) - assert cluster_config.num_workers == 1 - - -def test_get_cluster_no_mcad(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=get_ray_obj, - ) - mocker.patch( - "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), - ) - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) - cluster = get_cluster("quicktest") - cluster_config = cluster.config - assert cluster_config.name == "quicktest" and cluster_config.namespace == "ns" - assert ( - "m4.xlarge" in cluster_config.machine_types - and "g4dn.xlarge" in cluster_config.machine_types - ) - assert ( - cluster_config.worker_cpu_requests == 1 - and cluster_config.worker_cpu_limits == 1 - ) - assert ( - cluster_config.worker_memory_requests == "2G" - and cluster_config.worker_memory_limits == "2G" - ) - assert cluster_config.worker_extended_resource_requests == {} - assert ( - cluster_config.image - == "ghcr.io/foundation-model-stack/base:ray2.1.0-py38-gpu-pytorch1.12.0cu116-20221213-193103" - ) - assert cluster_config.num_workers == 1 - assert cluster_config.local_queue == "team-a-queue" - - -def route_retrieval(group, version, namespace, plural, name): - assert group == "route.openshift.io" - assert version == "v1" - assert namespace == "ns" - assert plural == "routes" - assert name == "ray-dashboard-unit-test-cluster" - return { - "items": [ - { - "metadata": {"name": "ray-dashboard-unit-test-cluster"}, - "spec": { - "host": "ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" - }, - } - ] - } - - -def test_map_to_ray_cluster(mocker): - mocker.patch("kubernetes.config.load_kube_config") - - mocker.patch( - "codeflare_sdk.ray.cluster.cluster.is_openshift_cluster", return_value=True - ) - - mock_api_client = mocker.MagicMock(spec=client.ApiClient) - mocker.patch( - "codeflare_sdk.common.kubernetes_cluster.auth.get_api_client", - return_value=mock_api_client, - ) - - mock_routes = { - "items": [ - { - "apiVersion": "route.openshift.io/v1", - "kind": "Route", - "metadata": { - "name": "ray-dashboard-quicktest", - "namespace": "ns", - }, - "spec": {"host": "ray-dashboard-quicktest"}, - }, - ] - } - - def custom_side_effect(group, version, namespace, plural, **kwargs): - if plural == "routes": - return mock_routes - elif plural == "rayclusters": - return get_ray_obj("ray.io", "v1", "ns", "rayclusters") - - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=custom_side_effect, - ) - - rc = get_ray_obj("ray.io", "v1", "ns", "rayclusters")["items"][0] - rc_name = rc["metadata"]["name"] - rc_dashboard = f"http://ray-dashboard-{rc_name}" - - result = 
_map_to_ray_cluster(rc) - - assert result is not None - assert result.dashboard == rc_dashboard - - -def test_list_clusters(mocker, capsys): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=get_obj_none, - ) - mocker.patch( - "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - ) - list_all_clusters("ns") - captured = capsys.readouterr() - assert captured.out == ( - "╭──────────────────────────────────────────────────────────────────────────────╮\n" - "│ No resources found, have you run cluster.up() yet? │\n" - "╰──────────────────────────────────────────────────────────────────────────────╯\n" - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=get_ray_obj, - ) - list_all_clusters("ns") - captured = capsys.readouterr() - assert captured.out == ( - " 🚀 CodeFlare Cluster Details 🚀 \n" - " \n" - " ╭───────────────────────────────────────────────────────────────╮ \n" - " │ Name │ \n" - " │ quicktest Active ✅ │ \n" - " │ │ \n" - " │ URI: ray://quicktest-head-svc.ns.svc:10001 │ \n" - " │ │ \n" - " │ Dashboard🔗 │ \n" - " │ │ \n" - " │ Cluster Resources │ \n" - " │ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │ \n" - " │ │ # Workers │ │ Memory CPU GPU │ │ \n" - " │ │ │ │ │ │ \n" - " │ │ 1 │ │ 2G~2G 1~1 0 │ │ \n" - " │ │ │ │ │ │ \n" - " │ ╰─────────────╯ ╰──────────────────────────────────────╯ │ \n" - " ╰───────────────────────────────────────────────────────────────╯ \n" - "╭───────────────────────────────────────────────────────────────╮\n" - "│ Name │\n" - "│ quicktest2 Inactive ❌ │\n" - "│ │\n" - "│ URI: ray://quicktest2-head-svc.ns.svc:10001 │\n" - "│ │\n" - "│ Dashboard🔗 │\n" - "│ │\n" - "│ Cluster Resources │\n" - "│ ╭── Workers ──╮ ╭───────── Worker specs(each) ─────────╮ │\n" - "│ │ # Workers │ │ Memory CPU GPU │ │\n" - "│ │ │ │ │ │\n" - "│ │ 1 │ │ 2G~2G 1~1 0 │ │\n" - "│ │ │ │ │ │\n" - "│ ╰─────────────╯ ╰──────────────────────────────────────╯ │\n" - "╰───────────────────────────────────────────────────────────────╯\n" - ) - - -def test_list_queue(mocker, capsys): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=get_obj_none, - ) - list_all_queued("ns", appwrapper=True) - captured = capsys.readouterr() - assert captured.out == ( - "╭──────────────────────────────────────────────────────────────────────────────╮\n" - "│ No resources found, have you run cluster.up() yet? 
│\n" - "╰──────────────────────────────────────────────────────────────────────────────╯\n" - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=get_aw_obj, - ) - list_all_queued("ns", appwrapper=True) - captured = capsys.readouterr() - assert captured.out == ( - "╭────────────────────────────╮\n" - "│ 🚀 Cluster Queue Status │\n" - "│ 🚀 │\n" - "│ +------------+-----------+ │\n" - "│ | Name | Status | │\n" - "│ +============+===========+ │\n" - "│ | quicktest1 | running | │\n" - "│ | | | │\n" - "│ | quicktest2 | suspended | │\n" - "│ | | | │\n" - "│ +------------+-----------+ │\n" - "╰────────────────────────────╯\n" - ) - - -def test_list_queue_rayclusters(mocker, capsys): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mock_api = MagicMock() - mock_api.get_api_versions.return_value.groups = [ - MagicMock(versions=[MagicMock(group_version="route.openshift.io/v1")]) - ] - mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) - - assert is_openshift_cluster() == True - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=get_obj_none, - ) - list_all_queued("ns") - captured = capsys.readouterr() - assert captured.out == ( - "╭──────────────────────────────────────────────────────────────────────────────╮\n" - "│ No resources found, have you run cluster.up() yet? │\n" - "╰──────────────────────────────────────────────────────────────────────────────╯\n" - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=get_ray_obj, - ) - list_all_queued("ns") - captured = capsys.readouterr() - print(captured.out) - assert captured.out == ( - "╭────────────────────────────╮\n" - "│ 🚀 Cluster Queue Status │\n" - "│ 🚀 │\n" - "│ +------------+-----------+ │\n" - "│ | Name | Status | │\n" - "│ +============+===========+ │\n" - "│ | quicktest | ready | │\n" - "│ | | | │\n" - "│ | quicktest2 | suspended | │\n" - "│ | | | │\n" - "│ +------------+-----------+ │\n" - "╰────────────────────────────╯\n" - ) - - -def test_cluster_status(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) - fake_aw = AppWrapper("test", AppWrapperStatus.FAILED) - fake_ray = RayCluster( - name="test", - status=RayClusterStatus.UNKNOWN, - num_workers=1, - worker_mem_requests=2, - worker_mem_limits=2, - worker_cpu_requests=1, - worker_cpu_limits=1, - namespace="ns", - dashboard="fake-uri", - head_cpu_requests=2, - head_cpu_limits=2, - head_mem_requests=8, - head_mem_limits=8, - ) - cf = Cluster( - ClusterConfiguration( - name="test", - namespace="ns", - write_to_file=True, - appwrapper=True, - local_queue="local_default_queue", - ) - ) - mocker.patch( - "codeflare_sdk.ray.cluster.cluster._app_wrapper_status", return_value=None - ) - mocker.patch( - "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=None - ) - status, ready = cf.status() - assert status == CodeFlareClusterStatus.UNKNOWN - assert ready == False - - mocker.patch( - "codeflare_sdk.ray.cluster.cluster._app_wrapper_status", return_value=fake_aw - ) - status, ready = cf.status() - assert status == CodeFlareClusterStatus.FAILED - assert ready == False - - fake_aw.status = AppWrapperStatus.SUSPENDED - status, ready = cf.status() - assert status == CodeFlareClusterStatus.QUEUED - assert 
ready == False - - fake_aw.status = AppWrapperStatus.RESUMING - status, ready = cf.status() - assert status == CodeFlareClusterStatus.STARTING - assert ready == False - - fake_aw.status = AppWrapperStatus.RESETTING - status, ready = cf.status() - assert status == CodeFlareClusterStatus.STARTING - assert ready == False - - fake_aw.status = AppWrapperStatus.RUNNING - status, ready = cf.status() - assert status == CodeFlareClusterStatus.UNKNOWN - assert ready == False - - mocker.patch( - "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=fake_ray - ) - - status, ready = cf.status() - assert status == CodeFlareClusterStatus.STARTING - assert ready == False - - fake_ray.status = RayClusterStatus.FAILED - status, ready = cf.status() - assert status == CodeFlareClusterStatus.FAILED - assert ready == False - - fake_ray.status = RayClusterStatus.UNHEALTHY - status, ready = cf.status() - assert status == CodeFlareClusterStatus.FAILED - assert ready == False - - fake_ray.status = RayClusterStatus.READY - status, ready = cf.status() - assert status == CodeFlareClusterStatus.READY - assert ready == True - - -def test_wait_ready(mocker, capsys): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval(), - ) - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "codeflare_sdk.ray.cluster.cluster._app_wrapper_status", return_value=None - ) - mocker.patch( - "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=None - ) - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) - mocker.patch.object( - client.CustomObjectsApi, - "list_namespaced_custom_object", - return_value={ - "items": [ - { - "metadata": {"name": "ray-dashboard-test"}, - "spec": {"host": "mocked-host"}, - } - ] - }, - ) - mock_response = mocker.Mock() - mock_response.status_code = 200 - mocker.patch("requests.get", return_value=mock_response) - cf = Cluster( - ClusterConfiguration( - name="test", - namespace="ns", - write_to_file=True, - appwrapper=True, - local_queue="local-queue-default", - ) - ) - try: - cf.wait_ready(timeout=5) - assert 1 == 0 - except Exception as e: - assert type(e) == TimeoutError - - captured = capsys.readouterr() - assert ( - "WARNING: Current cluster status is unknown, have you run cluster.up yet?" 
- in captured.out - ) - mocker.patch( - "codeflare_sdk.ray.cluster.cluster.Cluster.status", - return_value=(True, CodeFlareClusterStatus.READY), - ) - cf.wait_ready() - captured = capsys.readouterr() - assert ( - captured.out - == "Waiting for requested resources to be set up...\nRequested cluster is up and running!\nDashboard is ready!\n" - ) - cf.wait_ready(dashboard_check=False) - captured = capsys.readouterr() - assert ( - captured.out - == "Waiting for requested resources to be set up...\nRequested cluster is up and running!\n" - ) - - -def arg_check_side_effect(*args): - assert args[0] == "fake-app-handle" - - -def parse_j(cmd): - pattern = r"--nnodes\s+\d+\s+--nproc_per_node\s+\d+" - match = re.search(pattern, cmd) - if match: - substring = match.group(0) - else: - return None - args = substring.split() - worker = args[1] - gpu = args[3] - return f"{worker}x{gpu}" - - -def test_AWManager_creation(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - testaw = AWManager(f"{aw_dir}test.yaml") - assert testaw.name == "test" - assert testaw.namespace == "ns" - assert testaw.submitted == False - try: - testaw = AWManager("fake") - except Exception as e: - assert type(e) == FileNotFoundError - assert str(e) == "[Errno 2] No such file or directory: 'fake'" - try: - testaw = AWManager("tests/test-case-bad.yaml") - except Exception as e: - assert type(e) == ValueError - assert ( - str(e) - == "tests/test-case-bad.yaml is not a correctly formatted AppWrapper yaml" - ) - - -def arg_check_aw_apply_effect(group, version, namespace, plural, body, *args): - assert group == "workload.codeflare.dev" - assert version == "v1beta2" - assert namespace == "ns" - assert plural == "appwrappers" - with open(f"{aw_dir}test.yaml") as f: - aw = yaml.load(f, Loader=yaml.FullLoader) - assert body == aw - assert args == tuple() - - -def arg_check_aw_del_effect(group, version, namespace, plural, name, *args): - assert group == "workload.codeflare.dev" - assert version == "v1beta2" - assert namespace == "ns" - assert plural == "appwrappers" - assert name == "test" - assert args == tuple() - - -def test_AWManager_submit_remove(mocker, capsys): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - testaw = AWManager(f"{aw_dir}test.yaml") - testaw.remove() - captured = capsys.readouterr() - assert ( - captured.out - == "AppWrapper not submitted by this manager yet, nothing to remove\n" - ) - assert testaw.submitted == False - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.create_namespaced_custom_object", - side_effect=arg_check_aw_apply_effect, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object", - side_effect=arg_check_aw_del_effect, - ) - testaw.submit() - assert testaw.submitted == True - testaw.remove() - assert testaw.submitted == False - - -import base64 - -from cryptography.hazmat.primitives.serialization import ( - Encoding, - PublicFormat, - load_pem_private_key, -) -from cryptography.x509 import load_pem_x509_certificate - - -def test_generate_ca_cert(): - """ - test the function codeflare_sdk.common.utils.generate_ca_cert generates the correct outputs - """ - key, certificate = generate_ca_cert() - cert = load_pem_x509_certificate(base64.b64decode(certificate)) - private_pub_key_bytes = ( - load_pem_private_key(base64.b64decode(key), password=None) - .public_key() - .public_bytes(Encoding.PEM, PublicFormat.SubjectPublicKeyInfo) - ) - cert_pub_key_bytes = 
cert.public_key().public_bytes(
-        Encoding.PEM, PublicFormat.SubjectPublicKeyInfo
-    )
-    assert type(key) == str
-    assert type(certificate) == str
-    # Verify ca.cert is self signed
-    assert cert.verify_directly_issued_by(cert) == None
-    # Verify cert has the public key bytes from the private key
-    assert cert_pub_key_bytes == private_pub_key_bytes
-
-
-def secret_ca_retreival(secret_name, namespace):
-    ca_private_key_bytes, ca_cert = generate_ca_cert()
-    data = {"ca.crt": ca_cert, "ca.key": ca_private_key_bytes}
-    assert secret_name == "ca-secret-cluster"
-    assert namespace == "namespace"
-    return client.models.V1Secret(data=data)
-
-
-def test_generate_tls_cert(mocker):
-    """
-    test the function codeflare_sdk.common.utils.generate_cert.generate_tls_cert generates the correct outputs
-    """
-    mocker.patch("kubernetes.config.load_kube_config", return_value="ignore")
-    mocker.patch(
-        "codeflare_sdk.common.utils.generate_cert.get_secret_name",
-        return_value="ca-secret-cluster",
-    )
-    mocker.patch(
-        "kubernetes.client.CoreV1Api.read_namespaced_secret",
-        side_effect=secret_ca_retreival,
-    )
-
-    generate_tls_cert("cluster", "namespace")
-    assert os.path.exists("tls-cluster-namespace")
-    assert os.path.exists(os.path.join("tls-cluster-namespace", "ca.crt"))
-    assert os.path.exists(os.path.join("tls-cluster-namespace", "tls.crt"))
-    assert os.path.exists(os.path.join("tls-cluster-namespace", "tls.key"))
-
-    # verify that the signed tls.crt is issued by the ca_cert (root cert)
-    with open(os.path.join("tls-cluster-namespace", "tls.crt"), "r") as f:
-        tls_cert = load_pem_x509_certificate(f.read().encode("utf-8"))
-    with open(os.path.join("tls-cluster-namespace", "ca.crt"), "r") as f:
-        root_cert = load_pem_x509_certificate(f.read().encode("utf-8"))
-    assert tls_cert.verify_directly_issued_by(root_cert) == None
-
-
-def test_export_env():
-    """
-    test the function codeflare_sdk.common.utils.generate_cert.export_env generates the correct outputs
-    """
-    tls_dir = "cluster"
-    ns = "namespace"
-    export_env(tls_dir, ns)
-    assert os.environ["RAY_USE_TLS"] == "1"
-    assert os.environ["RAY_TLS_SERVER_CERT"] == os.path.join(
-        os.getcwd(), f"tls-{tls_dir}-{ns}", "tls.crt"
-    )
-    assert os.environ["RAY_TLS_SERVER_KEY"] == os.path.join(
-        os.getcwd(), f"tls-{tls_dir}-{ns}", "tls.key"
-    )
-    assert os.environ["RAY_TLS_CA_CERT"] == os.path.join(
-        os.getcwd(), f"tls-{tls_dir}-{ns}", "ca.crt"
-    )
-
-
-def test_cluster_throw_for_no_raycluster(mocker: MockerFixture):
-    mocker.patch("kubernetes.client.ApisApi.get_api_versions")
-    mocker.patch(
-        "codeflare_sdk.ray.cluster.cluster.get_current_namespace",
-        return_value="opendatahub",
-    )
-    mocker.patch(
-        "codeflare_sdk.common.kueue.kueue.get_default_kueue_name",
-        return_value="default",
-    )
-    mocker.patch(
-        "codeflare_sdk.common.kueue.kueue.local_queue_exists",
-        return_value="true",
-    )
-
-    def throw_if_getting_raycluster(group, version, namespace, plural):
-        if plural == "rayclusters":
-            raise client.ApiException(status=404)
-        return
-
-    mocker.patch(
-        "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object",
-        side_effect=throw_if_getting_raycluster,
-    )
-    cluster = Cluster(
-        ClusterConfiguration(
-            "test_cluster",
-            write_to_file=False,
-        )
-    )
-    with pytest.raises(RuntimeError):
-        cluster.up()
-
-
-"""
-Ray Jobs tests
-"""
-
-
-# rjc == RayJobClient
-@pytest.fixture
-def ray_job_client(mocker):
-    # Creating a fixture to instantiate RayJobClient with a mocked JobSubmissionClient
-    mocker.patch.object(JobSubmissionClient, "__init__", return_value=None)
-
return RayJobClient( - "https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" - ) - - -def test_rjc_submit_job(ray_job_client, mocker): - mocked_submit_job = mocker.patch.object( - JobSubmissionClient, "submit_job", return_value="mocked_submission_id" - ) - submission_id = ray_job_client.submit_job(entrypoint={"pip": ["numpy"]}) - - mocked_submit_job.assert_called_once_with( - entrypoint={"pip": ["numpy"]}, - job_id=None, - runtime_env=None, - metadata=None, - submission_id=None, - entrypoint_num_cpus=None, - entrypoint_num_gpus=None, - entrypoint_memory=None, - entrypoint_resources=None, - ) - - assert submission_id == "mocked_submission_id" - - -def test_rjc_delete_job(ray_job_client, mocker): - # Case return True - mocked_delete_job_True = mocker.patch.object( - JobSubmissionClient, "delete_job", return_value=True - ) - result = ray_job_client.delete_job(job_id="mocked_job_id") - - mocked_delete_job_True.assert_called_once_with(job_id="mocked_job_id") - assert result == (True, "Successfully deleted Job mocked_job_id") - - # Case return False - mocked_delete_job_False = mocker.patch.object( - JobSubmissionClient, "delete_job", return_value=(False) - ) - result = ray_job_client.delete_job(job_id="mocked_job_id") - - mocked_delete_job_False.assert_called_once_with(job_id="mocked_job_id") - assert result == (False, "Failed to delete Job mocked_job_id") - - -def test_rjc_stop_job(ray_job_client, mocker): - # Case return True - mocked_stop_job_True = mocker.patch.object( - JobSubmissionClient, "stop_job", return_value=(True) - ) - result = ray_job_client.stop_job(job_id="mocked_job_id") - - mocked_stop_job_True.assert_called_once_with(job_id="mocked_job_id") - assert result == (True, "Successfully stopped Job mocked_job_id") - - # Case return False - mocked_stop_job_False = mocker.patch.object( - JobSubmissionClient, "stop_job", return_value=(False) - ) - result = ray_job_client.stop_job(job_id="mocked_job_id") - - mocked_stop_job_False.assert_called_once_with(job_id="mocked_job_id") - assert result == ( - False, - "Failed to stop Job, mocked_job_id could have already completed.", - ) - - -def test_rjc_address(ray_job_client, mocker): - mocked_rjc_address = mocker.patch.object( - JobSubmissionClient, - "get_address", - return_value="https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org", - ) - address = ray_job_client.get_address() - - mocked_rjc_address.assert_called_once() - assert ( - address - == "https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" - ) - - -def test_rjc_get_job_logs(ray_job_client, mocker): - mocked_rjc_get_job_logs = mocker.patch.object( - JobSubmissionClient, "get_job_logs", return_value="Logs" - ) - logs = ray_job_client.get_job_logs(job_id="mocked_job_id") - - mocked_rjc_get_job_logs.assert_called_once_with(job_id="mocked_job_id") - assert logs == "Logs" - - -def test_rjc_get_job_info(ray_job_client, mocker): - job_details_example = "JobDetails(type=, job_id=None, submission_id='mocked_submission_id', driver_info=None, status=, entrypoint='python test.py', message='Job has not started yet. 
It may be waiting for the runtime environment to be set up.', error_type=None, start_time=1701271760641, end_time=None, metadata={}, runtime_env={'working_dir': 'gcs://_ray_pkg_67de6f0e60d43b19.zip', 'pip': {'packages': ['numpy'], 'pip_check': False}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}, driver_agent_http_address=None, driver_node_id=None)" - mocked_rjc_get_job_info = mocker.patch.object( - JobSubmissionClient, "get_job_info", return_value=job_details_example - ) - job_details = ray_job_client.get_job_info(job_id="mocked_job_id") - - mocked_rjc_get_job_info.assert_called_once_with(job_id="mocked_job_id") - assert job_details == job_details_example - - -def test_rjc_get_job_status(ray_job_client, mocker): - job_status_example = "" - mocked_rjc_get_job_status = mocker.patch.object( - JobSubmissionClient, "get_job_status", return_value=job_status_example - ) - job_status = ray_job_client.get_job_status(job_id="mocked_job_id") - - mocked_rjc_get_job_status.assert_called_once_with(job_id="mocked_job_id") - assert job_status == job_status_example - - -def test_rjc_tail_job_logs(ray_job_client, mocker): - logs_example = [ - "Job started...", - "Processing input data...", - "Finalizing results...", - "Job completed successfully.", - ] - mocked_rjc_tail_job_logs = mocker.patch.object( - JobSubmissionClient, "tail_job_logs", return_value=logs_example - ) - job_tail_job_logs = ray_job_client.tail_job_logs(job_id="mocked_job_id") - - mocked_rjc_tail_job_logs.assert_called_once_with(job_id="mocked_job_id") - assert job_tail_job_logs == logs_example - - -def test_rjc_list_jobs(ray_job_client, mocker): - requirements_path = "tests/e2e/mnist_pip_requirements.txt" - pytorch_lightning = get_package_and_version("pytorch_lightning", requirements_path) - torchmetrics = get_package_and_version("torchmetrics", requirements_path) - torchvision = get_package_and_version("torchvision", requirements_path) - jobs_list = [ - f"JobDetails(type=, job_id=None, submission_id='raysubmit_4k2NYS1YbRXYPZCM', driver_info=None, status=, entrypoint='python mnist.py', message='Job finished successfully.', error_type=None, start_time=1701352132585, end_time=1701352192002, metadata={{}}, runtime_env={{'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {{'packages': ['{pytorch_lightning}', 'ray_lightning', '{torchmetrics}', '{torchvision}'], 'pip_check': False}}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", - f"JobDetails(type=, job_id=None, submission_id='raysubmit_iRuwU8vdkbUZZGvT', driver_info=None, status=, entrypoint='python mnist.py', message='Job was intentionally stopped.', error_type=None, start_time=1701353096163, end_time=1701353097733, metadata={{}}, runtime_env={{'working_dir': 'gcs://_ray_pkg_6200b93a110e8033.zip', 'pip': {{'packages': ['{pytorch_lightning}', 'ray_lightning', '{torchmetrics}', '{torchvision}'], 'pip_check': False}}, '_ray_commit': 'b4bba4717f5ba04ee25580fe8f88eed63ef0c5dc'}}, driver_agent_http_address='http://10.131.0.18:52365', driver_node_id='9fb515995f5fb13ad4db239ceea378333bebf0a2d45b6aa09d02e691')", - ] - mocked_rjc_list_jobs = mocker.patch.object( - JobSubmissionClient, "list_jobs", return_value=jobs_list - ) - job_list_jobs = ray_job_client.list_jobs() - - mocked_rjc_list_jobs.assert_called_once() - assert job_list_jobs == jobs_list - - -def test_cluster_config_deprecation_conversion(mocker): - config = 
ClusterConfiguration( - name="test", - num_gpus=2, - head_gpus=1, - min_memory=3, - max_memory=4, - min_cpus=1, - max_cpus=2, - ) - assert config.worker_extended_resource_requests == {"nvidia.com/gpu": 2} - assert config.head_extended_resource_requests == {"nvidia.com/gpu": 1} - assert config.worker_memory_requests == "3G" - assert config.worker_memory_limits == "4G" - assert config.worker_cpu_requests == 1 - assert config.worker_cpu_limits == 2 - - -""" - Ipywidgets tests -""" - - -@patch.dict( - "os.environ", {"JPY_SESSION_NAME": "example-test"} -) # Mock Jupyter environment variable -def test_cluster_up_down_buttons(mocker): - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - cluster = Cluster(createClusterConfig()) - - with patch("ipywidgets.Button") as MockButton, patch( - "ipywidgets.Checkbox" - ) as MockCheckbox, patch("ipywidgets.Output"), patch("ipywidgets.HBox"), patch( - "ipywidgets.VBox" - ), patch.object( - cluster, "up" - ) as mock_up, patch.object( - cluster, "down" - ) as mock_down, patch.object( - cluster, "wait_ready" - ) as mock_wait_ready: - # Create mock button & CheckBox instances - mock_up_button = MagicMock() - mock_down_button = MagicMock() - mock_wait_ready_check_box = MagicMock() - - # Ensure the mock Button class returns the mock button instances in sequence - MockCheckbox.side_effect = [mock_wait_ready_check_box] - MockButton.side_effect = [mock_up_button, mock_down_button] - - # Call the method under test - cf_widgets.cluster_up_down_buttons(cluster) - - # Simulate checkbox being checked or unchecked - mock_wait_ready_check_box.value = True # Simulate checkbox being checked - - # Simulate the button clicks by calling the mock on_click handlers - mock_up_button.on_click.call_args[0][0](None) # Simulate clicking "Cluster Up" - mock_down_button.on_click.call_args[0][0]( - None - ) # Simulate clicking "Cluster Down" - - # Check if the `up` and `down` methods were called - mock_wait_ready.assert_called_once() - mock_up.assert_called_once() - mock_down.assert_called_once() - - -@patch.dict("os.environ", {}, clear=True) # Mock environment with no variables -def test_is_notebook_false(): - assert cf_widgets.is_notebook() is False - - -@patch.dict( - "os.environ", {"JPY_SESSION_NAME": "example-test"} -) # Mock Jupyter environment variable -def test_is_notebook_true(): - assert cf_widgets.is_notebook() is True - - -def test_view_clusters(mocker, capsys): - # If is not a notebook environment, a warning should be raised - with pytest.warns( - UserWarning, - match="view_clusters can only be used in a Jupyter Notebook environment.", - ): - result = cf_widgets.view_clusters("default") - - # Assert the function returns None when not in a notebook environment - assert result is None - - # Prepare to run view_clusters when notebook environment is detected - mocker.patch("codeflare_sdk.common.widgets.widgets.is_notebook", return_value=True) - mock_get_current_namespace = mocker.patch( - "codeflare_sdk.ray.cluster.cluster.get_current_namespace", - return_value="default", - ) - namespace = mock_get_current_namespace.return_value - - # Assert the function returns None when no clusters are found - mock_fetch_cluster_data = mocker.patch( - 
"codeflare_sdk.common.widgets.widgets._fetch_cluster_data", - return_value=pd.DataFrame(), - ) - result = cf_widgets.view_clusters() - captured = capsys.readouterr() - assert mock_fetch_cluster_data.return_value.empty - assert "No clusters found in the default namespace." in captured.out - assert result is None - - # Prepare to run view_clusters with a test DataFrame - mock_fetch_cluster_data = mocker.patch( - "codeflare_sdk.common.widgets.widgets._fetch_cluster_data", - return_value=pd.DataFrame( - { - "Name": ["test-cluster"], - "Namespace": ["default"], - "Num Workers": ["1"], - "Head GPUs": ["0"], - "Worker GPUs": ["0"], - "Head CPU Req~Lim": ["1~1"], - "Head Memory Req~Lim": ["1Gi~1Gi"], - "Worker CPU Req~Lim": ["1~1"], - "Worker Memory Req~Lim": ["1Gi~1Gi"], - "status": ['Ready ✓'], - } - ), - ) - # Create a RayClusterManagerWidgets instance - ray_cluster_manager_instance = cf_widgets.RayClusterManagerWidgets( - ray_clusters_df=mock_fetch_cluster_data.return_value, namespace=namespace - ) - # Patch the constructor of RayClusterManagerWidgets to return our initialized instance - mock_constructor = mocker.patch( - "codeflare_sdk.common.widgets.widgets.RayClusterManagerWidgets", - return_value=ray_cluster_manager_instance, - ) - - # Use a spy to track calls to display_widgets without replacing it - spy_display_widgets = mocker.spy(ray_cluster_manager_instance, "display_widgets") - - cf_widgets.view_clusters() - - mock_constructor.assert_called_once_with( - ray_clusters_df=mock_fetch_cluster_data.return_value, namespace=namespace - ) - - spy_display_widgets.assert_called_once() - - -def test_delete_cluster(mocker, capsys): - name = "test-cluster" - namespace = "default" - - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch("kubernetes.client.ApisApi.get_api_versions") - - mock_ray_cluster = MagicMock() - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", - side_effect=[ - mock_ray_cluster, - client.ApiException(status=404), - client.ApiException(status=404), - mock_ray_cluster, - ], - ) - - # In this scenario, the RayCluster exists and the AppWrapper does not. - mocker.patch( - "codeflare_sdk.ray.cluster.cluster._check_aw_exists", return_value=False - ) - mock_delete_rc = mocker.patch( - "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object" - ) - cf_widgets._delete_cluster(name, namespace) - - mock_delete_rc.assert_called_once_with( - group="ray.io", - version="v1", - namespace=namespace, - plural="rayclusters", - name=name, - ) - - # In this scenario, the AppWrapper exists and the RayCluster does not - mocker.patch( - "codeflare_sdk.ray.cluster.cluster._check_aw_exists", return_value=True - ) - mock_delete_aw = mocker.patch( - "kubernetes.client.CustomObjectsApi.delete_namespaced_custom_object" - ) - cf_widgets._delete_cluster(name, namespace) - - mock_delete_aw.assert_called_once_with( - group="workload.codeflare.dev", - version="v1beta2", - namespace=namespace, - plural="appwrappers", - name=name, - ) - - # In this scenario, the deletion of the resource times out. - with pytest.raises( - TimeoutError, match=f"Timeout waiting for {name} to be deleted." 
- ): - cf_widgets._delete_cluster(name, namespace, 1) - - -def test_ray_cluster_manager_widgets_init(mocker, capsys): - namespace = "default" - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), - ) - test_ray_clusters_df = pd.DataFrame( - { - "Name": ["test-cluster-1", "test-cluster-2"], - "Namespace": [namespace, namespace], - "Num Workers": ["1", "2"], - "Head GPUs": ["0", "0"], - "Worker GPUs": ["0", "0"], - "Head CPU Req~Lim": ["1~1", "1~1"], - "Head Memory Req~Lim": ["1Gi~1Gi", "1Gi~1Gi"], - "Worker CPU Req~Lim": ["1~1", "1~1"], - "Worker Memory Req~Lim": ["1Gi~1Gi", "1Gi~1Gi"], - "status": [ - 'Ready ✓', - 'Ready ✓', - ], - } - ) - mock_fetch_cluster_data = mocker.patch( - "codeflare_sdk.common.widgets.widgets._fetch_cluster_data", - return_value=test_ray_clusters_df, - ) - mocker.patch( - "codeflare_sdk.ray.cluster.cluster.get_current_namespace", - return_value=namespace, - ) - mock_delete_cluster = mocker.patch( - "codeflare_sdk.common.widgets.widgets._delete_cluster" - ) - - # # Mock ToggleButtons - mock_toggle_buttons = mocker.patch("ipywidgets.ToggleButtons") - mock_button = mocker.patch("ipywidgets.Button") - mock_output = mocker.patch("ipywidgets.Output") - - # Initialize the RayClusterManagerWidgets instance - ray_cluster_manager_instance = cf_widgets.RayClusterManagerWidgets( - ray_clusters_df=test_ray_clusters_df, namespace=namespace - ) - - # Assertions for DataFrame and attributes - assert ray_cluster_manager_instance.ray_clusters_df.equals( - test_ray_clusters_df - ), "ray_clusters_df attribute does not match the input DataFrame" - assert ( - ray_cluster_manager_instance.namespace == namespace - ), f"Expected namespace to be '{namespace}', but got '{ray_cluster_manager_instance.namespace}'" - assert ( - ray_cluster_manager_instance.classification_widget.options - == test_ray_clusters_df["Name"].tolist() - ), "classification_widget options do not match the input DataFrame" - - # Assertions for widgets - mock_toggle_buttons.assert_called_once_with( - options=test_ray_clusters_df["Name"].tolist(), - value=test_ray_clusters_df["Name"].tolist()[0], - description="Select an existing cluster:", - ) - assert ( - ray_cluster_manager_instance.classification_widget - == mock_toggle_buttons.return_value - ), "classification_widget is not set correctly" - assert ( - ray_cluster_manager_instance.delete_button == mock_button.return_value - ), "delete_button is not set correctly" - assert ( - ray_cluster_manager_instance.list_jobs_button == mock_button.return_value - ), "list_jobs_button is not set correctly" - assert ( - ray_cluster_manager_instance.ray_dashboard_button == mock_button.return_value - ), "ray_dashboard_button is not set correctly" - assert ( - ray_cluster_manager_instance.raycluster_data_output == mock_output.return_value - ), "raycluster_data_output is not set correctly" - assert ( - ray_cluster_manager_instance.user_output == mock_output.return_value - ), "user_output is not set correctly" - assert ( - ray_cluster_manager_instance.url_output == mock_output.return_value - ), "url_output is not set correctly" - - ### Test button click events - mock_delete_button = MagicMock() - mock_list_jobs_button = MagicMock() - mock_ray_dashboard_button = MagicMock() - - mock_javascript = mocker.patch("codeflare_sdk.common.widgets.widgets.Javascript") - ray_cluster_manager_instance.url_output = 
MagicMock() - - mock_dashboard_uri = mocker.patch( - "codeflare_sdk.ray.cluster.cluster.Cluster.cluster_dashboard_uri", - return_value="https://ray-dashboard-test-cluster-1-ns.apps.cluster.awsroute.org", - ) - - # Simulate clicking the list jobs button - ray_cluster_manager_instance.classification_widget.value = "test-cluster-1" - ray_cluster_manager_instance._on_list_jobs_button_click(mock_list_jobs_button) - - captured = capsys.readouterr() - assert ( - f"Opening Ray Jobs Dashboard for test-cluster-1 cluster:\n{mock_dashboard_uri.return_value}/#/jobs" - in captured.out - ) - mock_javascript.assert_called_with( - f'window.open("{mock_dashboard_uri.return_value}/#/jobs", "_blank");' - ) - - # Simulate clicking the Ray dashboard button - ray_cluster_manager_instance.classification_widget.value = "test-cluster-1" - ray_cluster_manager_instance._on_ray_dashboard_button_click( - mock_ray_dashboard_button - ) - - captured = capsys.readouterr() - assert ( - f"Opening Ray Dashboard for test-cluster-1 cluster:\n{mock_dashboard_uri.return_value}" - in captured.out - ) - mock_javascript.assert_called_with( - f'window.open("{mock_dashboard_uri.return_value}", "_blank");' - ) - - # Simulate clicking the delete button - ray_cluster_manager_instance.classification_widget.value = "test-cluster-1" - ray_cluster_manager_instance._on_delete_button_click(mock_delete_button) - mock_delete_cluster.assert_called_with("test-cluster-1", namespace) - - mock_fetch_cluster_data.return_value = pd.DataFrame() - ray_cluster_manager_instance.classification_widget.value = "test-cluster-2" - ray_cluster_manager_instance._on_delete_button_click(mock_delete_button) - mock_delete_cluster.assert_called_with("test-cluster-2", namespace) - - # Assert on deletion that the dataframe is empty - assert ( - ray_cluster_manager_instance.ray_clusters_df.empty - ), "Expected DataFrame to be empty after deletion" - - captured = capsys.readouterr() - assert ( - f"Cluster test-cluster-1 in the {namespace} namespace was deleted successfully." 
- in captured.out - ) - - -def test_fetch_cluster_data(mocker): - # Return empty dataframe when no clusters are found - mocker.patch("codeflare_sdk.ray.cluster.cluster.list_all_clusters", return_value=[]) - df = cf_widgets._fetch_cluster_data(namespace="default") - assert df.empty - - # Create mock RayCluster objects - mock_raycluster1 = MagicMock(spec=RayCluster) - mock_raycluster1.name = "test-cluster-1" - mock_raycluster1.namespace = "default" - mock_raycluster1.num_workers = 1 - mock_raycluster1.head_extended_resources = {"nvidia.com/gpu": "1"} - mock_raycluster1.worker_extended_resources = {"nvidia.com/gpu": "2"} - mock_raycluster1.head_cpu_requests = "500m" - mock_raycluster1.head_cpu_limits = "1000m" - mock_raycluster1.head_mem_requests = "1Gi" - mock_raycluster1.head_mem_limits = "2Gi" - mock_raycluster1.worker_cpu_requests = "1000m" - mock_raycluster1.worker_cpu_limits = "2000m" - mock_raycluster1.worker_mem_requests = "2Gi" - mock_raycluster1.worker_mem_limits = "4Gi" - mock_raycluster1.status = MagicMock() - mock_raycluster1.status.name = "READY" - mock_raycluster1.status = RayClusterStatus.READY - - mock_raycluster2 = MagicMock(spec=RayCluster) - mock_raycluster2.name = "test-cluster-2" - mock_raycluster2.namespace = "default" - mock_raycluster2.num_workers = 2 - mock_raycluster2.head_extended_resources = {} - mock_raycluster2.worker_extended_resources = {} - mock_raycluster2.head_cpu_requests = None - mock_raycluster2.head_cpu_limits = None - mock_raycluster2.head_mem_requests = None - mock_raycluster2.head_mem_limits = None - mock_raycluster2.worker_cpu_requests = None - mock_raycluster2.worker_cpu_limits = None - mock_raycluster2.worker_mem_requests = None - mock_raycluster2.worker_mem_limits = None - mock_raycluster2.status = MagicMock() - mock_raycluster2.status.name = "SUSPENDED" - mock_raycluster2.status = RayClusterStatus.SUSPENDED - - with patch( - "codeflare_sdk.ray.cluster.cluster.list_all_clusters", - return_value=[mock_raycluster1, mock_raycluster2], - ): - # Call the function under test - df = cf_widgets._fetch_cluster_data(namespace="default") - - # Expected DataFrame - expected_data = { - "Name": ["test-cluster-1", "test-cluster-2"], - "Namespace": ["default", "default"], - "Num Workers": [1, 2], - "Head GPUs": ["nvidia.com/gpu: 1", "0"], - "Worker GPUs": ["nvidia.com/gpu: 2", "0"], - "Head CPU Req~Lim": ["500m~1000m", "0~0"], - "Head Memory Req~Lim": ["1Gi~2Gi", "0~0"], - "Worker CPU Req~Lim": ["1000m~2000m", "0~0"], - "Worker Memory Req~Lim": ["2Gi~4Gi", "0~0"], - "status": [ - 'Ready ✓', - 'Suspended ❄️', - ], - } - - expected_df = pd.DataFrame(expected_data) - - # Assert that the DataFrame matches expected - pd.testing.assert_frame_equal( - df.reset_index(drop=True), expected_df.reset_index(drop=True) - ) - - -def test_format_status(): - # Test each possible status - test_cases = [ - (RayClusterStatus.READY, 'Ready ✓'), - ( - RayClusterStatus.SUSPENDED, - 'Suspended ❄️', - ), - (RayClusterStatus.FAILED, 'Failed ✗'), - (RayClusterStatus.UNHEALTHY, 'Unhealthy'), - (RayClusterStatus.UNKNOWN, 'Unknown'), - ] - - for status, expected_output in test_cases: - assert ( - cf_widgets._format_status(status) == expected_output - ), f"Failed for status: {status}" - - # Test an unrecognized status - unrecognized_status = "NotAStatus" - assert ( - cf_widgets._format_status(unrecognized_status) == "NotAStatus" - ), "Failed for unrecognized status" - - -# Make sure to always keep this function last -def test_cleanup(): - os.remove(f"{aw_dir}unit-test-no-kueue.yaml") - 
os.remove(f"{aw_dir}unit-test-cluster.yaml") - os.remove(f"{aw_dir}test.yaml") - os.remove(f"{aw_dir}raytest2.yaml") - os.remove(f"{aw_dir}unit-test-cluster-ray.yaml") - os.remove("tls-cluster-namespace/ca.crt") - os.remove("tls-cluster-namespace/tls.crt") - os.remove("tls-cluster-namespace/tls.key") - os.rmdir("tls-cluster-namespace") diff --git a/tests/unit_test_support.py b/tests/unit_test_support.py deleted file mode 100644 index b3c2e197..00000000 --- a/tests/unit_test_support.py +++ /dev/null @@ -1,60 +0,0 @@ -from codeflare_sdk.ray.cluster.cluster import ( - Cluster, - ClusterConfiguration, -) - - -def createClusterConfig(): - config = ClusterConfiguration( - name="unit-test-cluster", - namespace="ns", - num_workers=2, - worker_cpu_requests=3, - worker_cpu_limits=4, - worker_memory_requests=5, - worker_memory_limits=6, - worker_extended_resource_requests={"nvidia.com/gpu": 7}, - appwrapper=True, - machine_types=["cpu.small", "gpu.large"], - image_pull_secrets=["unit-test-pull-secret"], - write_to_file=True, - ) - return config - - -def createClusterWithConfig(mocker): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, - ) - cluster = Cluster(createClusterConfig()) - return cluster - - -def createClusterWrongType(): - config = ClusterConfiguration( - name="unit-test-cluster", - namespace="ns", - num_workers=2, - worker_cpu_requests=[], - worker_cpu_limits=4, - worker_memory_requests=5, - worker_memory_limits=6, - worker_extended_resource_requests={"nvidia.com/gpu": 7}, - appwrapper=True, - machine_types=[True, False], - image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", - write_to_file=True, - labels={1: 1}, - ) - return config - - -def get_package_and_version(package_name, requirements_file_path): - with open(requirements_file_path, "r") as file: - for line in file: - if line.strip().startswith(f"{package_name}=="): - return line.strip() - return None From 54f53bcc70177f16ca012f458b3c44a9dbb8f9a3 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 8 Oct 2024 16:52:02 +0100 Subject: [PATCH 370/496] refactor: unit test workflows --- .github/workflows/coverage-badge.yaml | 2 +- .github/workflows/unit-tests.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/coverage-badge.yaml b/.github/workflows/coverage-badge.yaml index 5a88934c..bae1212d 100644 --- a/.github/workflows/coverage-badge.yaml +++ b/.github/workflows/coverage-badge.yaml @@ -26,7 +26,7 @@ jobs: poetry install --with test - name: Generate coverage report run: | - coverage run -m --source=src pytest -v tests/unit_test.py + coverage run -m pytest - name: Coverage Badge uses: tj-actions/coverage-badge-py@v2 diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index bb010e2c..b7d8ba61 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -26,7 +26,7 @@ jobs: poetry install --with test - name: Test with pytest and check coverage run: | - coverage run -m --source=src pytest -v tests/unit_test.py + coverage run -m pytest coverage=$(coverage report -m | tail -1 | tail -c 4 | head -c 2) if (( $coverage < 90 )); then echo "Coverage failed at ${coverage}%"; exit 1; else echo "Coverage passed, ${coverage}%"; fi continue-on-error: true From 
7694218b9c27a4de71239fb6424917939af69001 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 8 Oct 2024 16:54:03 +0100 Subject: [PATCH 371/496] docs: update docs to reflect unit test refactor --- CONTRIBUTING.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 60f84102..884632da 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -71,7 +71,7 @@ pip install -e . To run the unit tests, execute: ```sh -pytest -v tests/unit_test.py +pytest -v src/codeflare_sdk ``` ### Local e2e Testing @@ -80,7 +80,7 @@ pytest -v tests/unit_test.py #### Code Coverage -- Run tests with the following command: `coverage run -m --source=src pytest tests/unit_test.py` +- Run tests with the following command: `coverage run -m pytest` - To then view a code coverage report w/ missing lines, run `coverage report -m` ### Code Formatting From b90d5d63c5a2ebdbf5a45c6f3dfdd3e923e0f507 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Wed, 9 Oct 2024 19:09:35 +0000 Subject: [PATCH 372/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index 607d3de4..fe061433 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 91% - 91% + 94% + 94% From 7c04444481f510a958655587e6d5161641ae5577 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Mon, 14 Oct 2024 10:24:46 +0100 Subject: [PATCH 373/496] Dynamic notebooks sync workflow --- .github/workflows/odh-notebooks-sync.yml | 25 ++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index d31a49f5..ecb5b0a5 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -7,7 +7,14 @@ on: required: true description: "Owner of target upstream notebooks repository used to open a PR against" default: "opendatahub-io" - + notebooks-target-branch: + required: true + description: "Target branch of upstream repository" + default: "main" + python-version: + required: true + description: "Provide the python version to be used for the notebooks" + default: "3.11" codeflare-repository-organization: required: true description: "Owner of origin notebooks repository used to open a PR" @@ -18,7 +25,8 @@ on: description: "Provide version of the Codeflare-SDK release" env: - BRANCH_NAME: main + BRANCH_NAME: ${{ github.event.inputs.notebooks-target-branch }} + PYTHON_VERSION: ${{ github.event.inputs.python-version }} CODEFLARE_RELEASE_VERSION: ${{ github.event.inputs.codeflare_sdk_release_version }} UPDATER_BRANCH: odh-sync-updater-${{ github.run_id }} UPSTREAM_OWNER: ${{ github.event.inputs.upstream-repository-organization }} @@ -39,23 +47,24 @@ jobs: git config --global user.email "138894154+codeflare-machine-account@users.noreply.github.com" git config --global user.name "codeflare-machine-account" git remote -v - git pull upstream main && git push origin main + git fetch upstream $BRANCH_NAME + git checkout $BRANCH_NAME - name: Setup Python environment uses: actions/setup-python@v4 with: - python-version: | - 3.9 - 3.11 + python-version: ${{ env.PYTHON_VERSION }} cache: 'pipenv' + # Sync fails with pipenv 2024.1.0 (current latest version) + # TODO: We should retry with later versions of pipenv once they are available. 
- name: Install pipenv and pip-versions - run: pip install pipenv pip-versions + run: pip install pipenv==2024.0.3 pip-versions - name: Update Pipfiles in accordance with Codeflare-SDK latest release run: | package_name=codeflare-sdk - available_python_versions=("3.9" "3.11") # add space separated python versions according to 'python-versions' specified in 'Setup Python Environment' step + available_python_versions=("$PYTHON_VERSION") # add space separated python versions according to 'python-versions' specified in 'Setup Python Environment' step install_package_using_pipenv(){ # args allow custom names for Pipfile and Pipfile.lock if [ $# -eq 2 ]; then From 9116859cdcb457616a4eafe56805004b701d22f8 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 17 Oct 2024 11:35:13 +0100 Subject: [PATCH 374/496] update approvers and reviewers --- OWNERS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/OWNERS b/OWNERS index 02abe61c..f375f431 100644 --- a/OWNERS +++ b/OWNERS @@ -1,6 +1,7 @@ approvers: - astefanutti - Bobbins228 + - ChristianZaccaria - dimakis - Fiona-Waters - franciscojavierarceo @@ -19,3 +20,4 @@ reviewers: - maxusmusti - MichaelClifford - varshaprasad96 + - Ygnas From b7c37af4e59cd5ac686b3d37f2424455b8ae9b62 Mon Sep 17 00:00:00 2001 From: Kevin Date: Wed, 9 Oct 2024 15:49:54 -0400 Subject: [PATCH 375/496] add function for listing lqs by flavors Signed-off-by: Kevin --- src/codeflare_sdk/__init__.py | 4 ++ .../kubernetes_cluster/kube_api_helpers.py | 1 + src/codeflare_sdk/common/kueue/__init__.py | 1 + src/codeflare_sdk/common/kueue/kueue.py | 49 ++++++++++++++++++- src/codeflare_sdk/common/kueue/test_kueue.py | 42 ++++++++++++++++ 5 files changed, 96 insertions(+), 1 deletion(-) diff --git a/src/codeflare_sdk/__init__.py b/src/codeflare_sdk/__init__.py index a1b5535c..9ab5c745 100644 --- a/src/codeflare_sdk/__init__.py +++ b/src/codeflare_sdk/__init__.py @@ -21,6 +21,10 @@ KubeConfigFileAuthentication, ) +from .common.kueue import ( + list_local_queues, +) + from .common.utils import generate_cert from .common.utils.demos import copy_demo_nbs diff --git a/src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py b/src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py index efa1d2b6..8974a3f3 100644 --- a/src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py +++ b/src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py @@ -20,6 +20,7 @@ import executing from kubernetes import client, config from urllib3.util import parse_url +import os # private methods diff --git a/src/codeflare_sdk/common/kueue/__init__.py b/src/codeflare_sdk/common/kueue/__init__.py index b02e3c24..c9c641c1 100644 --- a/src/codeflare_sdk/common/kueue/__init__.py +++ b/src/codeflare_sdk/common/kueue/__init__.py @@ -2,4 +2,5 @@ get_default_kueue_name, local_queue_exists, add_queue_label, + list_local_queues, ) diff --git a/src/codeflare_sdk/common/kueue/kueue.py b/src/codeflare_sdk/common/kueue/kueue.py index 0c207548..c063c6fe 100644 --- a/src/codeflare_sdk/common/kueue/kueue.py +++ b/src/codeflare_sdk/common/kueue/kueue.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Optional +from typing import Optional, List from codeflare_sdk.common import _kube_api_error_handling from codeflare_sdk.common.kubernetes_cluster.auth import config_check, get_api_client from kubernetes import client @@ -45,6 +45,53 @@ def get_default_kueue_name(namespace: str): return lq["metadata"]["name"] +def list_local_queues( + namespace: Optional[str] = None, flavors: Optional[List[str]] = None +) -> List[dict]: + """ + This function lists all local queues in the namespace provided. + + If no namespace is provided, it will use the current namespace. If flavors is provided, it will only return local + queues that support all the flavors provided. + + Note: + Depending on the version of the local queue API, the available flavors may not be present in the response. + + Args: + namespace (str, optional): The namespace to list local queues from. Defaults to None. + flavors (List[str], optional): The flavors to filter local queues by. Defaults to None. + Returns: + List[dict]: A list of dictionaries containing the name of the local queue and the available flavors + """ + + from ...ray.cluster.cluster import get_current_namespace + + if namespace is None: # pragma: no cover + namespace = get_current_namespace() + try: + config_check() + api_instance = client.CustomObjectsApi(get_api_client()) + local_queues = api_instance.list_namespaced_custom_object( + group="kueue.x-k8s.io", + version="v1beta1", + namespace=namespace, + plural="localqueues", + ) + except ApiException as e: # pragma: no cover + return _kube_api_error_handling(e) + to_return = [] + for lq in local_queues["items"]: + item = {"name": lq["metadata"]["name"]} + if "flavors" in lq["status"]: + item["flavors"] = [f["name"] for f in lq["status"]["flavors"]] + if flavors is not None and not set(flavors).issubset(set(item["flavors"])): + continue + elif flavors is not None: + continue # NOTE: may be indicative old local queue API and might be worth while raising or warning here + to_return.append(item) + return to_return + + def local_queue_exists(namespace: str, local_queue_name: str): # get all local queues in the namespace try: diff --git a/src/codeflare_sdk/common/kueue/test_kueue.py b/src/codeflare_sdk/common/kueue/test_kueue.py index a4e984c3..e9de364d 100644 --- a/src/codeflare_sdk/common/kueue/test_kueue.py +++ b/src/codeflare_sdk/common/kueue/test_kueue.py @@ -18,6 +18,7 @@ import os import filecmp from pathlib import Path +from .kueue import list_local_queues parent = Path(__file__).resolve().parents[4] # project directory aw_dir = os.path.expanduser("~/.codeflare/resources/") @@ -131,6 +132,47 @@ def test_get_local_queue_exists_fail(mocker): ) +def test_list_local_queues(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value={ + "items": [ + { + "metadata": {"name": "lq1"}, + "status": {"flavors": [{"name": "default"}]}, + }, + { + "metadata": {"name": "lq2"}, + "status": { + "flavors": [{"name": "otherflavor"}, {"name": "default"}] + }, + }, + ] + }, + ) + lqs = list_local_queues("ns") + assert lqs == [ + {"name": "lq1", "flavors": ["default"]}, + {"name": "lq2", "flavors": ["otherflavor", "default"]}, + ] + lqs = list_local_queues("ns", flavors=["otherflavor"]) + assert lqs == [{"name": "lq2", "flavors": ["otherflavor", "default"]}] + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value={ + "items": [ + { + "metadata": {"name": "lq1"}, + 
"status": {}, + }, + ] + }, + ) + lqs = list_local_queues("ns", flavors=["default"]) + assert lqs == [] + + # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}unit-test-cluster-kueue.yaml") From 5606f845674adbdb87c9c2e8cd1ffe81621e3880 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 11 Oct 2024 14:14:50 +0100 Subject: [PATCH 376/496] docs: convert auto-generated documentation to from pydoc to sphinx --- .github/workflows/release.yaml | 5 +- docs/authentication.md | 40 - docs/cluster-configuration.md | 46 - docs/detailed-documentation/cluster/auth.html | 742 ------ .../cluster/awload.html | 328 --- .../cluster/cluster.html | 2174 ----------------- .../cluster/config.html | 764 ------ .../detailed-documentation/cluster/index.html | 129 - .../detailed-documentation/cluster/model.html | 531 ---- .../cluster/widgets.html | 758 ------ docs/detailed-documentation/index.html | 106 - docs/detailed-documentation/job/index.html | 72 - docs/detailed-documentation/job/ray_jobs.html | 585 ----- docs/detailed-documentation/utils/demos.html | 138 -- .../utils/generate_cert.html | 451 ---- .../utils/generate_yaml.html | 951 ------- docs/detailed-documentation/utils/index.html | 88 - .../utils/kube_api_helpers.html | 112 - .../utils/pretty_print.html | 491 ---- docs/e2e.md | 133 - docs/generate-documentation.md | 14 + docs/s3-compatible-storage.md | 61 - docs/setup-kueue.md | 66 - docs/sphinx/Makefile | 20 + docs/sphinx/conf.py | 38 + docs/sphinx/index.rst | 32 + docs/sphinx/make.bat | 35 + docs/sphinx/user-docs/authentication.rst | 66 + .../user-docs/cluster-configuration.rst | 72 + docs/sphinx/user-docs/e2e.rst | 210 ++ .../user-docs/s3-compatible-storage.rst | 86 + docs/sphinx/user-docs/setup-kueue.rst | 109 + poetry.lock | 261 +- pyproject.toml | 3 +- 34 files changed, 896 insertions(+), 8821 deletions(-) delete mode 100644 docs/authentication.md delete mode 100644 docs/cluster-configuration.md delete mode 100644 docs/detailed-documentation/cluster/auth.html delete mode 100644 docs/detailed-documentation/cluster/awload.html delete mode 100644 docs/detailed-documentation/cluster/cluster.html delete mode 100644 docs/detailed-documentation/cluster/config.html delete mode 100644 docs/detailed-documentation/cluster/index.html delete mode 100644 docs/detailed-documentation/cluster/model.html delete mode 100644 docs/detailed-documentation/cluster/widgets.html delete mode 100644 docs/detailed-documentation/index.html delete mode 100644 docs/detailed-documentation/job/index.html delete mode 100644 docs/detailed-documentation/job/ray_jobs.html delete mode 100644 docs/detailed-documentation/utils/demos.html delete mode 100644 docs/detailed-documentation/utils/generate_cert.html delete mode 100644 docs/detailed-documentation/utils/generate_yaml.html delete mode 100644 docs/detailed-documentation/utils/index.html delete mode 100644 docs/detailed-documentation/utils/kube_api_helpers.html delete mode 100644 docs/detailed-documentation/utils/pretty_print.html delete mode 100644 docs/e2e.md create mode 100644 docs/generate-documentation.md delete mode 100644 docs/s3-compatible-storage.md delete mode 100644 docs/setup-kueue.md create mode 100644 docs/sphinx/Makefile create mode 100644 docs/sphinx/conf.py create mode 100644 docs/sphinx/index.rst create mode 100644 docs/sphinx/make.bat create mode 100644 docs/sphinx/user-docs/authentication.rst create mode 100644 docs/sphinx/user-docs/cluster-configuration.rst create mode 100644 docs/sphinx/user-docs/e2e.rst create mode 100644 
docs/sphinx/user-docs/s3-compatible-storage.rst create mode 100644 docs/sphinx/user-docs/setup-kueue.rst diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 0b9d6bbc..6e56a3f8 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -51,7 +51,10 @@ jobs: - name: Run poetry install run: poetry install --with docs - name: Create new documentation - run: poetry run pdoc --html -o docs/detailed-documentation src/codeflare_sdk && pushd docs/detailed-documentation && rm -rf cluster job utils && mv codeflare_sdk/* . && rm -rf codeflare_sdk && popd && find docs/detailed-documentation -type f -name "*.html" -exec bash -c "echo '' >> {}" \; + run: | + sphinx-apidoc -o docs/sphinx src/codeflare_sdk "**/*test_*" --force + make clean -C docs/sphinx + make html -C docs/sphinx - name: Copy demo notebooks into SDK package run: cp -r demo-notebooks src/codeflare_sdk/demo-notebooks - name: Run poetry build diff --git a/docs/authentication.md b/docs/authentication.md deleted file mode 100644 index bb27f171..00000000 --- a/docs/authentication.md +++ /dev/null @@ -1,40 +0,0 @@ -# Authentication via the CodeFlare SDK -Currently there are four ways of authenticating to your cluster via the SDK.
-Authenticating with your cluster allows you to perform actions such as creating Ray Clusters and Job Submission.
-
-## Method 1 Token Authentication
-This is how a typical user would authenticate to their cluster using `TokenAuthentication`.
-```
-from codeflare_sdk import TokenAuthentication
-
-auth = TokenAuthentication(
-    token = "XXXXX",
-    server = "XXXXX",
-    skip_tls=False,
-    # ca_cert_path="/path/to/cert"
-)
-auth.login()
-# log out with auth.logout()
-```
-Setting `skip_tls=True` allows interaction with an HTTPS server bypassing the server certificate checks although this is not secure.
-You can pass a custom certificate to `TokenAuthentication` by using `ca_cert_path="/path/to/cert"` when authenticating, provided `skip_tls=False`. Alternatively, you can set the environment variable `CF_SDK_CA_CERT_PATH` to the path of your custom certificate.
-
-## Method 2 Kubernetes Config File Authentication (Default location)
-If a user has authenticated to their cluster by alternate means, e.g. by running a login command like `oc login --token= --server=`, their Kubernetes config file should have been updated.
-If the user has not specifically authenticated through the SDK by other means such as `TokenAuthentication` then the SDK will try to use their default Kubernetes config file located at `"/HOME/.kube/config"`.
-
-## Method 3 Specifying a Kubernetes Config File
-A user can specify a config file via a different authentication class `KubeConfigFileAuthentication` for authenticating with the SDK.
-This is what loading a custom config file would typically look like.
-```
-from codeflare_sdk import KubeConfigFileAuthentication
-
-auth = KubeConfigFileAuthentication(
-    kube_config_path="/path/to/config",
-)
-auth.load_kube_config()
-# log out with auth.logout()
-```
-
-## Method 4 In-Cluster Authentication
-If a user does not authenticate by any of the means detailed above and does not have a config file at `"/HOME/.kube/config"` the SDK will try to authenticate with the in-cluster configuration file.
diff --git a/docs/cluster-configuration.md b/docs/cluster-configuration.md
deleted file mode 100644
index 97068b49..00000000
--- a/docs/cluster-configuration.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# Ray Cluster Configuration
-
-To create Ray Clusters using the CodeFlare SDK a cluster configuration needs to be created first.
-This is what a typical cluster configuration would look like; Note: The values for CPU and Memory are at the minimum requirements for creating the Ray Cluster.
-
-```python
-from codeflare_sdk import Cluster, ClusterConfiguration
-
-cluster = Cluster(ClusterConfiguration(
-    name='ray-example', # Mandatory Field
-    namespace='default', # Default None
-    head_cpu_requests=1, # Default 2
-    head_cpu_limits=1, # Default 2
-    head_memory_requests=1, # Default 8
-    head_memory_limits=1, # Default 8
-    head_extended_resource_requests={'nvidia.com/gpu':0}, # Default 0
-    worker_extended_resource_requests={'nvidia.com/gpu':0}, # Default 0
-    num_workers=1, # Default 1
-    worker_cpu_requests=1, # Default 1
-    worker_cpu_limits=1, # Default 1
-    worker_memory_requests=2, # Default 2
-    worker_memory_limits=2, # Default 2
-    # image="", # Optional Field
-    machine_types=["m5.xlarge", "g4dn.xlarge"],
-    labels={"exampleLabel": "example", "secondLabel": "example"},
-))
-```
-Note: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. If you have your own Ray image which suits your purposes, specify it in the image field to override the default image. If you are using ROCm compatible GPUs you can use 'quay.io/modh/ray:2.35.0-py39-rocm61'. You can also find documentation on building a custom image [here](https://github.com/opendatahub-io/distributed-workloads/tree/main/images/runtime/examples).
-
-The `labels={"exampleLabel": "example"}` parameter can be used to apply additional labels to the RayCluster resource.
-
-After creating their `cluster`, a user can call `cluster.up()` and `cluster.down()` to respectively create or remove the Ray Cluster.
-
-
-## Deprecating Parameters
-The following parameters of the `ClusterConfiguration` are being deprecated in release `v0.22.0`.
-| Deprecated Parameter | Replaced By |
-| :--------- | :-------- |
-| `head_cpus` | `head_cpu_requests`, `head_cpu_limits` |
-| `head_memory` | `head_memory_requests`, `head_memory_limits` |
-| `min_cpus` | `worker_cpu_requests` |
-| `max_cpus` | `worker_cpu_limits` |
-| `min_memory` | `worker_memory_requests` |
-| `max_memory` | `worker_memory_limits` |
-| `head_gpus` | `head_extended_resource_requests` |
-| `num_gpus` | `worker_extended_resource_requests` |
diff --git a/docs/detailed-documentation/cluster/auth.html b/docs/detailed-documentation/cluster/auth.html
deleted file mode 100644
index d35b4609..00000000
--- a/docs/detailed-documentation/cluster/auth.html
+++ /dev/null
@@ -1,742 +0,0 @@
-codeflare_sdk.cluster.auth API documentation
Module codeflare_sdk.cluster.auth

The auth sub-module contains the definitions for the Authentication objects, which represent
the methods by which a user can authenticate to their cluster(s). The abstract class, Authentication,
contains two required methods login() and logout(). Users can use one of the existing concrete classes to
authenticate to their cluster or add their own custom concrete classes here.

    # Copyright 2022 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -The auth sub-module contains the definitions for the Authentication objects, which represent
    -the methods by which a user can authenticate to their cluster(s). The abstract class, `Authentication`,
    -contains two required methods `login()` and `logout()`. Users can use one of the existing concrete classes to
    -authenticate to their cluster or add their own custom concrete classes here.
    -"""
    -
    -import abc
    -from kubernetes import client, config
    -import os
    -import urllib3
    -from ..utils.kube_api_helpers import _kube_api_error_handling
    -
    -from typing import Optional
    -
    -global api_client
    -api_client = None
    -global config_path
    -config_path = None
    -
    -WORKBENCH_CA_CERT_PATH = "/etc/pki/tls/custom-certs/ca-bundle.crt"
    -
    -
    -class Authentication(metaclass=abc.ABCMeta):
    -    """
    -    An abstract class that defines the necessary methods for authenticating to a remote environment.
    -    Specifically, this class defines the need for a `login()` and a `logout()` function.
    -    """
    -
    -    def login(self):
    -        """
    -        Method for logging in to a remote cluster.
    -        """
    -        pass
    -
    -    def logout(self):
    -        """
    -        Method for logging out of the remote cluster.
    -        """
    -        pass
    -
    -
    -class KubeConfiguration(metaclass=abc.ABCMeta):
    -    """
    -    An abstract class that defines the method for loading a user defined config file using the `load_kube_config()` function
    -    """
    -
    -    def load_kube_config(self):
    -        """
    -        Method for setting your Kubernetes configuration to a certain file
    -        """
    -        pass
    -
    -    def logout(self):
    -        """
    -        Method for logging out of the remote cluster
    -        """
    -        pass
    -
    -
    -class TokenAuthentication(Authentication):
    -    """
    -    `TokenAuthentication` is a subclass of `Authentication`. It can be used to authenticate to a Kubernetes
    -    cluster when the user has an API token and the API server address.
    -    """
    -
    -    def __init__(
    -        self,
    -        token: str,
    -        server: str,
    -        skip_tls: bool = False,
    -        ca_cert_path: str = None,
    -    ):
    -        """
    -        Initialize a TokenAuthentication object that requires a value for `token`, the API Token
    -        and `server`, the API server address for authenticating to a Kubernetes cluster.
    -        """
    -
    -        self.token = token
    -        self.server = server
    -        self.skip_tls = skip_tls
    -        self.ca_cert_path = _gen_ca_cert_path(ca_cert_path)
    -
    -    def login(self) -> str:
    -        """
    -        This function is used to log in to a Kubernetes cluster using the user's API token and API server address.
    -        Depending on the cluster, a user can choose to login in with `--insecure-skip-tls-verify` by setting `skip_tls`
    -        to `True` or `--certificate-authority` by setting `skip_tls` to False and providing a path to a ca bundle with `ca_cert_path`.
    -        """
    -        global config_path
    -        global api_client
    -        try:
    -            configuration = client.Configuration()
    -            configuration.api_key_prefix["authorization"] = "Bearer"
    -            configuration.host = self.server
    -            configuration.api_key["authorization"] = self.token
    -
    -            api_client = client.ApiClient(configuration)
    -            if not self.skip_tls:
    -                _client_with_cert(api_client, self.ca_cert_path)
    -            else:
    -                urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    -                print("Insecure request warnings have been disabled")
    -                configuration.verify_ssl = False
    -
    -            client.AuthenticationApi(api_client).get_api_group()
    -            config_path = None
    -            return "Logged into %s" % self.server
    -        except client.ApiException as e:
    -            _kube_api_error_handling(e)
    -
    -    def logout(self) -> str:
    -        """
    -        This function is used to logout of a Kubernetes cluster.
    -        """
    -        global config_path
    -        config_path = None
    -        global api_client
    -        api_client = None
    -        return "Successfully logged out of %s" % self.server
    -
    -
    -class KubeConfigFileAuthentication(KubeConfiguration):
    -    """
    -    A class that defines the necessary methods for passing a user's own Kubernetes config file.
    -    Specifically this class defines the `load_kube_config()` and `config_check()` functions.
    -    """
    -
    -    def __init__(self, kube_config_path: str = None):
    -        self.kube_config_path = kube_config_path
    -
    -    def load_kube_config(self):
    -        """
    -        Function for loading a user's own predefined Kubernetes config file.
    -        """
    -        global config_path
    -        global api_client
    -        try:
    -            if self.kube_config_path == None:
    -                return "Please specify a config file path"
    -            config_path = self.kube_config_path
    -            api_client = None
    -            config.load_kube_config(config_path)
    -            response = "Loaded user config file at path %s" % self.kube_config_path
    -        except config.ConfigException:  # pragma: no cover
    -            config_path = None
    -            raise Exception("Please specify a config file path")
    -        return response
    -
    -
    -def config_check() -> str:
    -    """
    -    Function for loading the config file at the default config location ~/.kube/config if the user has not
    -    specified their own config file or has logged in with their token and server.
    -    """
    -    global config_path
    -    global api_client
    -    home_directory = os.path.expanduser("~")
    -    if config_path == None and api_client == None:
    -        if os.path.isfile("%s/.kube/config" % home_directory):
    -            try:
    -                config.load_kube_config()
    -            except Exception as e:  # pragma: no cover
    -                _kube_api_error_handling(e)
    -        elif "KUBERNETES_PORT" in os.environ:
    -            try:
    -                config.load_incluster_config()
    -            except Exception as e:  # pragma: no cover
    -                _kube_api_error_handling(e)
    -        else:
    -            raise PermissionError(
    -                "Action not permitted, have you put in correct/up-to-date auth credentials?"
    -            )
    -
    -    if config_path != None and api_client == None:
    -        return config_path
    -
    -
    -def _client_with_cert(client: client.ApiClient, ca_cert_path: Optional[str] = None):
    -    client.configuration.verify_ssl = True
    -    cert_path = _gen_ca_cert_path(ca_cert_path)
    -    if cert_path is None:
    -        client.configuration.ssl_ca_cert = None
    -    elif os.path.isfile(cert_path):
    -        client.configuration.ssl_ca_cert = cert_path
    -    else:
    -        raise FileNotFoundError(f"Certificate file not found at {cert_path}")
    -
    -
    -def _gen_ca_cert_path(ca_cert_path: Optional[str]):
    -    """Gets the path to the default CA certificate file either through env config or default path"""
    -    if ca_cert_path is not None:
    -        return ca_cert_path
    -    elif "CF_SDK_CA_CERT_PATH" in os.environ:
    -        return os.environ.get("CF_SDK_CA_CERT_PATH")
    -    elif os.path.exists(WORKBENCH_CA_CERT_PATH):
    -        return WORKBENCH_CA_CERT_PATH
    -    else:
    -        return None
    -
    -
    -def get_api_client() -> client.ApiClient:
    -    "This function should load the api client with defaults"
    -    if api_client != None:
    -        return api_client
    -    to_return = client.ApiClient()
    -    _client_with_cert(to_return)
    -    return to_return
    -
    -
    -
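The listing above covers the full auth sub-module. For orientation, a minimal login/logout sketch using these classes might look like the following; the token, server address and certificate path are placeholders, not values taken from this page:

```python
# Minimal sketch only; token/server/cert values below are placeholders.
from codeflare_sdk.cluster.auth import TokenAuthentication, get_api_client

auth = TokenAuthentication(
    token="XXXXX",                         # API token for the target cluster
    server="https://api.example:6443",     # API server address (placeholder)
    skip_tls=False,
    # ca_cert_path="/path/to/cert",        # optional custom CA bundle
)
auth.login()    # returns "Logged into <server>" on success

k8s_client = get_api_client()  # Kubernetes ApiClient configured by the login above

auth.logout()   # returns "Successfully logged out of <server>"
```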
    -
    -
    -
    -
    -

    Functions

    -
    -
    -def config_check() ‑> str -
    -
    -

    Function for loading the config file at the default config location ~/.kube/config if the user has not -specified their own config file or has logged in with their token and server.

    -
    - -Expand source code - -
    def config_check() -> str:
    -    """
    -    Function for loading the config file at the default config location ~/.kube/config if the user has not
    -    specified their own config file or has logged in with their token and server.
    -    """
    -    global config_path
    -    global api_client
    -    home_directory = os.path.expanduser("~")
    -    if config_path == None and api_client == None:
    -        if os.path.isfile("%s/.kube/config" % home_directory):
    -            try:
    -                config.load_kube_config()
    -            except Exception as e:  # pragma: no cover
    -                _kube_api_error_handling(e)
    -        elif "KUBERNETES_PORT" in os.environ:
    -            try:
    -                config.load_incluster_config()
    -            except Exception as e:  # pragma: no cover
    -                _kube_api_error_handling(e)
    -        else:
    -            raise PermissionError(
    -                "Action not permitted, have you put in correct/up-to-date auth credentials?"
    -            )
    -
    -    if config_path != None and api_client == None:
    -        return config_path
    -
    -
    -
    -def get_api_client() ‑> kubernetes.client.api_client.ApiClient -
    -
    -

    This function should load the api client with defaults

    -
    def get_api_client() -> client.ApiClient:
    -    "This function should load the api client with defaults"
    -    if api_client != None:
    -        return api_client
    -    to_return = client.ApiClient()
    -    _client_with_cert(to_return)
    -    return to_return
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class Authentication -
    -
    -

    An abstract class that defines the necessary methods for authenticating to a remote environment. Specifically, this class defines the need for a login() and a logout() function.

    -
    class Authentication(metaclass=abc.ABCMeta):
    -    """
    -    An abstract class that defines the necessary methods for authenticating to a remote environment.
    -    Specifically, this class defines the need for a `login()` and a `logout()` function.
    -    """
    -
    -    def login(self):
    -        """
    -        Method for logging in to a remote cluster.
    -        """
    -        pass
    -
    -    def logout(self):
    -        """
    -        Method for logging out of the remote cluster.
    -        """
    -        pass
    -
    -

    Subclasses

    - TokenAuthentication

    Methods

    -
    -
    -def login(self) -
    -
    -

    Method for logging in to a remote cluster.

    -
    def login(self):
    -    """
    -    Method for logging in to a remote cluster.
    -    """
    -    pass
    -
    -
    -
    -def logout(self) -
    -
    -

    Method for logging out of the remote cluster.

    -
    def logout(self):
    -    """
    -    Method for logging out of the remote cluster.
    -    """
    -    pass
    -
    -
    -
    -
    -
    -class KubeConfigFileAuthentication(kube_config_path: str = None)
    -
    -

    A class that defines the necessary methods for passing a user's own Kubernetes config file. Specifically this class defines the load_kube_config() and config_check() functions.

    -
    class KubeConfigFileAuthentication(KubeConfiguration):
    -    """
    -    A class that defines the necessary methods for passing a user's own Kubernetes config file.
    -    Specifically this class defines the `load_kube_config()` and `config_check()` functions.
    -    """
    -
    -    def __init__(self, kube_config_path: str = None):
    -        self.kube_config_path = kube_config_path
    -
    -    def load_kube_config(self):
    -        """
    -        Function for loading a user's own predefined Kubernetes config file.
    -        """
    -        global config_path
    -        global api_client
    -        try:
    -            if self.kube_config_path == None:
    -                return "Please specify a config file path"
    -            config_path = self.kube_config_path
    -            api_client = None
    -            config.load_kube_config(config_path)
    -            response = "Loaded user config file at path %s" % self.kube_config_path
    -        except config.ConfigException:  # pragma: no cover
    -            config_path = None
    -            raise Exception("Please specify a config file path")
    -        return response
    -
    -

    Ancestors

    - KubeConfiguration

    Methods

    -
    -
    -def load_kube_config(self) -
    -
    -

    Function for loading a user's own predefined Kubernetes config file.

    -
    def load_kube_config(self):
    -    """
    -    Function for loading a user's own predefined Kubernetes config file.
    -    """
    -    global config_path
    -    global api_client
    -    try:
    -        if self.kube_config_path == None:
    -            return "Please specify a config file path"
    -        config_path = self.kube_config_path
    -        api_client = None
    -        config.load_kube_config(config_path)
    -        response = "Loaded user config file at path %s" % self.kube_config_path
    -    except config.ConfigException:  # pragma: no cover
    -        config_path = None
    -        raise Exception("Please specify a config file path")
    -    return response
    -
    -
    -
    -

    Inherited members

    - KubeConfiguration: logout
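    A minimal sketch of authenticating with a user-supplied kubeconfig via this class; the file path below is a placeholder for illustration:

        from codeflare_sdk.cluster.auth import KubeConfigFileAuthentication

        auth = KubeConfigFileAuthentication(kube_config_path="/path/to/kubeconfig")
        # Loads the given config file and returns a confirmation string,
        # or asks for a path if none was provided.
        print(auth.load_kube_config())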
    -
    -class KubeConfiguration -
    -
    -

    An abstract class that defines the method for loading a user-defined config file using the load_kube_config() function.

    -
    class KubeConfiguration(metaclass=abc.ABCMeta):
    -    """
    -    An abstract class that defines the method for loading a user defined config file using the `load_kube_config()` function
    -    """
    -
    -    def load_kube_config(self):
    -        """
    -        Method for setting your Kubernetes configuration to a certain file
    -        """
    -        pass
    -
    -    def logout(self):
    -        """
    -        Method for logging out of the remote cluster
    -        """
    -        pass
    -
    -

    Subclasses

    - KubeConfigFileAuthentication

    Methods

    -
    -
    -def load_kube_config(self) -
    -
    -

    Method for setting your Kubernetes configuration to a certain file

    -
    def load_kube_config(self):
    -    """
    -    Method for setting your Kubernetes configuration to a certain file
    -    """
    -    pass
    -
    -
    -
    -def logout(self) -
    -
    -

    Method for logging out of the remote cluster

    -
    def logout(self):
    -    """
    -    Method for logging out of the remote cluster
    -    """
    -    pass
    -
    -
    -
    -
    -
    -class TokenAuthentication(token: str, server: str, skip_tls: bool = False, ca_cert_path: str = None)
    -
    -

    TokenAuthentication is a subclass of Authentication. It can be used to authenticate to a Kubernetes cluster when the user has an API token and the API server address.

    -

    Initialize a TokenAuthentication object that requires a value for token, the API Token and server, the API server address for authenticating to a Kubernetes cluster.

    -
    class TokenAuthentication(Authentication):
    -    """
    -    `TokenAuthentication` is a subclass of `Authentication`. It can be used to authenticate to a Kubernetes
    -    cluster when the user has an API token and the API server address.
    -    """
    -
    -    def __init__(
    -        self,
    -        token: str,
    -        server: str,
    -        skip_tls: bool = False,
    -        ca_cert_path: str = None,
    -    ):
    -        """
    -        Initialize a TokenAuthentication object that requires a value for `token`, the API Token
    -        and `server`, the API server address for authenticating to a Kubernetes cluster.
    -        """
    -
    -        self.token = token
    -        self.server = server
    -        self.skip_tls = skip_tls
    -        self.ca_cert_path = _gen_ca_cert_path(ca_cert_path)
    -
    -    def login(self) -> str:
    -        """
    -        This function is used to log in to a Kubernetes cluster using the user's API token and API server address.
    -        Depending on the cluster, a user can choose to login in with `--insecure-skip-tls-verify` by setting `skip_tls`
    -        to `True` or `--certificate-authority` by setting `skip_tls` to False and providing a path to a ca bundle with `ca_cert_path`.
    -        """
    -        global config_path
    -        global api_client
    -        try:
    -            configuration = client.Configuration()
    -            configuration.api_key_prefix["authorization"] = "Bearer"
    -            configuration.host = self.server
    -            configuration.api_key["authorization"] = self.token
    -
    -            api_client = client.ApiClient(configuration)
    -            if not self.skip_tls:
    -                _client_with_cert(api_client, self.ca_cert_path)
    -            else:
    -                urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    -                print("Insecure request warnings have been disabled")
    -                configuration.verify_ssl = False
    -
    -            client.AuthenticationApi(api_client).get_api_group()
    -            config_path = None
    -            return "Logged into %s" % self.server
    -        except client.ApiException as e:
    -            _kube_api_error_handling(e)
    -
    -    def logout(self) -> str:
    -        """
    -        This function is used to logout of a Kubernetes cluster.
    -        """
    -        global config_path
    -        config_path = None
    -        global api_client
    -        api_client = None
    -        return "Successfully logged out of %s" % self.server
    -
    -

    Ancestors

    - Authentication

    Methods

    -
    -
    -def login(self) ‑> str -
    -
    -

    This function is used to log in to a Kubernetes cluster using the user's API token and API server address. Depending on the cluster, a user can choose to log in with --insecure-skip-tls-verify by setting skip_tls to True or --certificate-authority by setting skip_tls to False and providing a path to a ca bundle with ca_cert_path.

    -
    def login(self) -> str:
    -    """
    -    This function is used to log in to a Kubernetes cluster using the user's API token and API server address.
    -    Depending on the cluster, a user can choose to login in with `--insecure-skip-tls-verify` by setting `skip_tls`
    -    to `True` or `--certificate-authority` by setting `skip_tls` to False and providing a path to a ca bundle with `ca_cert_path`.
    -    """
    -    global config_path
    -    global api_client
    -    try:
    -        configuration = client.Configuration()
    -        configuration.api_key_prefix["authorization"] = "Bearer"
    -        configuration.host = self.server
    -        configuration.api_key["authorization"] = self.token
    -
    -        api_client = client.ApiClient(configuration)
    -        if not self.skip_tls:
    -            _client_with_cert(api_client, self.ca_cert_path)
    -        else:
    -            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
    -            print("Insecure request warnings have been disabled")
    -            configuration.verify_ssl = False
    -
    -        client.AuthenticationApi(api_client).get_api_group()
    -        config_path = None
    -        return "Logged into %s" % self.server
    -    except client.ApiException as e:
    -        _kube_api_error_handling(e)
    -
    -
    -
    -def logout(self) ‑> str -
    -
    -

    This function is used to logout of a Kubernetes cluster.

    -
    def logout(self) -> str:
    -    """
    -    This function is used to logout of a Kubernetes cluster.
    -    """
    -    global config_path
    -    config_path = None
    -    global api_client
    -    api_client = None
    -    return "Successfully logged out of %s" % self.server
    -
    -
    -
    -
    -
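    A minimal sketch of token-based authentication with this class; the token, server URL, and CA bundle path are placeholders:

        from codeflare_sdk.cluster.auth import TokenAuthentication

        auth = TokenAuthentication(
            token="sha256~XXXX",                    # API token (placeholder)
            server="https://api.example.com:6443",  # API server address (placeholder)
            skip_tls=False,                         # keep TLS verification on...
            ca_cert_path="/path/to/ca-bundle.crt",  # ...using this CA bundle (optional)
        )
        print(auth.login())   # "Logged into https://api.example.com:6443" on success
        print(auth.logout())  # "Successfully logged out of https://api.example.com:6443"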
    -
    -
    - -
    diff --git a/docs/detailed-documentation/cluster/awload.html b/docs/detailed-documentation/cluster/awload.html
    deleted file mode 100644
    index fba18e3f..00000000
    --- a/docs/detailed-documentation/cluster/awload.html
    +++ /dev/null
    @@ -1,328 +0,0 @@
    -codeflare_sdk.cluster.awload API documentation
    -
    -
    -

    Module codeflare_sdk.cluster.awload

    -
    -
    -

    The awload sub-module contains the definition of the AWManager object, which handles submission and deletion of existing AppWrappers from a user's file system.

    -
    # Copyright 2022 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -The awload sub-module contains the definition of the AWManager object, which handles
    -submission and deletion of existing AppWrappers from a user's file system.
    -"""
    -
    -from os.path import isfile
    -import errno
    -import os
    -import yaml
    -
    -from kubernetes import client, config
    -from ..utils.kube_api_helpers import _kube_api_error_handling
    -from .auth import config_check, get_api_client
    -
    -
    -class AWManager:
    -    """
    -    An object for submitting and removing existing AppWrapper yamls
    -    to be added to the Kueue localqueue.
    -    """
    -
    -    def __init__(self, filename: str) -> None:
    -        """
    -        Create the AppWrapper Manager object by passing in an
    -        AppWrapper yaml file
    -        """
    -        if not isfile(filename):
    -            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), filename)
    -        self.filename = filename
    -        try:
    -            with open(self.filename) as f:
    -                self.awyaml = yaml.load(f, Loader=yaml.FullLoader)
    -            assert self.awyaml["kind"] == "AppWrapper"
    -            self.name = self.awyaml["metadata"]["name"]
    -            self.namespace = self.awyaml["metadata"]["namespace"]
    -        except:
    -            raise ValueError(
    -                f"{filename } is not a correctly formatted AppWrapper yaml"
    -            )
    -        self.submitted = False
    -
    -    def submit(self) -> None:
    -        """
    -        Attempts to create the AppWrapper custom resource using the yaml file
    -        """
    -        try:
    -            config_check()
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -            api_instance.create_namespaced_custom_object(
    -                group="workload.codeflare.dev",
    -                version="v1beta2",
    -                namespace=self.namespace,
    -                plural="appwrappers",
    -                body=self.awyaml,
    -            )
    -        except Exception as e:
    -            return _kube_api_error_handling(e)
    -
    -        self.submitted = True
    -        print(f"AppWrapper {self.filename} submitted!")
    -
    -    def remove(self) -> None:
    -        """
    -        Attempts to delete the AppWrapper custom resource matching the name in the yaml,
    -        if submitted by this manager.
    -        """
    -        if not self.submitted:
    -            print("AppWrapper not submitted by this manager yet, nothing to remove")
    -            return
    -
    -        try:
    -            config_check()
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -            api_instance.delete_namespaced_custom_object(
    -                group="workload.codeflare.dev",
    -                version="v1beta2",
    -                namespace=self.namespace,
    -                plural="appwrappers",
    -                name=self.name,
    -            )
    -        except Exception as e:
    -            return _kube_api_error_handling(e)
    -
    -        self.submitted = False
    -        print(f"AppWrapper {self.name} removed!")
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class AWManager(filename: str)
    -
    -

    An object for submitting and removing existing AppWrapper yamls to be added to the Kueue localqueue.

    -

    Create the AppWrapper Manager object by passing in an AppWrapper yaml file

    -
    class AWManager:
    -    """
    -    An object for submitting and removing existing AppWrapper yamls
    -    to be added to the Kueue localqueue.
    -    """
    -
    -    def __init__(self, filename: str) -> None:
    -        """
    -        Create the AppWrapper Manager object by passing in an
    -        AppWrapper yaml file
    -        """
    -        if not isfile(filename):
    -            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), filename)
    -        self.filename = filename
    -        try:
    -            with open(self.filename) as f:
    -                self.awyaml = yaml.load(f, Loader=yaml.FullLoader)
    -            assert self.awyaml["kind"] == "AppWrapper"
    -            self.name = self.awyaml["metadata"]["name"]
    -            self.namespace = self.awyaml["metadata"]["namespace"]
    -        except:
    -            raise ValueError(
    -                f"{filename } is not a correctly formatted AppWrapper yaml"
    -            )
    -        self.submitted = False
    -
    -    def submit(self) -> None:
    -        """
    -        Attempts to create the AppWrapper custom resource using the yaml file
    -        """
    -        try:
    -            config_check()
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -            api_instance.create_namespaced_custom_object(
    -                group="workload.codeflare.dev",
    -                version="v1beta2",
    -                namespace=self.namespace,
    -                plural="appwrappers",
    -                body=self.awyaml,
    -            )
    -        except Exception as e:
    -            return _kube_api_error_handling(e)
    -
    -        self.submitted = True
    -        print(f"AppWrapper {self.filename} submitted!")
    -
    -    def remove(self) -> None:
    -        """
    -        Attempts to delete the AppWrapper custom resource matching the name in the yaml,
    -        if submitted by this manager.
    -        """
    -        if not self.submitted:
    -            print("AppWrapper not submitted by this manager yet, nothing to remove")
    -            return
    -
    -        try:
    -            config_check()
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -            api_instance.delete_namespaced_custom_object(
    -                group="workload.codeflare.dev",
    -                version="v1beta2",
    -                namespace=self.namespace,
    -                plural="appwrappers",
    -                name=self.name,
    -            )
    -        except Exception as e:
    -            return _kube_api_error_handling(e)
    -
    -        self.submitted = False
    -        print(f"AppWrapper {self.name} removed!")
    -
    -

    Methods

    -
    -
    -def remove(self) ‑> None -
    -
    -

    Attempts to delete the AppWrapper custom resource matching the name in the yaml, if submitted by this manager.

    -
    def remove(self) -> None:
    -    """
    -    Attempts to delete the AppWrapper custom resource matching the name in the yaml,
    -    if submitted by this manager.
    -    """
    -    if not self.submitted:
    -        print("AppWrapper not submitted by this manager yet, nothing to remove")
    -        return
    -
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        api_instance.delete_namespaced_custom_object(
    -            group="workload.codeflare.dev",
    -            version="v1beta2",
    -            namespace=self.namespace,
    -            plural="appwrappers",
    -            name=self.name,
    -        )
    -    except Exception as e:
    -        return _kube_api_error_handling(e)
    -
    -    self.submitted = False
    -    print(f"AppWrapper {self.name} removed!")
    -
    -
    -
    -def submit(self) ‑> None -
    -
    -

    Attempts to create the AppWrapper custom resource using the yaml file

    -
    def submit(self) -> None:
    -    """
    -    Attempts to create the AppWrapper custom resource using the yaml file
    -    """
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        api_instance.create_namespaced_custom_object(
    -            group="workload.codeflare.dev",
    -            version="v1beta2",
    -            namespace=self.namespace,
    -            plural="appwrappers",
    -            body=self.awyaml,
    -        )
    -    except Exception as e:
    -        return _kube_api_error_handling(e)
    -
    -    self.submitted = True
    -    print(f"AppWrapper {self.filename} submitted!")
    -
    -
    -
    -
    -
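    A minimal sketch of driving AWManager end to end; "my-appwrapper.yaml" is a placeholder for an existing AppWrapper yaml on disk:

        from codeflare_sdk.cluster.awload import AWManager

        aw = AWManager("my-appwrapper.yaml")
        aw.submit()   # creates the AppWrapper custom resource in its namespace
        # ... use the wrapped resources ...
        aw.remove()   # deletes the AppWrapper previously submitted by this manager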
    -
    -
    - -
    diff --git a/docs/detailed-documentation/cluster/cluster.html b/docs/detailed-documentation/cluster/cluster.html
    deleted file mode 100644
    index 12865c04..00000000
    --- a/docs/detailed-documentation/cluster/cluster.html
    +++ /dev/null
    @@ -1,2174 +0,0 @@
    -codeflare_sdk.cluster.cluster API documentation
    -
    -
    -

    Module codeflare_sdk.cluster.cluster

    -
    -
    -

    The cluster sub-module contains the definition of the Cluster object, which represents the resources requested by the user. It also contains functions for checking the cluster setup queue, a list of all existing clusters, and the user's working namespace.

    -
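    A minimal sketch of the typical Cluster lifecycle built on this module; the ClusterConfiguration field names mirror those used by Cluster.from_k8_cluster_object() below, and all values are placeholders:

        from codeflare_sdk.cluster.cluster import Cluster
        from codeflare_sdk.cluster.config import ClusterConfiguration

        cluster = Cluster(ClusterConfiguration(
            name="raytest",
            namespace="default",
            num_workers=2,
            worker_cpu_requests=1,
            worker_cpu_limits=1,
            worker_memory_requests=4,
            worker_memory_limits=4,
        ))
        cluster.up()          # apply the generated RayCluster/AppWrapper resources
        cluster.wait_ready()  # poll status (and the dashboard) every five seconds
        print(cluster.cluster_dashboard_uri())
        cluster.down()        # delete everything associated with the cluster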
    # Copyright 2022 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -The cluster sub-module contains the definition of the Cluster object, which represents
    -the resources requested by the user. It also contains functions for checking the
    -cluster setup queue, a list of all existing clusters, and the user's working namespace.
    -"""
    -
    -import re
    -import subprocess
    -from time import sleep
    -from typing import List, Optional, Tuple, Dict
    -
    -from kubernetes import config
    -from ray.job_submission import JobSubmissionClient
    -
    -from .auth import config_check, get_api_client
    -from ..utils import pretty_print
    -from ..utils.generate_yaml import (
    -    generate_appwrapper,
    -    head_worker_gpu_count_from_cluster,
    -)
    -from ..utils.kube_api_helpers import _kube_api_error_handling
    -from ..utils.generate_yaml import is_openshift_cluster
    -
    -from .config import ClusterConfiguration
    -from .model import (
    -    AppWrapper,
    -    AppWrapperStatus,
    -    CodeFlareClusterStatus,
    -    RayCluster,
    -    RayClusterStatus,
    -)
    -from .widgets import (
    -    cluster_up_down_buttons,
    -    is_notebook,
    -)
    -from kubernetes import client, config
    -from kubernetes.utils import parse_quantity
    -import yaml
    -import os
    -import requests
    -
    -from kubernetes import config
    -from kubernetes.client.rest import ApiException
    -
    -
    -class Cluster:
    -    """
    -    An object for requesting, bringing up, and taking down resources.
    -    Can also be used for seeing the resource cluster status and details.
    -
    -    Note that currently, the underlying implementation is a Ray cluster.
    -    """
    -
    -    def __init__(self, config: ClusterConfiguration):
    -        """
    -        Create the resource cluster object by passing in a ClusterConfiguration
    -        (defined in the config sub-module). An AppWrapper will then be generated
    -        based off of the configured resources to represent the desired cluster
    -        request.
    -        """
    -        self.config = config
    -        self.app_wrapper_yaml = self.create_app_wrapper()
    -        self._job_submission_client = None
    -        self.app_wrapper_name = self.config.name
    -        if is_notebook():
    -            cluster_up_down_buttons(self)
    -
    -    @property
    -    def _client_headers(self):
    -        k8_client = get_api_client()
    -        return {
    -            "Authorization": k8_client.configuration.get_api_key_with_prefix(
    -                "authorization"
    -            )
    -        }
    -
    -    @property
    -    def _client_verify_tls(self):
    -        if not is_openshift_cluster or not self.config.verify_tls:
    -            return False
    -        return True
    -
    -    @property
    -    def job_client(self):
    -        k8client = get_api_client()
    -        if self._job_submission_client:
    -            return self._job_submission_client
    -        if is_openshift_cluster():
    -            self._job_submission_client = JobSubmissionClient(
    -                self.cluster_dashboard_uri(),
    -                headers=self._client_headers,
    -                verify=self._client_verify_tls,
    -            )
    -        else:
    -            self._job_submission_client = JobSubmissionClient(
    -                self.cluster_dashboard_uri()
    -            )
    -        return self._job_submission_client
    -
    -    def create_app_wrapper(self):
    -        """
    -        Called upon cluster object creation, creates an AppWrapper yaml based on
    -        the specifications of the ClusterConfiguration.
    -        """
    -
    -        if self.config.namespace is None:
    -            self.config.namespace = get_current_namespace()
    -            if self.config.namespace is None:
    -                print("Please specify with namespace=<your_current_namespace>")
    -            elif type(self.config.namespace) is not str:
    -                raise TypeError(
    -                    f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication."
    -                )
    -
    -        return generate_appwrapper(self)
    -
    -    # creates a new cluster with the provided or default spec
    -    def up(self):
    -        """
    -        Applies the Cluster yaml, pushing the resource request onto
    -        the Kueue localqueue.
    -        """
    -
    -        # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError
    -        self._throw_for_no_raycluster()
    -
    -        namespace = self.config.namespace
    -
    -        try:
    -            config_check()
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -            if self.config.appwrapper:
    -                if self.config.write_to_file:
    -                    with open(self.app_wrapper_yaml) as f:
    -                        aw = yaml.load(f, Loader=yaml.FullLoader)
    -                        api_instance.create_namespaced_custom_object(
    -                            group="workload.codeflare.dev",
    -                            version="v1beta2",
    -                            namespace=namespace,
    -                            plural="appwrappers",
    -                            body=aw,
    -                        )
    -                else:
    -                    aw = yaml.safe_load(self.app_wrapper_yaml)
    -                    api_instance.create_namespaced_custom_object(
    -                        group="workload.codeflare.dev",
    -                        version="v1beta2",
    -                        namespace=namespace,
    -                        plural="appwrappers",
    -                        body=aw,
    -                    )
    -                print(f"AppWrapper: '{self.config.name}' has successfully been created")
    -            else:
    -                self._component_resources_up(namespace, api_instance)
    -                print(
    -                    f"Ray Cluster: '{self.config.name}' has successfully been created"
    -                )
    -        except Exception as e:  # pragma: no cover
    -            return _kube_api_error_handling(e)
    -
    -    def _throw_for_no_raycluster(self):
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        try:
    -            api_instance.list_namespaced_custom_object(
    -                group="ray.io",
    -                version="v1",
    -                namespace=self.config.namespace,
    -                plural="rayclusters",
    -            )
    -        except ApiException as e:
    -            if e.status == 404:
    -                raise RuntimeError(
    -                    "RayCluster CustomResourceDefinition unavailable contact your administrator."
    -                )
    -            else:
    -                raise RuntimeError(
    -                    "Failed to get RayCluster CustomResourceDefinition: " + str(e)
    -                )
    -
    -    def down(self):
    -        """
    -        Deletes the AppWrapper yaml, scaling-down and deleting all resources
    -        associated with the cluster.
    -        """
    -        namespace = self.config.namespace
    -        self._throw_for_no_raycluster()
    -        try:
    -            config_check()
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -            if self.config.appwrapper:
    -                api_instance.delete_namespaced_custom_object(
    -                    group="workload.codeflare.dev",
    -                    version="v1beta2",
    -                    namespace=namespace,
    -                    plural="appwrappers",
    -                    name=self.app_wrapper_name,
    -                )
    -                print(f"AppWrapper: '{self.config.name}' has successfully been deleted")
    -            else:
    -                self._component_resources_down(namespace, api_instance)
    -                print(
    -                    f"Ray Cluster: '{self.config.name}' has successfully been deleted"
    -                )
    -        except Exception as e:  # pragma: no cover
    -            return _kube_api_error_handling(e)
    -
    -    def status(
    -        self, print_to_console: bool = True
    -    ) -> Tuple[CodeFlareClusterStatus, bool]:
    -        """
    -        Returns the requested cluster's status, as well as whether or not
    -        it is ready for use.
    -        """
    -        ready = False
    -        status = CodeFlareClusterStatus.UNKNOWN
    -        if self.config.appwrapper:
    -            # check the app wrapper status
    -            appwrapper = _app_wrapper_status(self.config.name, self.config.namespace)
    -            if appwrapper:
    -                if appwrapper.status in [
    -                    AppWrapperStatus.RESUMING,
    -                    AppWrapperStatus.RESETTING,
    -                ]:
    -                    ready = False
    -                    status = CodeFlareClusterStatus.STARTING
    -                elif appwrapper.status in [
    -                    AppWrapperStatus.FAILED,
    -                ]:
    -                    ready = False
    -                    status = CodeFlareClusterStatus.FAILED  # should deleted be separate
    -                    return status, ready  # exit early, no need to check ray status
    -                elif appwrapper.status in [
    -                    AppWrapperStatus.SUSPENDED,
    -                    AppWrapperStatus.SUSPENDING,
    -                ]:
    -                    ready = False
    -                    if appwrapper.status == AppWrapperStatus.SUSPENDED:
    -                        status = CodeFlareClusterStatus.QUEUED
    -                    else:
    -                        status = CodeFlareClusterStatus.QUEUEING
    -                    if print_to_console:
    -                        pretty_print.print_app_wrappers_status([appwrapper])
    -                    return (
    -                        status,
    -                        ready,
    -                    )  # no need to check the ray status since still in queue
    -
    -        # check the ray cluster status
    -        cluster = _ray_cluster_status(self.config.name, self.config.namespace)
    -        if cluster:
    -            if cluster.status == RayClusterStatus.SUSPENDED:
    -                ready = False
    -                status = CodeFlareClusterStatus.SUSPENDED
    -            if cluster.status == RayClusterStatus.UNKNOWN:
    -                ready = False
    -                status = CodeFlareClusterStatus.STARTING
    -            if cluster.status == RayClusterStatus.READY:
    -                ready = True
    -                status = CodeFlareClusterStatus.READY
    -            elif cluster.status in [
    -                RayClusterStatus.UNHEALTHY,
    -                RayClusterStatus.FAILED,
    -            ]:
    -                ready = False
    -                status = CodeFlareClusterStatus.FAILED
    -
    -            if print_to_console:
    -                # overriding the number of gpus with requested
    -                _, cluster.worker_gpu = head_worker_gpu_count_from_cluster(self)
    -                pretty_print.print_cluster_status(cluster)
    -        elif print_to_console:
    -            if status == CodeFlareClusterStatus.UNKNOWN:
    -                pretty_print.print_no_resources_found()
    -            else:
    -                pretty_print.print_app_wrappers_status([appwrapper], starting=True)
    -
    -        return status, ready
    -
    -    def is_dashboard_ready(self) -> bool:
    -        try:
    -            response = requests.get(
    -                self.cluster_dashboard_uri(),
    -                headers=self._client_headers,
    -                timeout=5,
    -                verify=self._client_verify_tls,
    -            )
    -        except requests.exceptions.SSLError:  # pragma no cover
    -            # SSL exception occurs when oauth ingress has been created but cluster is not up
    -            return False
    -        if response.status_code == 200:
    -            return True
    -        else:
    -            return False
    -
    -    def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True):
    -        """
    -        Waits for requested cluster to be ready, up to an optional timeout (s).
    -        Checks every five seconds.
    -        """
    -        print("Waiting for requested resources to be set up...")
    -        time = 0
    -        while True:
    -            if timeout and time >= timeout:
    -                raise TimeoutError(
    -                    f"wait() timed out after waiting {timeout}s for cluster to be ready"
    -                )
    -            status, ready = self.status(print_to_console=False)
    -            if status == CodeFlareClusterStatus.UNKNOWN:
    -                print(
    -                    "WARNING: Current cluster status is unknown, have you run cluster.up yet?"
    -                )
    -            if ready:
    -                break
    -            sleep(5)
    -            time += 5
    -        print("Requested cluster is up and running!")
    -
    -        while dashboard_check:
    -            if timeout and time >= timeout:
    -                raise TimeoutError(
    -                    f"wait() timed out after waiting {timeout}s for dashboard to be ready"
    -                )
    -            if self.is_dashboard_ready():
    -                print("Dashboard is ready!")
    -                break
    -            sleep(5)
    -            time += 5
    -
    -    def details(self, print_to_console: bool = True) -> RayCluster:
    -        cluster = _copy_to_ray(self)
    -        if print_to_console:
    -            pretty_print.print_clusters([cluster])
    -        return cluster
    -
    -    def cluster_uri(self) -> str:
    -        """
    -        Returns a string containing the cluster's URI.
    -        """
    -        return f"ray://{self.config.name}-head-svc.{self.config.namespace}.svc:10001"
    -
    -    def cluster_dashboard_uri(self) -> str:
    -        """
    -        Returns a string containing the cluster's dashboard URI.
    -        """
    -        config_check()
    -        if is_openshift_cluster():
    -            try:
    -                api_instance = client.CustomObjectsApi(get_api_client())
    -                routes = api_instance.list_namespaced_custom_object(
    -                    group="route.openshift.io",
    -                    version="v1",
    -                    namespace=self.config.namespace,
    -                    plural="routes",
    -                )
    -            except Exception as e:  # pragma: no cover
    -                return _kube_api_error_handling(e)
    -
    -            for route in routes["items"]:
    -                if route["metadata"][
    -                    "name"
    -                ] == f"ray-dashboard-{self.config.name}" or route["metadata"][
    -                    "name"
    -                ].startswith(
    -                    f"{self.config.name}-ingress"
    -                ):
    -                    protocol = "https" if route["spec"].get("tls") else "http"
    -                    return f"{protocol}://{route['spec']['host']}"
    -        else:
    -            try:
    -                api_instance = client.NetworkingV1Api(get_api_client())
    -                ingresses = api_instance.list_namespaced_ingress(self.config.namespace)
    -            except Exception as e:  # pragma no cover
    -                return _kube_api_error_handling(e)
    -
    -            for ingress in ingresses.items:
    -                annotations = ingress.metadata.annotations
    -                protocol = "http"
    -                if (
    -                    ingress.metadata.name == f"ray-dashboard-{self.config.name}"
    -                    or ingress.metadata.name.startswith(f"{self.config.name}-ingress")
    -                ):
    -                    if annotations == None:
    -                        protocol = "http"
    -                    elif "route.openshift.io/termination" in annotations:
    -                        protocol = "https"
    -                return f"{protocol}://{ingress.spec.rules[0].host}"
    -        return "Dashboard not available yet, have you run cluster.up()?"
    -
    -    def list_jobs(self) -> List:
    -        """
    -        This method accesses the head ray node in your cluster and lists the running jobs.
    -        """
    -        return self.job_client.list_jobs()
    -
    -    def job_status(self, job_id: str) -> str:
    -        """
    -        This method accesses the head ray node in your cluster and returns the job status for the provided job id.
    -        """
    -        return self.job_client.get_job_status(job_id)
    -
    -    def job_logs(self, job_id: str) -> str:
    -        """
    -        This method accesses the head ray node in your cluster and returns the logs for the provided job id.
    -        """
    -        return self.job_client.get_job_logs(job_id)
    -
    -    @staticmethod
    -    def _head_worker_extended_resources_from_rc_dict(rc: Dict) -> Tuple[dict, dict]:
    -        head_extended_resources, worker_extended_resources = {}, {}
    -        for resource in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["limits"].keys():
    -            if resource in ["memory", "cpu"]:
    -                continue
    -            worker_extended_resources[resource] = rc["spec"]["workerGroupSpecs"][0][
    -                "template"
    -            ]["spec"]["containers"][0]["resources"]["limits"][resource]
    -
    -        for resource in rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][
    -            0
    -        ]["resources"]["limits"].keys():
    -            if resource in ["memory", "cpu"]:
    -                continue
    -            head_extended_resources[resource] = rc["spec"]["headGroupSpec"]["template"][
    -                "spec"
    -            ]["containers"][0]["resources"]["limits"][resource]
    -
    -        return head_extended_resources, worker_extended_resources
    -
    -    def from_k8_cluster_object(
    -        rc,
    -        appwrapper=True,
    -        write_to_file=False,
    -        verify_tls=True,
    -    ):
    -        config_check()
    -        machine_types = (
    -            rc["metadata"]["labels"]["orderedinstance"].split("_")
    -            if "orderedinstance" in rc["metadata"]["labels"]
    -            else []
    -        )
    -
    -        (
    -            head_extended_resources,
    -            worker_extended_resources,
    -        ) = Cluster._head_worker_extended_resources_from_rc_dict(rc)
    -
    -        cluster_config = ClusterConfiguration(
    -            name=rc["metadata"]["name"],
    -            namespace=rc["metadata"]["namespace"],
    -            machine_types=machine_types,
    -            head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["requests"]["cpu"],
    -            head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["limits"]["cpu"],
    -            head_memory_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["requests"]["memory"],
    -            head_memory_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["limits"]["memory"],
    -            num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"],
    -            worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["requests"]["cpu"],
    -            worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["limits"]["cpu"],
    -            worker_memory_requests=rc["spec"]["workerGroupSpecs"][0]["template"][
    -                "spec"
    -            ]["containers"][0]["resources"]["requests"]["memory"],
    -            worker_memory_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["limits"]["memory"],
    -            worker_extended_resource_requests=worker_extended_resources,
    -            head_extended_resource_requests=head_extended_resources,
    -            image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][
    -                0
    -            ]["image"],
    -            appwrapper=appwrapper,
    -            write_to_file=write_to_file,
    -            verify_tls=verify_tls,
    -            local_queue=rc["metadata"]
    -            .get("labels", dict())
    -            .get("kueue.x-k8s.io/queue-name", None),
    -        )
    -        return Cluster(cluster_config)
    -
    -    def local_client_url(self):
    -        ingress_domain = _get_ingress_domain(self)
    -        return f"ray://{ingress_domain}"
    -
    -    def _component_resources_up(
    -        self, namespace: str, api_instance: client.CustomObjectsApi
    -    ):
    -        if self.config.write_to_file:
    -            with open(self.app_wrapper_yaml) as f:
    -                yamls = list(yaml.load_all(f, Loader=yaml.FullLoader))
    -                for resource in yamls:
    -                    enable_ingress = (
    -                        resource.get("spec", {})
    -                        .get("headGroupSpec", {})
    -                        .get("enableIngress")
    -                    )
    -                    if resource["kind"] == "RayCluster" and enable_ingress is True:
    -                        name = resource["metadata"]["name"]
    -                        print(
    -                            f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True'."
    -                        )
    -                        return
    -                _create_resources(yamls, namespace, api_instance)
    -        else:
    -            yamls = yaml.load_all(self.app_wrapper_yaml, Loader=yaml.FullLoader)
    -            _create_resources(yamls, namespace, api_instance)
    -
    -    def _component_resources_down(
    -        self, namespace: str, api_instance: client.CustomObjectsApi
    -    ):
    -        cluster_name = self.config.name
    -        if self.config.write_to_file:
    -            with open(self.app_wrapper_yaml) as f:
    -                yamls = yaml.load_all(f, Loader=yaml.FullLoader)
    -                _delete_resources(yamls, namespace, api_instance, cluster_name)
    -        else:
    -            yamls = yaml.safe_load_all(self.app_wrapper_yaml)
    -            _delete_resources(yamls, namespace, api_instance, cluster_name)
    -
    -
    -def list_all_clusters(namespace: str, print_to_console: bool = True):
    -    """
    -    Returns (and prints by default) a list of all clusters in a given namespace.
    -    """
    -    clusters = _get_ray_clusters(namespace)
    -    if print_to_console:
    -        pretty_print.print_clusters(clusters)
    -    return clusters
    -
    -
    -def list_all_queued(
    -    namespace: str, print_to_console: bool = True, appwrapper: bool = False
    -):
    -    """
    -    Returns (and prints by default) a list of all currently queued-up Ray Clusters
    -    in a given namespace.
    -    """
    -    if appwrapper:
    -        resources = _get_app_wrappers(namespace, filter=[AppWrapperStatus.SUSPENDED])
    -        if print_to_console:
    -            pretty_print.print_app_wrappers_status(resources)
    -    else:
    -        resources = _get_ray_clusters(
    -            namespace, filter=[RayClusterStatus.READY, RayClusterStatus.SUSPENDED]
    -        )
    -        if print_to_console:
    -            pretty_print.print_ray_clusters_status(resources)
    -    return resources
    -
    -
    -def get_current_namespace():  # pragma: no cover
    -    if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"):
    -        try:
    -            file = open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r")
    -            active_context = file.readline().strip("\n")
    -            return active_context
    -        except Exception as e:
    -            print("Unable to find current namespace")
    -    print("trying to gather from current context")
    -    try:
    -        _, active_context = config.list_kube_config_contexts(config_check())
    -    except Exception as e:
    -        return _kube_api_error_handling(e)
    -    try:
    -        return active_context["context"]["namespace"]
    -    except KeyError:
    -        return None
    -
    -
    -def get_cluster(
    -    cluster_name: str,
    -    namespace: str = "default",
    -    write_to_file: bool = False,
    -    verify_tls: bool = True,
    -):
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        rcs = api_instance.list_namespaced_custom_object(
    -            group="ray.io",
    -            version="v1",
    -            namespace=namespace,
    -            plural="rayclusters",
    -        )
    -    except Exception as e:
    -        return _kube_api_error_handling(e)
    -
    -    for rc in rcs["items"]:
    -        if rc["metadata"]["name"] == cluster_name:
    -            appwrapper = _check_aw_exists(cluster_name, namespace)
    -            return Cluster.from_k8_cluster_object(
    -                rc,
    -                appwrapper=appwrapper,
    -                write_to_file=write_to_file,
    -                verify_tls=verify_tls,
    -            )
    -    raise FileNotFoundError(
    -        f"Cluster {cluster_name} is not found in {namespace} namespace"
    -    )
    -
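    # Usage sketch (placeholder names; assumes a RayCluster named "raytest" is
    # already running in the "default" namespace):
    #
    #     from codeflare_sdk.cluster.cluster import get_cluster, list_all_clusters
    #
    #     list_all_clusters("default")                  # print every cluster found
    #     cluster = get_cluster("raytest", namespace="default")
    #     cluster.details()                             # pretty-print its resources
    #     print(cluster.list_jobs())                    # jobs on the Ray head node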
    -
    -# private methods
    -def _delete_resources(
    -    yamls, namespace: str, api_instance: client.CustomObjectsApi, cluster_name: str
    -):
    -    for resource in yamls:
    -        if resource["kind"] == "RayCluster":
    -            name = resource["metadata"]["name"]
    -            api_instance.delete_namespaced_custom_object(
    -                group="ray.io",
    -                version="v1",
    -                namespace=namespace,
    -                plural="rayclusters",
    -                name=name,
    -            )
    -
    -
    -def _create_resources(yamls, namespace: str, api_instance: client.CustomObjectsApi):
    -    for resource in yamls:
    -        if resource["kind"] == "RayCluster":
    -            api_instance.create_namespaced_custom_object(
    -                group="ray.io",
    -                version="v1",
    -                namespace=namespace,
    -                plural="rayclusters",
    -                body=resource,
    -            )
    -
    -
    -def _check_aw_exists(name: str, namespace: str) -> bool:
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        aws = api_instance.list_namespaced_custom_object(
    -            group="workload.codeflare.dev",
    -            version="v1beta2",
    -            namespace=namespace,
    -            plural="appwrappers",
    -        )
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e, print_error=False)
    -    for aw in aws["items"]:
    -        if aw["metadata"]["name"] == name:
    -            return True
    -    return False
    -
    -
    -# Cant test this until get_current_namespace is fixed and placed in this function over using `self`
    -def _get_ingress_domain(self):  # pragma: no cover
    -    config_check()
    -
    -    if self.config.namespace != None:
    -        namespace = self.config.namespace
    -    else:
    -        namespace = get_current_namespace()
    -    domain = None
    -
    -    if is_openshift_cluster():
    -        try:
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -
    -            routes = api_instance.list_namespaced_custom_object(
    -                group="route.openshift.io",
    -                version="v1",
    -                namespace=namespace,
    -                plural="routes",
    -            )
    -        except Exception as e:  # pragma: no cover
    -            return _kube_api_error_handling(e)
    -
    -        for route in routes["items"]:
    -            if (
    -                route["spec"]["port"]["targetPort"] == "client"
    -                or route["spec"]["port"]["targetPort"] == 10001
    -            ):
    -                domain = route["spec"]["host"]
    -    else:
    -        try:
    -            api_client = client.NetworkingV1Api(get_api_client())
    -            ingresses = api_client.list_namespaced_ingress(namespace)
    -        except Exception as e:  # pragma: no cover
    -            return _kube_api_error_handling(e)
    -
    -        for ingress in ingresses.items:
    -            if ingress.spec.rules[0].http.paths[0].backend.service.port.number == 10001:
    -                domain = ingress.spec.rules[0].host
    -    return domain
    -
    -
    -def _app_wrapper_status(name, namespace="default") -> Optional[AppWrapper]:
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        aws = api_instance.list_namespaced_custom_object(
    -            group="workload.codeflare.dev",
    -            version="v1beta2",
    -            namespace=namespace,
    -            plural="appwrappers",
    -        )
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -    for aw in aws["items"]:
    -        if aw["metadata"]["name"] == name:
    -            return _map_to_app_wrapper(aw)
    -    return None
    -
    -
    -def _ray_cluster_status(name, namespace="default") -> Optional[RayCluster]:
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        rcs = api_instance.list_namespaced_custom_object(
    -            group="ray.io",
    -            version="v1",
    -            namespace=namespace,
    -            plural="rayclusters",
    -        )
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -    for rc in rcs["items"]:
    -        if rc["metadata"]["name"] == name:
    -            return _map_to_ray_cluster(rc)
    -    return None
    -
    -
    -def _get_ray_clusters(
    -    namespace="default", filter: Optional[List[RayClusterStatus]] = None
    -) -> List[RayCluster]:
    -    list_of_clusters = []
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        rcs = api_instance.list_namespaced_custom_object(
    -            group="ray.io",
    -            version="v1",
    -            namespace=namespace,
    -            plural="rayclusters",
    -        )
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -    # Get a list of RCs with the filter if it is passed to the function
    -    if filter is not None:
    -        for rc in rcs["items"]:
    -            ray_cluster = _map_to_ray_cluster(rc)
    -            if filter and ray_cluster.status in filter:
    -                list_of_clusters.append(ray_cluster)
    -    else:
    -        for rc in rcs["items"]:
    -            list_of_clusters.append(_map_to_ray_cluster(rc))
    -    return list_of_clusters
    -
    -
    -def _get_app_wrappers(
    -    namespace="default", filter=List[AppWrapperStatus]
    -) -> List[AppWrapper]:
    -    list_of_app_wrappers = []
    -
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        aws = api_instance.list_namespaced_custom_object(
    -            group="workload.codeflare.dev",
    -            version="v1beta2",
    -            namespace=namespace,
    -            plural="appwrappers",
    -        )
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -    for item in aws["items"]:
    -        app_wrapper = _map_to_app_wrapper(item)
    -        if filter and app_wrapper.status in filter:
    -            list_of_app_wrappers.append(app_wrapper)
    -        else:
    -            # Unsure what the purpose of the filter is
    -            list_of_app_wrappers.append(app_wrapper)
    -    return list_of_app_wrappers
    -
    -
    -def _map_to_ray_cluster(rc) -> Optional[RayCluster]:
    -    if "status" in rc and "state" in rc["status"]:
    -        status = RayClusterStatus(rc["status"]["state"].lower())
    -    else:
    -        status = RayClusterStatus.UNKNOWN
    -    config_check()
    -    dashboard_url = None
    -    if is_openshift_cluster():
    -        try:
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -            routes = api_instance.list_namespaced_custom_object(
    -                group="route.openshift.io",
    -                version="v1",
    -                namespace=rc["metadata"]["namespace"],
    -                plural="routes",
    -            )
    -        except Exception as e:  # pragma: no cover
    -            return _kube_api_error_handling(e)
    -
    -        for route in routes["items"]:
    -            rc_name = rc["metadata"]["name"]
    -            if route["metadata"]["name"] == f"ray-dashboard-{rc_name}" or route[
    -                "metadata"
    -            ]["name"].startswith(f"{rc_name}-ingress"):
    -                protocol = "https" if route["spec"].get("tls") else "http"
    -                dashboard_url = f"{protocol}://{route['spec']['host']}"
    -    else:
    -        try:
    -            api_instance = client.NetworkingV1Api(get_api_client())
    -            ingresses = api_instance.list_namespaced_ingress(
    -                rc["metadata"]["namespace"]
    -            )
    -        except Exception as e:  # pragma no cover
    -            return _kube_api_error_handling(e)
    -        for ingress in ingresses.items:
    -            annotations = ingress.metadata.annotations
    -            protocol = "http"
    -            if (
    -                ingress.metadata.name == f"ray-dashboard-{rc['metadata']['name']}"
    -                or ingress.metadata.name.startswith(f"{rc['metadata']['name']}-ingress")
    -            ):
    -                if annotations == None:
    -                    protocol = "http"
    -                elif "route.openshift.io/termination" in annotations:
    -                    protocol = "https"
    -            dashboard_url = f"{protocol}://{ingress.spec.rules[0].host}"
    -
    -    (
    -        head_extended_resources,
    -        worker_extended_resources,
    -    ) = Cluster._head_worker_extended_resources_from_rc_dict(rc)
    -
    -    return RayCluster(
    -        name=rc["metadata"]["name"],
    -        status=status,
    -        # for now we are not using autoscaling so same replicas is fine
    -        num_workers=rc["spec"]["workerGroupSpecs"][0]["replicas"],
    -        worker_mem_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["limits"]["memory"],
    -        worker_mem_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["requests"]["memory"],
    -        worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["requests"]["cpu"],
    -        worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["limits"]["cpu"],
    -        worker_extended_resources=worker_extended_resources,
    -        namespace=rc["metadata"]["namespace"],
    -        head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][
    -            0
    -        ]["resources"]["requests"]["cpu"],
    -        head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][
    -            0
    -        ]["resources"]["limits"]["cpu"],
    -        head_mem_requests=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][
    -            0
    -        ]["resources"]["requests"]["memory"],
    -        head_mem_limits=rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][
    -            0
    -        ]["resources"]["limits"]["memory"],
    -        head_extended_resources=head_extended_resources,
    -        dashboard=dashboard_url,
    -    )
    -
    -
    -def _map_to_app_wrapper(aw) -> AppWrapper:
    -    if "status" in aw:
    -        return AppWrapper(
    -            name=aw["metadata"]["name"],
    -            status=AppWrapperStatus(aw["status"]["phase"].lower()),
    -        )
    -    return AppWrapper(
    -        name=aw["metadata"]["name"],
    -        status=AppWrapperStatus("suspended"),
    -    )
    -
    -
    -def _copy_to_ray(cluster: Cluster) -> RayCluster:
    -    ray = RayCluster(
    -        name=cluster.config.name,
    -        status=cluster.status(print_to_console=False)[0],
    -        num_workers=cluster.config.num_workers,
    -        worker_mem_requests=cluster.config.worker_memory_requests,
    -        worker_mem_limits=cluster.config.worker_memory_limits,
    -        worker_cpu_requests=cluster.config.worker_cpu_requests,
    -        worker_cpu_limits=cluster.config.worker_cpu_limits,
    -        worker_extended_resources=cluster.config.worker_extended_resource_requests,
    -        namespace=cluster.config.namespace,
    -        dashboard=cluster.cluster_dashboard_uri(),
    -        head_mem_requests=cluster.config.head_memory_requests,
    -        head_mem_limits=cluster.config.head_memory_limits,
    -        head_cpu_requests=cluster.config.head_cpu_requests,
    -        head_cpu_limits=cluster.config.head_cpu_limits,
    -        head_extended_resources=cluster.config.head_extended_resource_requests,
    -    )
    -    if ray.status == CodeFlareClusterStatus.READY:
    -        ray.status = RayClusterStatus.READY
    -    return ray
    -
    -
    -
    -
    -
    -
    -
    -

    Functions

    -
    -
    -def get_cluster(cluster_name: str, namespace: str = 'default', write_to_file: bool = False, verify_tls: bool = True) -
    -
    -
    -
    def get_cluster(
    -    cluster_name: str,
    -    namespace: str = "default",
    -    write_to_file: bool = False,
    -    verify_tls: bool = True,
    -):
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        rcs = api_instance.list_namespaced_custom_object(
    -            group="ray.io",
    -            version="v1",
    -            namespace=namespace,
    -            plural="rayclusters",
    -        )
    -    except Exception as e:
    -        return _kube_api_error_handling(e)
    -
    -    for rc in rcs["items"]:
    -        if rc["metadata"]["name"] == cluster_name:
    -            appwrapper = _check_aw_exists(cluster_name, namespace)
    -            return Cluster.from_k8_cluster_object(
    -                rc,
    -                appwrapper=appwrapper,
    -                write_to_file=write_to_file,
    -                verify_tls=verify_tls,
    -            )
    -    raise FileNotFoundError(
    -        f"Cluster {cluster_name} is not found in {namespace} namespace"
    -    )
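A minimal usage sketch of get_cluster (the cluster name "raytest" and namespace "default" are placeholders; the import path follows the documented module layout):

    from codeflare_sdk.cluster.cluster import get_cluster

    # Reconnect to an existing RayCluster by name and inspect its recovered config
    cluster = get_cluster(cluster_name="raytest", namespace="default")
    print(cluster.config.num_workers)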
    -
    -
    -
    -def get_current_namespace() -
    -
    -
    -
    def get_current_namespace():  # pragma: no cover
    -    if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"):
    -        try:
    -            file = open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r")
    -            active_context = file.readline().strip("\n")
    -            return active_context
    -        except Exception as e:
    -            print("Unable to find current namespace")
    -    print("trying to gather from current context")
    -    try:
    -        _, active_context = config.list_kube_config_contexts(config_check())
    -    except Exception as e:
    -        return _kube_api_error_handling(e)
    -    try:
    -        return active_context["context"]["namespace"]
    -    except KeyError:
    -        return None
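A small sketch of a typical call, assuming the caller wants to fall back to a literal namespace when detection fails (the fallback value is a placeholder):

    from codeflare_sdk.cluster.cluster import get_current_namespace

    # get_current_namespace() returns None if no namespace can be determined
    namespace = get_current_namespace() or "default"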
    -
    -
    -
    -def list_all_clusters(namespace: str, print_to_console: bool = True) -
    -
    -

    Returns (and prints by default) a list of all clusters in a given namespace.

    -
    def list_all_clusters(namespace: str, print_to_console: bool = True):
    -    """
    -    Returns (and prints by default) a list of all clusters in a given namespace.
    -    """
    -    clusters = _get_ray_clusters(namespace)
    -    if print_to_console:
    -        pretty_print.print_clusters(clusters)
    -    return clusters
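A usage sketch (the namespace is a placeholder):

    from codeflare_sdk.cluster.cluster import list_all_clusters

    # Fetch the clusters without printing the table and inspect them directly
    for rc in list_all_clusters("default", print_to_console=False):
        print(rc.name, rc.status)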
    -
    -
    -
    -def list_all_queued(namespace: str, print_to_console: bool = True, appwrapper: bool = False) -
    -
    -

Returns (and prints by default) a list of all currently queued-up Ray Clusters in a given namespace.

    -
    def list_all_queued(
    -    namespace: str, print_to_console: bool = True, appwrapper: bool = False
    -):
    -    """
    -    Returns (and prints by default) a list of all currently queued-up Ray Clusters
    -    in a given namespace.
    -    """
    -    if appwrapper:
    -        resources = _get_app_wrappers(namespace, filter=[AppWrapperStatus.SUSPENDED])
    -        if print_to_console:
    -            pretty_print.print_app_wrappers_status(resources)
    -    else:
    -        resources = _get_ray_clusters(
    -            namespace, filter=[RayClusterStatus.READY, RayClusterStatus.SUSPENDED]
    -        )
    -        if print_to_console:
    -            pretty_print.print_ray_clusters_status(resources)
    -    return resources
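A usage sketch for the queued view (the namespace is a placeholder; appwrapper=True switches the listing to suspended AppWrappers):

    from codeflare_sdk.cluster.cluster import list_all_queued

    # Show queued AppWrappers instead of RayClusters
    queued = list_all_queued("default", print_to_console=True, appwrapper=True)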
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class Cluster -(config: ClusterConfiguration) -
    -
    -

An object for requesting, bringing up, and taking down resources. Can also be used for seeing the resource cluster status and details.

    -

    Note that currently, the underlying implementation is a Ray cluster.

    -

Create the resource cluster object by passing in a ClusterConfiguration (defined in the config sub-module). An AppWrapper will then be generated based off of the configured resources to represent the desired cluster request.
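A minimal lifecycle sketch of that flow (the name "raytest", the namespace, and the resource values are placeholders):

    from codeflare_sdk.cluster.cluster import Cluster
    from codeflare_sdk.cluster.config import ClusterConfiguration

    cluster = Cluster(
        ClusterConfiguration(
            name="raytest",          # placeholder cluster name
            namespace="default",     # placeholder namespace
            num_workers=2,
            worker_cpu_requests=1,
            worker_cpu_limits=1,
            worker_memory_requests=4,
            worker_memory_limits=4,
        )
    )
    cluster.up()          # apply the generated RayCluster (or AppWrapper) resources
    cluster.wait_ready()  # block until the cluster reports READY
    # ... run workloads ...
    cluster.down()        # delete all resources associated with the cluster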

    -
    class Cluster:
    -    """
    -    An object for requesting, bringing up, and taking down resources.
    -    Can also be used for seeing the resource cluster status and details.
    -
    -    Note that currently, the underlying implementation is a Ray cluster.
    -    """
    -
    -    def __init__(self, config: ClusterConfiguration):
    -        """
    -        Create the resource cluster object by passing in a ClusterConfiguration
    -        (defined in the config sub-module). An AppWrapper will then be generated
    -        based off of the configured resources to represent the desired cluster
    -        request.
    -        """
    -        self.config = config
    -        self.app_wrapper_yaml = self.create_app_wrapper()
    -        self._job_submission_client = None
    -        self.app_wrapper_name = self.config.name
    -        if is_notebook():
    -            cluster_up_down_buttons(self)
    -
    -    @property
    -    def _client_headers(self):
    -        k8_client = get_api_client()
    -        return {
    -            "Authorization": k8_client.configuration.get_api_key_with_prefix(
    -                "authorization"
    -            )
    -        }
    -
    -    @property
    -    def _client_verify_tls(self):
    -        if not is_openshift_cluster or not self.config.verify_tls:
    -            return False
    -        return True
    -
    -    @property
    -    def job_client(self):
    -        k8client = get_api_client()
    -        if self._job_submission_client:
    -            return self._job_submission_client
    -        if is_openshift_cluster():
    -            self._job_submission_client = JobSubmissionClient(
    -                self.cluster_dashboard_uri(),
    -                headers=self._client_headers,
    -                verify=self._client_verify_tls,
    -            )
    -        else:
    -            self._job_submission_client = JobSubmissionClient(
    -                self.cluster_dashboard_uri()
    -            )
    -        return self._job_submission_client
    -
    -    def create_app_wrapper(self):
    -        """
    -        Called upon cluster object creation, creates an AppWrapper yaml based on
    -        the specifications of the ClusterConfiguration.
    -        """
    -
    -        if self.config.namespace is None:
    -            self.config.namespace = get_current_namespace()
    -            if self.config.namespace is None:
    -                print("Please specify with namespace=<your_current_namespace>")
    -            elif type(self.config.namespace) is not str:
    -                raise TypeError(
    -                    f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication."
    -                )
    -
    -        return generate_appwrapper(self)
    -
    -    # creates a new cluster with the provided or default spec
    -    def up(self):
    -        """
    -        Applies the Cluster yaml, pushing the resource request onto
    -        the Kueue localqueue.
    -        """
    -
    -        # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError
    -        self._throw_for_no_raycluster()
    -
    -        namespace = self.config.namespace
    -
    -        try:
    -            config_check()
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -            if self.config.appwrapper:
    -                if self.config.write_to_file:
    -                    with open(self.app_wrapper_yaml) as f:
    -                        aw = yaml.load(f, Loader=yaml.FullLoader)
    -                        api_instance.create_namespaced_custom_object(
    -                            group="workload.codeflare.dev",
    -                            version="v1beta2",
    -                            namespace=namespace,
    -                            plural="appwrappers",
    -                            body=aw,
    -                        )
    -                else:
    -                    aw = yaml.safe_load(self.app_wrapper_yaml)
    -                    api_instance.create_namespaced_custom_object(
    -                        group="workload.codeflare.dev",
    -                        version="v1beta2",
    -                        namespace=namespace,
    -                        plural="appwrappers",
    -                        body=aw,
    -                    )
    -                print(f"AppWrapper: '{self.config.name}' has successfully been created")
    -            else:
    -                self._component_resources_up(namespace, api_instance)
    -                print(
    -                    f"Ray Cluster: '{self.config.name}' has successfully been created"
    -                )
    -        except Exception as e:  # pragma: no cover
    -            return _kube_api_error_handling(e)
    -
    -    def _throw_for_no_raycluster(self):
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        try:
    -            api_instance.list_namespaced_custom_object(
    -                group="ray.io",
    -                version="v1",
    -                namespace=self.config.namespace,
    -                plural="rayclusters",
    -            )
    -        except ApiException as e:
    -            if e.status == 404:
    -                raise RuntimeError(
    -                    "RayCluster CustomResourceDefinition unavailable contact your administrator."
    -                )
    -            else:
    -                raise RuntimeError(
    -                    "Failed to get RayCluster CustomResourceDefinition: " + str(e)
    -                )
    -
    -    def down(self):
    -        """
    -        Deletes the AppWrapper yaml, scaling-down and deleting all resources
    -        associated with the cluster.
    -        """
    -        namespace = self.config.namespace
    -        self._throw_for_no_raycluster()
    -        try:
    -            config_check()
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -            if self.config.appwrapper:
    -                api_instance.delete_namespaced_custom_object(
    -                    group="workload.codeflare.dev",
    -                    version="v1beta2",
    -                    namespace=namespace,
    -                    plural="appwrappers",
    -                    name=self.app_wrapper_name,
    -                )
    -                print(f"AppWrapper: '{self.config.name}' has successfully been deleted")
    -            else:
    -                self._component_resources_down(namespace, api_instance)
    -                print(
    -                    f"Ray Cluster: '{self.config.name}' has successfully been deleted"
    -                )
    -        except Exception as e:  # pragma: no cover
    -            return _kube_api_error_handling(e)
    -
    -    def status(
    -        self, print_to_console: bool = True
    -    ) -> Tuple[CodeFlareClusterStatus, bool]:
    -        """
    -        Returns the requested cluster's status, as well as whether or not
    -        it is ready for use.
    -        """
    -        ready = False
    -        status = CodeFlareClusterStatus.UNKNOWN
    -        if self.config.appwrapper:
    -            # check the app wrapper status
    -            appwrapper = _app_wrapper_status(self.config.name, self.config.namespace)
    -            if appwrapper:
    -                if appwrapper.status in [
    -                    AppWrapperStatus.RESUMING,
    -                    AppWrapperStatus.RESETTING,
    -                ]:
    -                    ready = False
    -                    status = CodeFlareClusterStatus.STARTING
    -                elif appwrapper.status in [
    -                    AppWrapperStatus.FAILED,
    -                ]:
    -                    ready = False
    -                    status = CodeFlareClusterStatus.FAILED  # should deleted be separate
    -                    return status, ready  # exit early, no need to check ray status
    -                elif appwrapper.status in [
    -                    AppWrapperStatus.SUSPENDED,
    -                    AppWrapperStatus.SUSPENDING,
    -                ]:
    -                    ready = False
    -                    if appwrapper.status == AppWrapperStatus.SUSPENDED:
    -                        status = CodeFlareClusterStatus.QUEUED
    -                    else:
    -                        status = CodeFlareClusterStatus.QUEUEING
    -                    if print_to_console:
    -                        pretty_print.print_app_wrappers_status([appwrapper])
    -                    return (
    -                        status,
    -                        ready,
    -                    )  # no need to check the ray status since still in queue
    -
    -        # check the ray cluster status
    -        cluster = _ray_cluster_status(self.config.name, self.config.namespace)
    -        if cluster:
    -            if cluster.status == RayClusterStatus.SUSPENDED:
    -                ready = False
    -                status = CodeFlareClusterStatus.SUSPENDED
    -            if cluster.status == RayClusterStatus.UNKNOWN:
    -                ready = False
    -                status = CodeFlareClusterStatus.STARTING
    -            if cluster.status == RayClusterStatus.READY:
    -                ready = True
    -                status = CodeFlareClusterStatus.READY
    -            elif cluster.status in [
    -                RayClusterStatus.UNHEALTHY,
    -                RayClusterStatus.FAILED,
    -            ]:
    -                ready = False
    -                status = CodeFlareClusterStatus.FAILED
    -
    -            if print_to_console:
    -                # overriding the number of gpus with requested
    -                _, cluster.worker_gpu = head_worker_gpu_count_from_cluster(self)
    -                pretty_print.print_cluster_status(cluster)
    -        elif print_to_console:
    -            if status == CodeFlareClusterStatus.UNKNOWN:
    -                pretty_print.print_no_resources_found()
    -            else:
    -                pretty_print.print_app_wrappers_status([appwrapper], starting=True)
    -
    -        return status, ready
    -
    -    def is_dashboard_ready(self) -> bool:
    -        try:
    -            response = requests.get(
    -                self.cluster_dashboard_uri(),
    -                headers=self._client_headers,
    -                timeout=5,
    -                verify=self._client_verify_tls,
    -            )
    -        except requests.exceptions.SSLError:  # pragma no cover
    -            # SSL exception occurs when oauth ingress has been created but cluster is not up
    -            return False
    -        if response.status_code == 200:
    -            return True
    -        else:
    -            return False
    -
    -    def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True):
    -        """
    -        Waits for requested cluster to be ready, up to an optional timeout (s).
    -        Checks every five seconds.
    -        """
    -        print("Waiting for requested resources to be set up...")
    -        time = 0
    -        while True:
    -            if timeout and time >= timeout:
    -                raise TimeoutError(
    -                    f"wait() timed out after waiting {timeout}s for cluster to be ready"
    -                )
    -            status, ready = self.status(print_to_console=False)
    -            if status == CodeFlareClusterStatus.UNKNOWN:
    -                print(
    -                    "WARNING: Current cluster status is unknown, have you run cluster.up yet?"
    -                )
    -            if ready:
    -                break
    -            sleep(5)
    -            time += 5
    -        print("Requested cluster is up and running!")
    -
    -        while dashboard_check:
    -            if timeout and time >= timeout:
    -                raise TimeoutError(
    -                    f"wait() timed out after waiting {timeout}s for dashboard to be ready"
    -                )
    -            if self.is_dashboard_ready():
    -                print("Dashboard is ready!")
    -                break
    -            sleep(5)
    -            time += 5
    -
    -    def details(self, print_to_console: bool = True) -> RayCluster:
    -        cluster = _copy_to_ray(self)
    -        if print_to_console:
    -            pretty_print.print_clusters([cluster])
    -        return cluster
    -
    -    def cluster_uri(self) -> str:
    -        """
    -        Returns a string containing the cluster's URI.
    -        """
    -        return f"ray://{self.config.name}-head-svc.{self.config.namespace}.svc:10001"
    -
    -    def cluster_dashboard_uri(self) -> str:
    -        """
    -        Returns a string containing the cluster's dashboard URI.
    -        """
    -        config_check()
    -        if is_openshift_cluster():
    -            try:
    -                api_instance = client.CustomObjectsApi(get_api_client())
    -                routes = api_instance.list_namespaced_custom_object(
    -                    group="route.openshift.io",
    -                    version="v1",
    -                    namespace=self.config.namespace,
    -                    plural="routes",
    -                )
    -            except Exception as e:  # pragma: no cover
    -                return _kube_api_error_handling(e)
    -
    -            for route in routes["items"]:
    -                if route["metadata"][
    -                    "name"
    -                ] == f"ray-dashboard-{self.config.name}" or route["metadata"][
    -                    "name"
    -                ].startswith(
    -                    f"{self.config.name}-ingress"
    -                ):
    -                    protocol = "https" if route["spec"].get("tls") else "http"
    -                    return f"{protocol}://{route['spec']['host']}"
    -        else:
    -            try:
    -                api_instance = client.NetworkingV1Api(get_api_client())
    -                ingresses = api_instance.list_namespaced_ingress(self.config.namespace)
    -            except Exception as e:  # pragma no cover
    -                return _kube_api_error_handling(e)
    -
    -            for ingress in ingresses.items:
    -                annotations = ingress.metadata.annotations
    -                protocol = "http"
    -                if (
    -                    ingress.metadata.name == f"ray-dashboard-{self.config.name}"
    -                    or ingress.metadata.name.startswith(f"{self.config.name}-ingress")
    -                ):
    -                    if annotations == None:
    -                        protocol = "http"
    -                    elif "route.openshift.io/termination" in annotations:
    -                        protocol = "https"
    -                return f"{protocol}://{ingress.spec.rules[0].host}"
    -        return "Dashboard not available yet, have you run cluster.up()?"
    -
    -    def list_jobs(self) -> List:
    -        """
    -        This method accesses the head ray node in your cluster and lists the running jobs.
    -        """
    -        return self.job_client.list_jobs()
    -
    -    def job_status(self, job_id: str) -> str:
    -        """
    -        This method accesses the head ray node in your cluster and returns the job status for the provided job id.
    -        """
    -        return self.job_client.get_job_status(job_id)
    -
    -    def job_logs(self, job_id: str) -> str:
    -        """
    -        This method accesses the head ray node in your cluster and returns the logs for the provided job id.
    -        """
    -        return self.job_client.get_job_logs(job_id)
    -
    -    @staticmethod
    -    def _head_worker_extended_resources_from_rc_dict(rc: Dict) -> Tuple[dict, dict]:
    -        head_extended_resources, worker_extended_resources = {}, {}
    -        for resource in rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["limits"].keys():
    -            if resource in ["memory", "cpu"]:
    -                continue
    -            worker_extended_resources[resource] = rc["spec"]["workerGroupSpecs"][0][
    -                "template"
    -            ]["spec"]["containers"][0]["resources"]["limits"][resource]
    -
    -        for resource in rc["spec"]["headGroupSpec"]["template"]["spec"]["containers"][
    -            0
    -        ]["resources"]["limits"].keys():
    -            if resource in ["memory", "cpu"]:
    -                continue
    -            head_extended_resources[resource] = rc["spec"]["headGroupSpec"]["template"][
    -                "spec"
    -            ]["containers"][0]["resources"]["limits"][resource]
    -
    -        return head_extended_resources, worker_extended_resources
    -
    -    def from_k8_cluster_object(
    -        rc,
    -        appwrapper=True,
    -        write_to_file=False,
    -        verify_tls=True,
    -    ):
    -        config_check()
    -        machine_types = (
    -            rc["metadata"]["labels"]["orderedinstance"].split("_")
    -            if "orderedinstance" in rc["metadata"]["labels"]
    -            else []
    -        )
    -
    -        (
    -            head_extended_resources,
    -            worker_extended_resources,
    -        ) = Cluster._head_worker_extended_resources_from_rc_dict(rc)
    -
    -        cluster_config = ClusterConfiguration(
    -            name=rc["metadata"]["name"],
    -            namespace=rc["metadata"]["namespace"],
    -            machine_types=machine_types,
    -            head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["requests"]["cpu"],
    -            head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["limits"]["cpu"],
    -            head_memory_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["requests"]["memory"],
    -            head_memory_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["limits"]["memory"],
    -            num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"],
    -            worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["requests"]["cpu"],
    -            worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["limits"]["cpu"],
    -            worker_memory_requests=rc["spec"]["workerGroupSpecs"][0]["template"][
    -                "spec"
    -            ]["containers"][0]["resources"]["requests"]["memory"],
    -            worker_memory_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -                "containers"
    -            ][0]["resources"]["limits"]["memory"],
    -            worker_extended_resource_requests=worker_extended_resources,
    -            head_extended_resource_requests=head_extended_resources,
    -            image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][
    -                0
    -            ]["image"],
    -            appwrapper=appwrapper,
    -            write_to_file=write_to_file,
    -            verify_tls=verify_tls,
    -            local_queue=rc["metadata"]
    -            .get("labels", dict())
    -            .get("kueue.x-k8s.io/queue-name", None),
    -        )
    -        return Cluster(cluster_config)
    -
    -    def local_client_url(self):
    -        ingress_domain = _get_ingress_domain(self)
    -        return f"ray://{ingress_domain}"
    -
    -    def _component_resources_up(
    -        self, namespace: str, api_instance: client.CustomObjectsApi
    -    ):
    -        if self.config.write_to_file:
    -            with open(self.app_wrapper_yaml) as f:
    -                yamls = list(yaml.load_all(f, Loader=yaml.FullLoader))
    -                for resource in yamls:
    -                    enable_ingress = (
    -                        resource.get("spec", {})
    -                        .get("headGroupSpec", {})
    -                        .get("enableIngress")
    -                    )
    -                    if resource["kind"] == "RayCluster" and enable_ingress is True:
    -                        name = resource["metadata"]["name"]
    -                        print(
    -                            f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True'."
    -                        )
    -                        return
    -                _create_resources(yamls, namespace, api_instance)
    -        else:
    -            yamls = yaml.load_all(self.app_wrapper_yaml, Loader=yaml.FullLoader)
    -            _create_resources(yamls, namespace, api_instance)
    -
    -    def _component_resources_down(
    -        self, namespace: str, api_instance: client.CustomObjectsApi
    -    ):
    -        cluster_name = self.config.name
    -        if self.config.write_to_file:
    -            with open(self.app_wrapper_yaml) as f:
    -                yamls = yaml.load_all(f, Loader=yaml.FullLoader)
    -                _delete_resources(yamls, namespace, api_instance, cluster_name)
    -        else:
    -            yamls = yaml.safe_load_all(self.app_wrapper_yaml)
    -            _delete_resources(yamls, namespace, api_instance, cluster_name)
    -
    -

    Instance variables

    -
    -
    var job_client
    -
    -
    -
    @property
    -def job_client(self):
    -    k8client = get_api_client()
    -    if self._job_submission_client:
    -        return self._job_submission_client
    -    if is_openshift_cluster():
    -        self._job_submission_client = JobSubmissionClient(
    -            self.cluster_dashboard_uri(),
    -            headers=self._client_headers,
    -            verify=self._client_verify_tls,
    -        )
    -    else:
    -        self._job_submission_client = JobSubmissionClient(
    -            self.cluster_dashboard_uri()
    -        )
    -    return self._job_submission_client
    -
    -
    -
    -

    Methods

    -
    -
    -def cluster_dashboard_uri(self) ‑> str -
    -
    -

    Returns a string containing the cluster's dashboard URI.

    -
    def cluster_dashboard_uri(self) -> str:
    -    """
    -    Returns a string containing the cluster's dashboard URI.
    -    """
    -    config_check()
    -    if is_openshift_cluster():
    -        try:
    -            api_instance = client.CustomObjectsApi(get_api_client())
    -            routes = api_instance.list_namespaced_custom_object(
    -                group="route.openshift.io",
    -                version="v1",
    -                namespace=self.config.namespace,
    -                plural="routes",
    -            )
    -        except Exception as e:  # pragma: no cover
    -            return _kube_api_error_handling(e)
    -
    -        for route in routes["items"]:
    -            if route["metadata"][
    -                "name"
    -            ] == f"ray-dashboard-{self.config.name}" or route["metadata"][
    -                "name"
    -            ].startswith(
    -                f"{self.config.name}-ingress"
    -            ):
    -                protocol = "https" if route["spec"].get("tls") else "http"
    -                return f"{protocol}://{route['spec']['host']}"
    -    else:
    -        try:
    -            api_instance = client.NetworkingV1Api(get_api_client())
    -            ingresses = api_instance.list_namespaced_ingress(self.config.namespace)
    -        except Exception as e:  # pragma no cover
    -            return _kube_api_error_handling(e)
    -
    -        for ingress in ingresses.items:
    -            annotations = ingress.metadata.annotations
    -            protocol = "http"
    -            if (
    -                ingress.metadata.name == f"ray-dashboard-{self.config.name}"
    -                or ingress.metadata.name.startswith(f"{self.config.name}-ingress")
    -            ):
    -                if annotations == None:
    -                    protocol = "http"
    -                elif "route.openshift.io/termination" in annotations:
    -                    protocol = "https"
    -            return f"{protocol}://{ingress.spec.rules[0].host}"
    -    return "Dashboard not available yet, have you run cluster.up()?"
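A short sketch, assuming cluster is a Cluster object as in the earlier example:

    # Print the dashboard endpoint once the route/ingress answers with HTTP 200
    if cluster.is_dashboard_ready():
        print(cluster.cluster_dashboard_uri())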
    -
    -
    -
    -def cluster_uri(self) ‑> str -
    -
    -

    Returns a string containing the cluster's URI.

    -
    def cluster_uri(self) -> str:
    -    """
    -    Returns a string containing the cluster's URI.
    -    """
    -    return f"ray://{self.config.name}-head-svc.{self.config.namespace}.svc:10001"
    -
    -
    -
    -def create_app_wrapper(self) -
    -
    -

Called upon cluster object creation, creates an AppWrapper yaml based on the specifications of the ClusterConfiguration.

    -
    def create_app_wrapper(self):
    -    """
    -    Called upon cluster object creation, creates an AppWrapper yaml based on
    -    the specifications of the ClusterConfiguration.
    -    """
    -
    -    if self.config.namespace is None:
    -        self.config.namespace = get_current_namespace()
    -        if self.config.namespace is None:
    -            print("Please specify with namespace=<your_current_namespace>")
    -        elif type(self.config.namespace) is not str:
    -            raise TypeError(
    -                f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication."
    -            )
    -
    -    return generate_appwrapper(self)
    -
    -
    -
    -def details(self, print_to_console: bool = True) ‑> RayCluster -
    -
    -
    -
    def details(self, print_to_console: bool = True) -> RayCluster:
    -    cluster = _copy_to_ray(self)
    -    if print_to_console:
    -        pretty_print.print_clusters([cluster])
    -    return cluster
    -
    -
    -
    -def down(self) -
    -
    -

Deletes the AppWrapper yaml, scaling-down and deleting all resources associated with the cluster.

    -
    def down(self):
    -    """
    -    Deletes the AppWrapper yaml, scaling-down and deleting all resources
    -    associated with the cluster.
    -    """
    -    namespace = self.config.namespace
    -    self._throw_for_no_raycluster()
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        if self.config.appwrapper:
    -            api_instance.delete_namespaced_custom_object(
    -                group="workload.codeflare.dev",
    -                version="v1beta2",
    -                namespace=namespace,
    -                plural="appwrappers",
    -                name=self.app_wrapper_name,
    -            )
    -            print(f"AppWrapper: '{self.config.name}' has successfully been deleted")
    -        else:
    -            self._component_resources_down(namespace, api_instance)
    -            print(
    -                f"Ray Cluster: '{self.config.name}' has successfully been deleted"
    -            )
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -
    -
    -def from_k8_cluster_object(rc, appwrapper=True, write_to_file=False, verify_tls=True) -
    -
    -
    -
    def from_k8_cluster_object(
    -    rc,
    -    appwrapper=True,
    -    write_to_file=False,
    -    verify_tls=True,
    -):
    -    config_check()
    -    machine_types = (
    -        rc["metadata"]["labels"]["orderedinstance"].split("_")
    -        if "orderedinstance" in rc["metadata"]["labels"]
    -        else []
    -    )
    -
    -    (
    -        head_extended_resources,
    -        worker_extended_resources,
    -    ) = Cluster._head_worker_extended_resources_from_rc_dict(rc)
    -
    -    cluster_config = ClusterConfiguration(
    -        name=rc["metadata"]["name"],
    -        namespace=rc["metadata"]["namespace"],
    -        machine_types=machine_types,
    -        head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["requests"]["cpu"],
    -        head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["limits"]["cpu"],
    -        head_memory_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["requests"]["memory"],
    -        head_memory_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["limits"]["memory"],
    -        num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"],
    -        worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["requests"]["cpu"],
    -        worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["limits"]["cpu"],
    -        worker_memory_requests=rc["spec"]["workerGroupSpecs"][0]["template"][
    -            "spec"
    -        ]["containers"][0]["resources"]["requests"]["memory"],
    -        worker_memory_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][
    -            "containers"
    -        ][0]["resources"]["limits"]["memory"],
    -        worker_extended_resource_requests=worker_extended_resources,
    -        head_extended_resource_requests=head_extended_resources,
    -        image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][
    -            0
    -        ]["image"],
    -        appwrapper=appwrapper,
    -        write_to_file=write_to_file,
    -        verify_tls=verify_tls,
    -        local_queue=rc["metadata"]
    -        .get("labels", dict())
    -        .get("kueue.x-k8s.io/queue-name", None),
    -    )
    -    return Cluster(cluster_config)
    -
    -
    -
    -def is_dashboard_ready(self) ‑> bool -
    -
    -
    -
    def is_dashboard_ready(self) -> bool:
    -    try:
    -        response = requests.get(
    -            self.cluster_dashboard_uri(),
    -            headers=self._client_headers,
    -            timeout=5,
    -            verify=self._client_verify_tls,
    -        )
    -    except requests.exceptions.SSLError:  # pragma no cover
    -        # SSL exception occurs when oauth ingress has been created but cluster is not up
    -        return False
    -    if response.status_code == 200:
    -        return True
    -    else:
    -        return False
    -
    -
    -
    -def job_logs(self, job_id: str) ‑> str -
    -
    -

    This method accesses the head ray node in your cluster and returns the logs for the provided job id.

    -
    def job_logs(self, job_id: str) -> str:
    -    """
    -    This method accesses the head ray node in your cluster and returns the logs for the provided job id.
    -    """
    -    return self.job_client.get_job_logs(job_id)
    -
    -
    -
    -def job_status(self, job_id: str) ‑> str -
    -
    -

    This method accesses the head ray node in your cluster and returns the job status for the provided job id.

    -
    def job_status(self, job_id: str) -> str:
    -    """
    -    This method accesses the head ray node in your cluster and returns the job status for the provided job id.
    -    """
    -    return self.job_client.get_job_status(job_id)
    -
    -
    -
    -def list_jobs(self) ‑> List -
    -
    -

    This method accesses the head ray node in your cluster and lists the running jobs.

    -
    def list_jobs(self) -> List:
    -    """
    -    This method accesses the head ray node in your cluster and lists the running jobs.
    -    """
    -    return self.job_client.list_jobs()
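A job-submission sketch using the wrapped Ray JobSubmissionClient (the entrypoint script and runtime_env are placeholders; cluster is assumed to be a ready Cluster object):

    # Submit a job against the cluster's dashboard endpoint, then poll it
    job_id = cluster.job_client.submit_job(
        entrypoint="python train.py",
        runtime_env={"working_dir": "./"},
    )
    print(cluster.job_status(job_id))   # e.g. PENDING / RUNNING / SUCCEEDED
    print(cluster.job_logs(job_id))     # stdout/stderr captured by Ray
    print(cluster.list_jobs())          # all jobs known to the head node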
    -
    -
    -
    -def local_client_url(self) -
    -
    -
    -
    def local_client_url(self):
    -    ingress_domain = _get_ingress_domain(self)
    -    return f"ray://{ingress_domain}"
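A sketch of connecting an interactive Ray client session through this URL; ray.init(address="ray://...") is upstream Ray's client API, and cluster is assumed to be a ready Cluster object with an exposed client ingress:

    import ray

    # Connect the local Python session to the remote cluster via the exposed ingress
    ray.init(address=cluster.local_client_url(), ignore_reinit_error=True)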
    -
    -
    -
    -def status(self, print_to_console: bool = True) ‑> Tuple[CodeFlareClusterStatus, bool] -
    -
    -

Returns the requested cluster's status, as well as whether or not it is ready for use.

    -
    def status(
    -    self, print_to_console: bool = True
    -) -> Tuple[CodeFlareClusterStatus, bool]:
    -    """
    -    Returns the requested cluster's status, as well as whether or not
    -    it is ready for use.
    -    """
    -    ready = False
    -    status = CodeFlareClusterStatus.UNKNOWN
    -    if self.config.appwrapper:
    -        # check the app wrapper status
    -        appwrapper = _app_wrapper_status(self.config.name, self.config.namespace)
    -        if appwrapper:
    -            if appwrapper.status in [
    -                AppWrapperStatus.RESUMING,
    -                AppWrapperStatus.RESETTING,
    -            ]:
    -                ready = False
    -                status = CodeFlareClusterStatus.STARTING
    -            elif appwrapper.status in [
    -                AppWrapperStatus.FAILED,
    -            ]:
    -                ready = False
    -                status = CodeFlareClusterStatus.FAILED  # should deleted be separate
    -                return status, ready  # exit early, no need to check ray status
    -            elif appwrapper.status in [
    -                AppWrapperStatus.SUSPENDED,
    -                AppWrapperStatus.SUSPENDING,
    -            ]:
    -                ready = False
    -                if appwrapper.status == AppWrapperStatus.SUSPENDED:
    -                    status = CodeFlareClusterStatus.QUEUED
    -                else:
    -                    status = CodeFlareClusterStatus.QUEUEING
    -                if print_to_console:
    -                    pretty_print.print_app_wrappers_status([appwrapper])
    -                return (
    -                    status,
    -                    ready,
    -                )  # no need to check the ray status since still in queue
    -
    -    # check the ray cluster status
    -    cluster = _ray_cluster_status(self.config.name, self.config.namespace)
    -    if cluster:
    -        if cluster.status == RayClusterStatus.SUSPENDED:
    -            ready = False
    -            status = CodeFlareClusterStatus.SUSPENDED
    -        if cluster.status == RayClusterStatus.UNKNOWN:
    -            ready = False
    -            status = CodeFlareClusterStatus.STARTING
    -        if cluster.status == RayClusterStatus.READY:
    -            ready = True
    -            status = CodeFlareClusterStatus.READY
    -        elif cluster.status in [
    -            RayClusterStatus.UNHEALTHY,
    -            RayClusterStatus.FAILED,
    -        ]:
    -            ready = False
    -            status = CodeFlareClusterStatus.FAILED
    -
    -        if print_to_console:
    -            # overriding the number of gpus with requested
    -            _, cluster.worker_gpu = head_worker_gpu_count_from_cluster(self)
    -            pretty_print.print_cluster_status(cluster)
    -    elif print_to_console:
    -        if status == CodeFlareClusterStatus.UNKNOWN:
    -            pretty_print.print_no_resources_found()
    -        else:
    -            pretty_print.print_app_wrappers_status([appwrapper], starting=True)
    -
    -    return status, ready
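A sketch of consuming the return value directly rather than the printed table (assuming cluster as above):

    status, ready = cluster.status(print_to_console=False)
    if not ready:
        print(f"Cluster not ready yet, current status: {status}")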
    -
    -
    -
    -def up(self) -
    -
    -

Applies the Cluster yaml, pushing the resource request onto the Kueue localqueue.

    -
    def up(self):
    -    """
    -    Applies the Cluster yaml, pushing the resource request onto
    -    the Kueue localqueue.
    -    """
    -
    -    # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError
    -    self._throw_for_no_raycluster()
    -
    -    namespace = self.config.namespace
    -
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        if self.config.appwrapper:
    -            if self.config.write_to_file:
    -                with open(self.app_wrapper_yaml) as f:
    -                    aw = yaml.load(f, Loader=yaml.FullLoader)
    -                    api_instance.create_namespaced_custom_object(
    -                        group="workload.codeflare.dev",
    -                        version="v1beta2",
    -                        namespace=namespace,
    -                        plural="appwrappers",
    -                        body=aw,
    -                    )
    -            else:
    -                aw = yaml.safe_load(self.app_wrapper_yaml)
    -                api_instance.create_namespaced_custom_object(
    -                    group="workload.codeflare.dev",
    -                    version="v1beta2",
    -                    namespace=namespace,
    -                    plural="appwrappers",
    -                    body=aw,
    -                )
    -            print(f"AppWrapper: '{self.config.name}' has successfully been created")
    -        else:
    -            self._component_resources_up(namespace, api_instance)
    -            print(
    -                f"Ray Cluster: '{self.config.name}' has successfully been created"
    -            )
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -
    -
    -def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True) -
    -
    -

Waits for requested cluster to be ready, up to an optional timeout (s). Checks every five seconds.

    -
    def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True):
    -    """
    -    Waits for requested cluster to be ready, up to an optional timeout (s).
    -    Checks every five seconds.
    -    """
    -    print("Waiting for requested resources to be set up...")
    -    time = 0
    -    while True:
    -        if timeout and time >= timeout:
    -            raise TimeoutError(
    -                f"wait() timed out after waiting {timeout}s for cluster to be ready"
    -            )
    -        status, ready = self.status(print_to_console=False)
    -        if status == CodeFlareClusterStatus.UNKNOWN:
    -            print(
    -                "WARNING: Current cluster status is unknown, have you run cluster.up yet?"
    -            )
    -        if ready:
    -            break
    -        sleep(5)
    -        time += 5
    -    print("Requested cluster is up and running!")
    -
    -    while dashboard_check:
    -        if timeout and time >= timeout:
    -            raise TimeoutError(
    -                f"wait() timed out after waiting {timeout}s for dashboard to be ready"
    -            )
    -        if self.is_dashboard_ready():
    -            print("Dashboard is ready!")
    -            break
    -        sleep(5)
    -        time += 5
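A sketch with an explicit timeout, skipping the dashboard probe (useful when no dashboard ingress is exposed; cluster as above):

    # Raise TimeoutError if the cluster is not READY within 10 minutes
    cluster.wait_ready(timeout=600, dashboard_check=False)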
    -
    -
    -
    -
    -
    -
    -
-
diff --git a/docs/detailed-documentation/cluster/config.html b/docs/detailed-documentation/cluster/config.html
deleted file mode 100644
index b329fb03..00000000
--- a/docs/detailed-documentation/cluster/config.html
+++ /dev/null
@@ -1,764 +0,0 @@
-codeflare_sdk.cluster.config API documentation
    -
    -
    -

    Module codeflare_sdk.cluster.config

    -
    -
    -

The config sub-module contains the definition of the ClusterConfiguration dataclass, which is used to specify resource requirements and other details when creating a Cluster object.
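A configuration sketch using the fields documented below (the name, namespace, and resource values are illustrative; "nvidia.com/gpu" is one of the default extended-resource mappings):

    from codeflare_sdk.cluster.config import ClusterConfiguration

    config = ClusterConfiguration(
        name="gpu-test",        # placeholder name
        namespace="default",    # placeholder namespace
        num_workers=1,
        worker_memory_requests=8,
        worker_memory_limits=8,
        worker_extended_resource_requests={"nvidia.com/gpu": 1},
        write_to_file=False,    # keep the generated YAML in memory
    )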

    -
    # Copyright 2022 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -The config sub-module contains the definition of the ClusterConfiguration dataclass,
    -which is used to specify resource requirements and other details when creating a
    -Cluster object.
    -"""
    -
    -import pathlib
    -import warnings
    -from dataclasses import dataclass, field, fields
    -from typing import Dict, List, Optional, Union, get_args, get_origin
    -
    -dir = pathlib.Path(__file__).parent.parent.resolve()
    -
    -# https://docs.ray.io/en/latest/ray-core/scheduling/accelerators.html
    -DEFAULT_RESOURCE_MAPPING = {
    -    "nvidia.com/gpu": "GPU",
    -    "intel.com/gpu": "GPU",
    -    "amd.com/gpu": "GPU",
    -    "aws.amazon.com/neuroncore": "neuron_cores",
    -    "google.com/tpu": "TPU",
    -    "habana.ai/gaudi": "HPU",
    -    "huawei.com/Ascend910": "NPU",
    -    "huawei.com/Ascend310": "NPU",
    -}
    -
    -
    -@dataclass
    -class ClusterConfiguration:
    -    """
    -    This dataclass is used to specify resource requirements and other details, and
    -    is passed in as an argument when creating a Cluster object.
    -
    -    Attributes:
    -    - name: The name of the cluster.
    -    - namespace: The namespace in which the cluster should be created.
    -    - head_info: A list of strings containing information about the head node.
    -    - head_cpus: The number of CPUs to allocate to the head node.
    -    - head_memory: The amount of memory to allocate to the head node.
    -    - head_gpus: The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests)
    -    - head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1}
    -    - machine_types: A list of machine types to use for the cluster.
    -    - min_cpus: The minimum number of CPUs to allocate to each worker.
    -    - max_cpus: The maximum number of CPUs to allocate to each worker.
    -    - num_workers: The number of workers to create.
    -    - min_memory: The minimum amount of memory to allocate to each worker.
    -    - max_memory: The maximum amount of memory to allocate to each worker.
    -    - num_gpus: The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests)
    -    - template: The path to the template file to use for the cluster.
    -    - appwrapper: A boolean indicating whether to use an AppWrapper.
    -    - envs: A dictionary of environment variables to set for the cluster.
    -    - image: The image to use for the cluster.
    -    - image_pull_secrets: A list of image pull secrets to use for the cluster.
    -    - write_to_file: A boolean indicating whether to write the cluster configuration to a file.
    -    - verify_tls: A boolean indicating whether to verify TLS when connecting to the cluster.
    -    - labels: A dictionary of labels to apply to the cluster.
    -    - worker_extended_resource_requests: A dictionary of extended resource requests for each worker. ex: {"nvidia.com/gpu": 1}
    -    - extended_resource_mapping: A dictionary of custom resource mappings to map extended resource requests to RayCluster resource names
    -    - overwrite_default_resource_mapping: A boolean indicating whether to overwrite the default resource mapping.
    -    """
    -
    -    name: str
    -    namespace: Optional[str] = None
    -    head_info: List[str] = field(default_factory=list)
    -    head_cpu_requests: Union[int, str] = 2
    -    head_cpu_limits: Union[int, str] = 2
    -    head_cpus: Optional[Union[int, str]] = None  # Deprecating
    -    head_memory_requests: Union[int, str] = 8
    -    head_memory_limits: Union[int, str] = 8
    -    head_memory: Optional[Union[int, str]] = None  # Deprecating
    -    head_gpus: Optional[int] = None  # Deprecating
    -    head_extended_resource_requests: Dict[str, Union[str, int]] = field(
    -        default_factory=dict
    -    )
    -    machine_types: List[str] = field(
    -        default_factory=list
    -    )  # ["m4.xlarge", "g4dn.xlarge"]
    -    worker_cpu_requests: Union[int, str] = 1
    -    worker_cpu_limits: Union[int, str] = 1
    -    min_cpus: Optional[Union[int, str]] = None  # Deprecating
    -    max_cpus: Optional[Union[int, str]] = None  # Deprecating
    -    num_workers: int = 1
    -    worker_memory_requests: Union[int, str] = 2
    -    worker_memory_limits: Union[int, str] = 2
    -    min_memory: Optional[Union[int, str]] = None  # Deprecating
    -    max_memory: Optional[Union[int, str]] = None  # Deprecating
    -    num_gpus: Optional[int] = None  # Deprecating
    -    template: str = f"{dir}/templates/base-template.yaml"
    -    appwrapper: bool = False
    -    envs: Dict[str, str] = field(default_factory=dict)
    -    image: str = ""
    -    image_pull_secrets: List[str] = field(default_factory=list)
    -    write_to_file: bool = False
    -    verify_tls: bool = True
    -    labels: Dict[str, str] = field(default_factory=dict)
    -    worker_extended_resource_requests: Dict[str, Union[str, int]] = field(
    -        default_factory=dict
    -    )
    -    extended_resource_mapping: Dict[str, str] = field(default_factory=dict)
    -    overwrite_default_resource_mapping: bool = False
    -    local_queue: Optional[str] = None
    -
    -    def __post_init__(self):
    -        if not self.verify_tls:
    -            print(
    -                "Warning: TLS verification has been disabled - Endpoint checks will be bypassed"
    -            )
    -
    -        self._validate_types()
    -        self._memory_to_string()
    -        self._str_mem_no_unit_add_GB()
    -        self._memory_to_resource()
    -        self._cpu_to_resource()
    -        self._gpu_to_resource()
    -        self._combine_extended_resource_mapping()
    -        self._validate_extended_resource_requests(self.head_extended_resource_requests)
    -        self._validate_extended_resource_requests(
    -            self.worker_extended_resource_requests
    -        )
    -
    -    def _combine_extended_resource_mapping(self):
    -        if overwritten := set(self.extended_resource_mapping.keys()).intersection(
    -            DEFAULT_RESOURCE_MAPPING.keys()
    -        ):
    -            if self.overwrite_default_resource_mapping:
    -                warnings.warn(
    -                    f"Overwriting default resource mapping for {overwritten}",
    -                    UserWarning,
    -                )
    -            else:
    -                raise ValueError(
    -                    f"Resource mapping already exists for {overwritten}, set overwrite_default_resource_mapping to True to overwrite"
    -                )
    -        self.extended_resource_mapping = {
    -            **DEFAULT_RESOURCE_MAPPING,
    -            **self.extended_resource_mapping,
    -        }
    -
    -    def _validate_extended_resource_requests(self, extended_resources: Dict[str, int]):
    -        for k in extended_resources.keys():
    -            if k not in self.extended_resource_mapping.keys():
    -                raise ValueError(
    -                    f"extended resource '{k}' not found in extended_resource_mapping, available resources are {list(self.extended_resource_mapping.keys())}, to add more supported resources use extended_resource_mapping. i.e. extended_resource_mapping = {{'{k}': 'FOO_BAR'}}"
    -                )
    -
    -    def _gpu_to_resource(self):
    -        if self.head_gpus:
    -            warnings.warn(
    -                f"head_gpus is being deprecated, replacing with head_extended_resource_requests['nvidia.com/gpu'] = {self.head_gpus}"
    -            )
    -            if "nvidia.com/gpu" in self.head_extended_resource_requests:
    -                raise ValueError(
    -                    "nvidia.com/gpu already exists in head_extended_resource_requests"
    -                )
    -            self.head_extended_resource_requests["nvidia.com/gpu"] = self.head_gpus
    -        if self.num_gpus:
    -            warnings.warn(
    -                f"num_gpus is being deprecated, replacing with worker_extended_resource_requests['nvidia.com/gpu'] = {self.num_gpus}"
    -            )
    -            if "nvidia.com/gpu" in self.worker_extended_resource_requests:
    -                raise ValueError(
    -                    "nvidia.com/gpu already exists in worker_extended_resource_requests"
    -                )
    -            self.worker_extended_resource_requests["nvidia.com/gpu"] = self.num_gpus
    -
    -    def _str_mem_no_unit_add_GB(self):
    -        if isinstance(self.head_memory, str) and self.head_memory.isdecimal():
    -            self.head_memory = f"{self.head_memory}G"
    -        if (
    -            isinstance(self.worker_memory_requests, str)
    -            and self.worker_memory_requests.isdecimal()
    -        ):
    -            self.worker_memory_requests = f"{self.worker_memory_requests}G"
    -        if (
    -            isinstance(self.worker_memory_limits, str)
    -            and self.worker_memory_limits.isdecimal()
    -        ):
    -            self.worker_memory_limits = f"{self.worker_memory_limits}G"
    -
    -    def _memory_to_string(self):
    -        if isinstance(self.head_memory_requests, int):
    -            self.head_memory_requests = f"{self.head_memory_requests}G"
    -        if isinstance(self.head_memory_limits, int):
    -            self.head_memory_limits = f"{self.head_memory_limits}G"
    -        if isinstance(self.worker_memory_requests, int):
    -            self.worker_memory_requests = f"{self.worker_memory_requests}G"
    -        if isinstance(self.worker_memory_limits, int):
    -            self.worker_memory_limits = f"{self.worker_memory_limits}G"
    -
    -    def _cpu_to_resource(self):
    -        if self.head_cpus:
    -            warnings.warn(
    -                "head_cpus is being deprecated, use head_cpu_requests and head_cpu_limits"
    -            )
    -            self.head_cpu_requests = self.head_cpu_limits = self.head_cpus
    -        if self.min_cpus:
    -            warnings.warn("min_cpus is being deprecated, use worker_cpu_requests")
    -            self.worker_cpu_requests = self.min_cpus
    -        if self.max_cpus:
    -            warnings.warn("max_cpus is being deprecated, use worker_cpu_limits")
    -            self.worker_cpu_limits = self.max_cpus
    -
    -    def _memory_to_resource(self):
    -        if self.head_memory:
    -            warnings.warn(
    -                "head_memory is being deprecated, use head_memory_requests and head_memory_limits"
    -            )
    -            self.head_memory_requests = self.head_memory_limits = self.head_memory
    -        if self.min_memory:
    -            warnings.warn("min_memory is being deprecated, use worker_memory_requests")
    -            self.worker_memory_requests = f"{self.min_memory}G"
    -        if self.max_memory:
    -            warnings.warn("max_memory is being deprecated, use worker_memory_limits")
    -            self.worker_memory_limits = f"{self.max_memory}G"
    -
    -    def _validate_types(self):
    -        """Validate the types of all fields in the ClusterConfiguration dataclass."""
    -        for field_info in fields(self):
    -            value = getattr(self, field_info.name)
    -            expected_type = field_info.type
    -            if not self._is_type(value, expected_type):
    -                raise TypeError(
    -                    f"'{field_info.name}' should be of type {expected_type}"
    -                )
    -
    -    @staticmethod
    -    def _is_type(value, expected_type):
    -        """Check if the value matches the expected type."""
    -
    -        def check_type(value, expected_type):
    -            origin_type = get_origin(expected_type)
    -            args = get_args(expected_type)
    -            if origin_type is Union:
    -                return any(check_type(value, union_type) for union_type in args)
    -            if origin_type is list:
    -                return all(check_type(elem, args[0]) for elem in value)
    -            if origin_type is dict:
    -                return all(
    -                    check_type(k, args[0]) and check_type(v, args[1])
    -                    for k, v in value.items()
    -                )
    -            if origin_type is tuple:
    -                return all(check_type(elem, etype) for elem, etype in zip(value, args))
    -            return isinstance(value, expected_type)
    -
    -        return check_type(value, expected_type)
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
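    For illustration, a minimal sketch of how this dataclass is typically constructed and handed to a Cluster object (the name, namespace and resource values below are placeholders, not taken from the SDK; "nvidia.com/gpu" follows the example given in the docstring above):

        from codeflare_sdk.cluster import Cluster, ClusterConfiguration

        config = ClusterConfiguration(
            name="raytest",                 # placeholder cluster name
            namespace="default",            # placeholder namespace
            num_workers=2,
            worker_cpu_requests=1,
            worker_cpu_limits=2,
            worker_memory_requests=4,       # plain ints are normalised to "4G" by _memory_to_string
            worker_memory_limits=8,
            worker_extended_resource_requests={"nvidia.com/gpu": 1},
        )
        cluster = Cluster(config)           # the config is passed in when creating a Cluster object

    The requests/limits fields used here supersede the deprecated head_cpus, head_memory, min_*/max_* and num_gpus fields, which __post_init__ maps onto them with a deprecation warning.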

    Classes

    -
    -
    -class ClusterConfiguration -(name: str, namespace: Optional[str] = None, head_info: List[str] = <factory>, head_cpu_requests: Union[int, str] = 2, head_cpu_limits: Union[int, str] = 2, head_cpus: Union[int, str, ForwardRef(None)] = None, head_memory_requests: Union[int, str] = 8, head_memory_limits: Union[int, str] = 8, head_memory: Union[int, str, ForwardRef(None)] = None, head_gpus: Optional[int] = None, head_extended_resource_requests: Dict[str, Union[str, int]] = <factory>, machine_types: List[str] = <factory>, worker_cpu_requests: Union[int, str] = 1, worker_cpu_limits: Union[int, str] = 1, min_cpus: Union[int, str, ForwardRef(None)] = None, max_cpus: Union[int, str, ForwardRef(None)] = None, num_workers: int = 1, worker_memory_requests: Union[int, str] = 2, worker_memory_limits: Union[int, str] = 2, min_memory: Union[int, str, ForwardRef(None)] = None, max_memory: Union[int, str, ForwardRef(None)] = None, num_gpus: Optional[int] = None, template: str = '/home/runner/work/codeflare-sdk/codeflare-sdk/src/codeflare_sdk/templates/base-template.yaml', appwrapper: bool = False, envs: Dict[str, str] = <factory>, image: str = '', image_pull_secrets: List[str] = <factory>, write_to_file: bool = False, verify_tls: bool = True, labels: Dict[str, str] = <factory>, worker_extended_resource_requests: Dict[str, Union[str, int]] = <factory>, extended_resource_mapping: Dict[str, str] = <factory>, overwrite_default_resource_mapping: bool = False, local_queue: Optional[str] = None) -
    -
    -

    This dataclass is used to specify resource requirements and other details, and is passed in as an argument when creating a Cluster object.

    -

    Attributes:
    - name: The name of the cluster.
    - namespace: The namespace in which the cluster should be created.
    - head_info: A list of strings containing information about the head node.
    - head_cpus: The number of CPUs to allocate to the head node.
    - head_memory: The amount of memory to allocate to the head node.
    - head_gpus: The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests)
    - head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1}
    - machine_types: A list of machine types to use for the cluster.
    - min_cpus: The minimum number of CPUs to allocate to each worker.
    - max_cpus: The maximum number of CPUs to allocate to each worker.
    - num_workers: The number of workers to create.
    - min_memory: The minimum amount of memory to allocate to each worker.
    - max_memory: The maximum amount of memory to allocate to each worker.
    - num_gpus: The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests)
    - template: The path to the template file to use for the cluster.
    - appwrapper: A boolean indicating whether to use an AppWrapper.
    - envs: A dictionary of environment variables to set for the cluster.
    - image: The image to use for the cluster.
    - image_pull_secrets: A list of image pull secrets to use for the cluster.
    - write_to_file: A boolean indicating whether to write the cluster configuration to a file.
    - verify_tls: A boolean indicating whether to verify TLS when connecting to the cluster.
    - labels: A dictionary of labels to apply to the cluster.
    - worker_extended_resource_requests: A dictionary of extended resource requests for each worker. ex: {"nvidia.com/gpu": 1}
    - extended_resource_mapping: A dictionary of custom resource mappings to map extended resource requests to RayCluster resource names
    - overwrite_default_resource_mapping: A boolean indicating whether to overwrite the default resource mapping.

    -
    @dataclass
    -class ClusterConfiguration:
    -    """
    -    This dataclass is used to specify resource requirements and other details, and
    -    is passed in as an argument when creating a Cluster object.
    -
    -    Attributes:
    -    - name: The name of the cluster.
    -    - namespace: The namespace in which the cluster should be created.
    -    - head_info: A list of strings containing information about the head node.
    -    - head_cpus: The number of CPUs to allocate to the head node.
    -    - head_memory: The amount of memory to allocate to the head node.
    -    - head_gpus: The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests)
    -    - head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1}
    -    - machine_types: A list of machine types to use for the cluster.
    -    - min_cpus: The minimum number of CPUs to allocate to each worker.
    -    - max_cpus: The maximum number of CPUs to allocate to each worker.
    -    - num_workers: The number of workers to create.
    -    - min_memory: The minimum amount of memory to allocate to each worker.
    -    - max_memory: The maximum amount of memory to allocate to each worker.
    -    - num_gpus: The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests)
    -    - template: The path to the template file to use for the cluster.
    -    - appwrapper: A boolean indicating whether to use an AppWrapper.
    -    - envs: A dictionary of environment variables to set for the cluster.
    -    - image: The image to use for the cluster.
    -    - image_pull_secrets: A list of image pull secrets to use for the cluster.
    -    - write_to_file: A boolean indicating whether to write the cluster configuration to a file.
    -    - verify_tls: A boolean indicating whether to verify TLS when connecting to the cluster.
    -    - labels: A dictionary of labels to apply to the cluster.
    -    - worker_extended_resource_requests: A dictionary of extended resource requests for each worker. ex: {"nvidia.com/gpu": 1}
    -    - extended_resource_mapping: A dictionary of custom resource mappings to map extended resource requests to RayCluster resource names
    -    - overwrite_default_resource_mapping: A boolean indicating whether to overwrite the default resource mapping.
    -    """
    -
    -    name: str
    -    namespace: Optional[str] = None
    -    head_info: List[str] = field(default_factory=list)
    -    head_cpu_requests: Union[int, str] = 2
    -    head_cpu_limits: Union[int, str] = 2
    -    head_cpus: Optional[Union[int, str]] = None  # Deprecating
    -    head_memory_requests: Union[int, str] = 8
    -    head_memory_limits: Union[int, str] = 8
    -    head_memory: Optional[Union[int, str]] = None  # Deprecating
    -    head_gpus: Optional[int] = None  # Deprecating
    -    head_extended_resource_requests: Dict[str, Union[str, int]] = field(
    -        default_factory=dict
    -    )
    -    machine_types: List[str] = field(
    -        default_factory=list
    -    )  # ["m4.xlarge", "g4dn.xlarge"]
    -    worker_cpu_requests: Union[int, str] = 1
    -    worker_cpu_limits: Union[int, str] = 1
    -    min_cpus: Optional[Union[int, str]] = None  # Deprecating
    -    max_cpus: Optional[Union[int, str]] = None  # Deprecating
    -    num_workers: int = 1
    -    worker_memory_requests: Union[int, str] = 2
    -    worker_memory_limits: Union[int, str] = 2
    -    min_memory: Optional[Union[int, str]] = None  # Deprecating
    -    max_memory: Optional[Union[int, str]] = None  # Deprecating
    -    num_gpus: Optional[int] = None  # Deprecating
    -    template: str = f"{dir}/templates/base-template.yaml"
    -    appwrapper: bool = False
    -    envs: Dict[str, str] = field(default_factory=dict)
    -    image: str = ""
    -    image_pull_secrets: List[str] = field(default_factory=list)
    -    write_to_file: bool = False
    -    verify_tls: bool = True
    -    labels: Dict[str, str] = field(default_factory=dict)
    -    worker_extended_resource_requests: Dict[str, Union[str, int]] = field(
    -        default_factory=dict
    -    )
    -    extended_resource_mapping: Dict[str, str] = field(default_factory=dict)
    -    overwrite_default_resource_mapping: bool = False
    -    local_queue: Optional[str] = None
    -
    -    def __post_init__(self):
    -        if not self.verify_tls:
    -            print(
    -                "Warning: TLS verification has been disabled - Endpoint checks will be bypassed"
    -            )
    -
    -        self._validate_types()
    -        self._memory_to_string()
    -        self._str_mem_no_unit_add_GB()
    -        self._memory_to_resource()
    -        self._cpu_to_resource()
    -        self._gpu_to_resource()
    -        self._combine_extended_resource_mapping()
    -        self._validate_extended_resource_requests(self.head_extended_resource_requests)
    -        self._validate_extended_resource_requests(
    -            self.worker_extended_resource_requests
    -        )
    -
    -    def _combine_extended_resource_mapping(self):
    -        if overwritten := set(self.extended_resource_mapping.keys()).intersection(
    -            DEFAULT_RESOURCE_MAPPING.keys()
    -        ):
    -            if self.overwrite_default_resource_mapping:
    -                warnings.warn(
    -                    f"Overwriting default resource mapping for {overwritten}",
    -                    UserWarning,
    -                )
    -            else:
    -                raise ValueError(
    -                    f"Resource mapping already exists for {overwritten}, set overwrite_default_resource_mapping to True to overwrite"
    -                )
    -        self.extended_resource_mapping = {
    -            **DEFAULT_RESOURCE_MAPPING,
    -            **self.extended_resource_mapping,
    -        }
    -
    -    def _validate_extended_resource_requests(self, extended_resources: Dict[str, int]):
    -        for k in extended_resources.keys():
    -            if k not in self.extended_resource_mapping.keys():
    -                raise ValueError(
    -                    f"extended resource '{k}' not found in extended_resource_mapping, available resources are {list(self.extended_resource_mapping.keys())}, to add more supported resources use extended_resource_mapping. i.e. extended_resource_mapping = {{'{k}': 'FOO_BAR'}}"
    -                )
    -
    -    def _gpu_to_resource(self):
    -        if self.head_gpus:
    -            warnings.warn(
    -                f"head_gpus is being deprecated, replacing with head_extended_resource_requests['nvidia.com/gpu'] = {self.head_gpus}"
    -            )
    -            if "nvidia.com/gpu" in self.head_extended_resource_requests:
    -                raise ValueError(
    -                    "nvidia.com/gpu already exists in head_extended_resource_requests"
    -                )
    -            self.head_extended_resource_requests["nvidia.com/gpu"] = self.head_gpus
    -        if self.num_gpus:
    -            warnings.warn(
    -                f"num_gpus is being deprecated, replacing with worker_extended_resource_requests['nvidia.com/gpu'] = {self.num_gpus}"
    -            )
    -            if "nvidia.com/gpu" in self.worker_extended_resource_requests:
    -                raise ValueError(
    -                    "nvidia.com/gpu already exists in worker_extended_resource_requests"
    -                )
    -            self.worker_extended_resource_requests["nvidia.com/gpu"] = self.num_gpus
    -
    -    def _str_mem_no_unit_add_GB(self):
    -        if isinstance(self.head_memory, str) and self.head_memory.isdecimal():
    -            self.head_memory = f"{self.head_memory}G"
    -        if (
    -            isinstance(self.worker_memory_requests, str)
    -            and self.worker_memory_requests.isdecimal()
    -        ):
    -            self.worker_memory_requests = f"{self.worker_memory_requests}G"
    -        if (
    -            isinstance(self.worker_memory_limits, str)
    -            and self.worker_memory_limits.isdecimal()
    -        ):
    -            self.worker_memory_limits = f"{self.worker_memory_limits}G"
    -
    -    def _memory_to_string(self):
    -        if isinstance(self.head_memory_requests, int):
    -            self.head_memory_requests = f"{self.head_memory_requests}G"
    -        if isinstance(self.head_memory_limits, int):
    -            self.head_memory_limits = f"{self.head_memory_limits}G"
    -        if isinstance(self.worker_memory_requests, int):
    -            self.worker_memory_requests = f"{self.worker_memory_requests}G"
    -        if isinstance(self.worker_memory_limits, int):
    -            self.worker_memory_limits = f"{self.worker_memory_limits}G"
    -
    -    def _cpu_to_resource(self):
    -        if self.head_cpus:
    -            warnings.warn(
    -                "head_cpus is being deprecated, use head_cpu_requests and head_cpu_limits"
    -            )
    -            self.head_cpu_requests = self.head_cpu_limits = self.head_cpus
    -        if self.min_cpus:
    -            warnings.warn("min_cpus is being deprecated, use worker_cpu_requests")
    -            self.worker_cpu_requests = self.min_cpus
    -        if self.max_cpus:
    -            warnings.warn("max_cpus is being deprecated, use worker_cpu_limits")
    -            self.worker_cpu_limits = self.max_cpus
    -
    -    def _memory_to_resource(self):
    -        if self.head_memory:
    -            warnings.warn(
    -                "head_memory is being deprecated, use head_memory_requests and head_memory_limits"
    -            )
    -            self.head_memory_requests = self.head_memory_limits = self.head_memory
    -        if self.min_memory:
    -            warnings.warn("min_memory is being deprecated, use worker_memory_requests")
    -            self.worker_memory_requests = f"{self.min_memory}G"
    -        if self.max_memory:
    -            warnings.warn("max_memory is being deprecated, use worker_memory_limits")
    -            self.worker_memory_limits = f"{self.max_memory}G"
    -
    -    def _validate_types(self):
    -        """Validate the types of all fields in the ClusterConfiguration dataclass."""
    -        for field_info in fields(self):
    -            value = getattr(self, field_info.name)
    -            expected_type = field_info.type
    -            if not self._is_type(value, expected_type):
    -                raise TypeError(
    -                    f"'{field_info.name}' should be of type {expected_type}"
    -                )
    -
    -    @staticmethod
    -    def _is_type(value, expected_type):
    -        """Check if the value matches the expected type."""
    -
    -        def check_type(value, expected_type):
    -            origin_type = get_origin(expected_type)
    -            args = get_args(expected_type)
    -            if origin_type is Union:
    -                return any(check_type(value, union_type) for union_type in args)
    -            if origin_type is list:
    -                return all(check_type(elem, args[0]) for elem in value)
    -            if origin_type is dict:
    -                return all(
    -                    check_type(k, args[0]) and check_type(v, args[1])
    -                    for k, v in value.items()
    -                )
    -            if origin_type is tuple:
    -                return all(check_type(elem, etype) for elem, etype in zip(value, args))
    -            return isinstance(value, expected_type)
    -
    -        return check_type(value, expected_type)
    -
    -
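    The _validate_types/_is_type helpers above lean on typing.get_origin and typing.get_args to check values against generic annotations at runtime. A standalone sketch of that pattern (illustrative only, not part of the SDK):

        from typing import Dict, Union, get_args, get_origin

        def matches(value, annotation) -> bool:
            origin, args = get_origin(annotation), get_args(annotation)
            if origin is Union:        # Union[...] / Optional[...]
                return any(matches(value, a) for a in args)
            if origin is list:         # List[X]
                return isinstance(value, list) and all(matches(v, args[0]) for v in value)
            if origin is dict:         # Dict[K, V]
                return isinstance(value, dict) and all(
                    matches(k, args[0]) and matches(v, args[1]) for k, v in value.items()
                )
            return isinstance(value, annotation)

        assert matches({"nvidia.com/gpu": 1}, Dict[str, Union[str, int]])
        assert not matches(3.5, Union[int, str])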

    Class variables

    -
    -
    var appwrapper : bool
    -
    -
    -
    -
    var envs : Dict[str, str]
    -
    -
    -
    -
    var extended_resource_mapping : Dict[str, str]
    -
    -
    -
    -
    var head_cpu_limits : Union[int, str]
    -
    -
    -
    -
    var head_cpu_requests : Union[int, str]
    -
    -
    -
    -
    var head_cpus : Union[int, str, ForwardRef(None)]
    -
    -
    -
    -
    var head_extended_resource_requests : Dict[str, Union[str, int]]
    -
    -
    -
    -
    var head_gpus : Optional[int]
    -
    -
    -
    -
    var head_info : List[str]
    -
    -
    -
    -
    var head_memory : Union[int, str, ForwardRef(None)]
    -
    -
    -
    -
    var head_memory_limits : Union[int, str]
    -
    -
    -
    -
    var head_memory_requests : Union[int, str]
    -
    -
    -
    -
    var image : str
    -
    -
    -
    -
    var image_pull_secrets : List[str]
    -
    -
    -
    -
    var labels : Dict[str, str]
    -
    -
    -
    -
    var local_queue : Optional[str]
    -
    -
    -
    -
    var machine_types : List[str]
    -
    -
    -
    -
    var max_cpus : Union[int, str, ForwardRef(None)]
    -
    -
    -
    -
    var max_memory : Union[int, str, ForwardRef(None)]
    -
    -
    -
    -
    var min_cpus : Union[int, str, ForwardRef(None)]
    -
    -
    -
    -
    var min_memory : Union[int, str, ForwardRef(None)]
    -
    -
    -
    -
    var name : str
    -
    -
    -
    -
    var namespace : Optional[str]
    -
    -
    -
    -
    var num_gpus : Optional[int]
    -
    -
    -
    -
    var num_workers : int
    -
    -
    -
    -
    var overwrite_default_resource_mapping : bool
    -
    -
    -
    -
    var template : str
    -
    -
    -
    -
    var verify_tls : bool
    -
    -
    -
    -
    var worker_cpu_limits : Union[int, str]
    -
    -
    -
    -
    var worker_cpu_requests : Union[int, str]
    -
    -
    -
    -
    var worker_extended_resource_requests : Dict[str, Union[str, int]]
    -
    -
    -
    -
    var worker_memory_limits : Union[int, str]
    -
    -
    -
    -
    var worker_memory_requests : Union[int, str]
    -
    -
    -
    -
    var write_to_file : bool
    -
    -
    -
    -
    -
    -
    -
    -
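    Building on the mapping logic above, a hedged sketch of registering a custom accelerator: the resource name "example.com/accel" and the Ray resource label "ACCEL" are invented for illustration, and the mapping entry is what lets the request pass _validate_extended_resource_requests:

        from codeflare_sdk.cluster import ClusterConfiguration

        config = ClusterConfiguration(
            name="accel-test",                                        # placeholder name
            extended_resource_mapping={"example.com/accel": "ACCEL"},
            worker_extended_resource_requests={"example.com/accel": 2},
        )

    Without the extended_resource_mapping entry, the request would raise a ValueError because the key is not present in the combined default mapping.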
    - -
    diff --git a/docs/detailed-documentation/cluster/index.html b/docs/detailed-documentation/cluster/index.html
    deleted file mode 100644
    index f8c04fa2..00000000
    --- a/docs/detailed-documentation/cluster/index.html
    +++ /dev/null
    @@ -1,129 +0,0 @@
    -codeflare_sdk.cluster API documentation
    -
    -
    -

    Module codeflare_sdk.cluster

    -
    -
    -
    from .auth import (
    -    Authentication,
    -    KubeConfiguration,
    -    TokenAuthentication,
    -    KubeConfigFileAuthentication,
    -)
    -
    -from .model import (
    -    RayClusterStatus,
    -    AppWrapperStatus,
    -    CodeFlareClusterStatus,
    -    RayCluster,
    -    AppWrapper,
    -)
    -
    -from .cluster import (
    -    Cluster,
    -    ClusterConfiguration,
    -    get_cluster,
    -    list_all_queued,
    -    list_all_clusters,
    -)
    -
    -from .widgets import (
    -    view_clusters,
    -)
    -
    -from .awload import AWManager
    -
    -
    -
    -
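    These re-exports define the package's public surface. A typical import, limited to the names listed here (the namespace string and the meaning of the second argument to list_all_clusters are assumptions made for the sake of the example):

        from codeflare_sdk.cluster import (
            Cluster,
            ClusterConfiguration,
            TokenAuthentication,
            list_all_clusters,
            view_clusters,
        )

        clusters = list_all_clusters("default", False)   # assumed: False suppresses console output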

    Sub-modules

    codeflare_sdk.cluster.auth

    The auth sub-module contains the definitions for the Authentication objects, which represent the methods by which a user can authenticate to their …

    codeflare_sdk.cluster.awload

    The awload sub-module contains the definition of the AWManager object, which handles submission and deletion of existing AppWrappers from a user's …

    codeflare_sdk.cluster.cluster

    The cluster sub-module contains the definition of the Cluster object, which represents the resources requested by the user. It also contains functions …

    codeflare_sdk.cluster.config

    The config sub-module contains the definition of the ClusterConfiguration dataclass, which is used to specify resource requirements and other details …

    codeflare_sdk.cluster.model

    The model sub-module defines Enums containing information for Ray cluster states and AppWrapper states, and CodeFlare cluster states, as well as …

    codeflare_sdk.cluster.widgets

    The widgets sub-module contains the ui widgets created using the ipywidgets package.

    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
    diff --git a/docs/detailed-documentation/cluster/model.html b/docs/detailed-documentation/cluster/model.html
    deleted file mode 100644
    index 7d87e34f..00000000
    --- a/docs/detailed-documentation/cluster/model.html
    +++ /dev/null
    @@ -1,531 +0,0 @@
    -codeflare_sdk.cluster.model API documentation
    -
    -
    -

    Module codeflare_sdk.cluster.model

    -
    -
    -

    The model sub-module defines Enums containing information for Ray cluster states and AppWrapper states, and CodeFlare cluster states, as well as dataclasses to store information for Ray clusters and AppWrappers.

    -
    # Copyright 2022 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -The model sub-module defines Enums containing information for Ray cluster
    -states and AppWrapper states, and CodeFlare cluster states, as well as
    -dataclasses to store information for Ray clusters and AppWrappers.
    -"""
    -
    -from dataclasses import dataclass, field
    -from enum import Enum
    -import typing
    -from typing import Union
    -
    -
    -class RayClusterStatus(Enum):
    -    """
    -    Defines the possible reportable states of a Ray cluster.
    -    """
    -
    -    # https://github.com/ray-project/kuberay/blob/master/ray-operator/apis/ray/v1/raycluster_types.go#L112-L117
    -    READY = "ready"
    -    UNHEALTHY = "unhealthy"
    -    FAILED = "failed"
    -    UNKNOWN = "unknown"
    -    SUSPENDED = "suspended"
    -
    -
    -class AppWrapperStatus(Enum):
    -    """
    -    Defines the possible reportable phases of an AppWrapper.
    -    """
    -
    -    SUSPENDED = "suspended"
    -    RESUMING = "resuming"
    -    RUNNING = "running"
    -    RESETTING = "resetting"
    -    SUSPENDING = "suspending"
    -    SUCCEEDED = "succeeded"
    -    FAILED = "failed"
    -    TERMINATING = "terminating"
    -
    -
    -class CodeFlareClusterStatus(Enum):
    -    """
    -    Defines the possible reportable states of a Codeflare cluster.
    -    """
    -
    -    READY = 1
    -    STARTING = 2
    -    QUEUED = 3
    -    QUEUEING = 4
    -    FAILED = 5
    -    UNKNOWN = 6
    -    SUSPENDED = 7
    -
    -
    -@dataclass
    -class RayCluster:
    -    """
    -    For storing information about a Ray cluster.
    -    """
    -
    -    name: str
    -    status: RayClusterStatus
    -    head_cpu_requests: int
    -    head_cpu_limits: int
    -    head_mem_requests: str
    -    head_mem_limits: str
    -    num_workers: int
    -    worker_mem_requests: str
    -    worker_mem_limits: str
    -    worker_cpu_requests: Union[int, str]
    -    worker_cpu_limits: Union[int, str]
    -    namespace: str
    -    dashboard: str
    -    worker_extended_resources: typing.Dict[str, int] = field(default_factory=dict)
    -    head_extended_resources: typing.Dict[str, int] = field(default_factory=dict)
    -
    -
    -@dataclass
    -class AppWrapper:
    -    """
    -    For storing information about an AppWrapper.
    -    """
    -
    -    name: str
    -    status: AppWrapperStatus
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
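    A short illustrative use of these definitions (the fallback handling is a generic enum pattern, not something the model module itself performs):

        from codeflare_sdk.cluster.model import RayClusterStatus

        state = RayClusterStatus("ready")          # parse a raw state string
        assert state is RayClusterStatus.READY

        try:                                       # unexpected strings raise ValueError,
            state = RayClusterStatus("scaling")    # so a caller can fall back to UNKNOWN
        except ValueError:
            state = RayClusterStatus.UNKNOWN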

    Classes

    -
    -
    -class AppWrapper -(name: str, status: AppWrapperStatus) -
    -
    -

    For storing information about an AppWrapper.

    -
    @dataclass
    -class AppWrapper:
    -    """
    -    For storing information about an AppWrapper.
    -    """
    -
    -    name: str
    -    status: AppWrapperStatus
    -
    -

    Class variables

    -
    -
    var name : str
    -
    -
    -
    -
    var status : AppWrapperStatus
    -
    -
    -
    -
    -
    -
    -class AppWrapperStatus -(value, names=None, *, module=None, qualname=None, type=None, start=1) -
    -
    -

    Defines the possible reportable phases of an AppWrapper.

    -
    class AppWrapperStatus(Enum):
    -    """
    -    Defines the possible reportable phases of an AppWrapper.
    -    """
    -
    -    SUSPENDED = "suspended"
    -    RESUMING = "resuming"
    -    RUNNING = "running"
    -    RESETTING = "resetting"
    -    SUSPENDING = "suspending"
    -    SUCCEEDED = "succeeded"
    -    FAILED = "failed"
    -    TERMINATING = "terminating"
    -
    -

    Ancestors

    -
      -
    • enum.Enum
    • -
    -

    Class variables

    -
    -
    var FAILED
    -
    -
    -
    -
    var RESETTING
    -
    -
    -
    -
    var RESUMING
    -
    -
    -
    -
    var RUNNING
    -
    -
    -
    -
    var SUCCEEDED
    -
    -
    -
    -
    var SUSPENDED
    -
    -
    -
    -
    var SUSPENDING
    -
    -
    -
    -
    var TERMINATING
    -
    -
    -
    -
    -
    -
    -class CodeFlareClusterStatus -(value, names=None, *, module=None, qualname=None, type=None, start=1) -
    -
    -

    Defines the possible reportable states of a Codeflare cluster.

    -
    class CodeFlareClusterStatus(Enum):
    -    """
    -    Defines the possible reportable states of a Codeflare cluster.
    -    """
    -
    -    READY = 1
    -    STARTING = 2
    -    QUEUED = 3
    -    QUEUEING = 4
    -    FAILED = 5
    -    UNKNOWN = 6
    -    SUSPENDED = 7
    -
    -

    Ancestors

    -
      -
    • enum.Enum
    • -
    -

    Class variables

    -
    -
    var FAILED
    -
    -
    -
    -
    var QUEUED
    -
    -
    -
    -
    var QUEUEING
    -
    -
    -
    -
    var READY
    -
    -
    -
    -
    var STARTING
    -
    -
    -
    -
    var SUSPENDED
    -
    -
    -
    -
    var UNKNOWN
    -
    -
    -
    -
    -
    -
    -class RayCluster -(name: str, status: RayClusterStatus, head_cpu_requests: int, head_cpu_limits: int, head_mem_requests: str, head_mem_limits: str, num_workers: int, worker_mem_requests: str, worker_mem_limits: str, worker_cpu_requests: Union[int, str], worker_cpu_limits: Union[int, str], namespace: str, dashboard: str, worker_extended_resources: Dict[str, int] = <factory>, head_extended_resources: Dict[str, int] = <factory>) -
    -
    -

    For storing information about a Ray cluster.

    -
    @dataclass
    -class RayCluster:
    -    """
    -    For storing information about a Ray cluster.
    -    """
    -
    -    name: str
    -    status: RayClusterStatus
    -    head_cpu_requests: int
    -    head_cpu_limits: int
    -    head_mem_requests: str
    -    head_mem_limits: str
    -    num_workers: int
    -    worker_mem_requests: str
    -    worker_mem_limits: str
    -    worker_cpu_requests: Union[int, str]
    -    worker_cpu_limits: Union[int, str]
    -    namespace: str
    -    dashboard: str
    -    worker_extended_resources: typing.Dict[str, int] = field(default_factory=dict)
    -    head_extended_resources: typing.Dict[str, int] = field(default_factory=dict)
    -
    -
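    A hedged sketch of filling in this record by hand (every value below is a placeholder; in practice the SDK populates RayCluster objects from live Kubernetes data):

        from codeflare_sdk.cluster.model import RayCluster, RayClusterStatus

        info = RayCluster(
            name="raytest",
            status=RayClusterStatus.READY,
            head_cpu_requests=2, head_cpu_limits=2,
            head_mem_requests="8G", head_mem_limits="8G",
            num_workers=1,
            worker_mem_requests="4G", worker_mem_limits="4G",
            worker_cpu_requests=1, worker_cpu_limits=1,
            namespace="default",
            dashboard="https://ray-dashboard.example",   # placeholder URL
        )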

    Class variables

    -
    -
    var dashboard : str
    -
    -
    -
    -
    var head_cpu_limits : int
    -
    -
    -
    -
    var head_cpu_requests : int
    -
    -
    -
    -
    var head_extended_resources : Dict[str, int]
    -
    -
    -
    -
    var head_mem_limits : str
    -
    -
    -
    -
    var head_mem_requests : str
    -
    -
    -
    -
    var name : str
    -
    -
    -
    -
    var namespace : str
    -
    -
    -
    -
    var num_workers : int
    -
    -
    -
    -
    var status : RayClusterStatus
    -
    -
    -
    -
    var worker_cpu_limits : Union[int, str]
    -
    -
    -
    -
    var worker_cpu_requests : Union[int, str]
    -
    -
    -
    -
    var worker_extended_resources : Dict[str, int]
    -
    -
    -
    -
    var worker_mem_limits : str
    -
    -
    -
    -
    var worker_mem_requests : str
    -
    -
    -
    -
    -
    -
    -class RayClusterStatus -(value, names=None, *, module=None, qualname=None, type=None, start=1) -
    -
    -

    Defines the possible reportable states of a Ray cluster.

    -
    class RayClusterStatus(Enum):
    -    """
    -    Defines the possible reportable states of a Ray cluster.
    -    """
    -
    -    # https://github.com/ray-project/kuberay/blob/master/ray-operator/apis/ray/v1/raycluster_types.go#L112-L117
    -    READY = "ready"
    -    UNHEALTHY = "unhealthy"
    -    FAILED = "failed"
    -    UNKNOWN = "unknown"
    -    SUSPENDED = "suspended"
    -
    -

    Ancestors

    -
      -
    • enum.Enum
    • -
    -

    Class variables

    -
    -
    var FAILED
    -
    -
    -
    -
    var READY
    -
    -
    -
    -
    var SUSPENDED
    -
    -
    -
    -
    var UNHEALTHY
    -
    -
    -
    -
    var UNKNOWN
    -
    -
    -
    -
    -
    -
    -
    -
    - -
    diff --git a/docs/detailed-documentation/cluster/widgets.html b/docs/detailed-documentation/cluster/widgets.html
    deleted file mode 100644
    index b0334903..00000000
    --- a/docs/detailed-documentation/cluster/widgets.html
    +++ /dev/null
    @@ -1,758 +0,0 @@
    -codeflare_sdk.cluster.widgets API documentation
    -
    -
    -

    Module codeflare_sdk.cluster.widgets

    -
    -
    -

    The widgets sub-module contains the ui widgets created using the ipywidgets package.

    -
    # Copyright 2024 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -The widgets sub-module contains the ui widgets created using the ipywidgets package.
    -"""
    -import contextlib
    -import io
    -import os
    -import warnings
    -import time
    -import codeflare_sdk
    -from kubernetes import client
    -from kubernetes.client.rest import ApiException
    -import ipywidgets as widgets
    -from IPython.display import display, HTML, Javascript
    -import pandas as pd
    -from .config import ClusterConfiguration
    -from .model import RayClusterStatus
    -from ..utils.kube_api_helpers import _kube_api_error_handling
    -from .auth import config_check, get_api_client
    -
    -
    -def cluster_up_down_buttons(cluster: "codeflare_sdk.cluster.Cluster") -> widgets.Button:
    -    """
    -    The cluster_up_down_buttons function returns two button widgets for a create and delete button.
    -    The function uses the appwrapper bool to distinguish between resource type for the tool tip.
    -    """
    -    resource = "Ray Cluster"
    -    if cluster.config.appwrapper:
    -        resource = "AppWrapper"
    -
    -    up_button = widgets.Button(
    -        description="Cluster Up",
    -        tooltip=f"Create the {resource}",
    -        icon="play",
    -    )
    -
    -    delete_button = widgets.Button(
    -        description="Cluster Down",
    -        tooltip=f"Delete the {resource}",
    -        icon="trash",
    -    )
    -
    -    wait_ready_check = wait_ready_check_box()
    -    output = widgets.Output()
    -
    -    # Display the buttons in an HBox wrapped in a VBox which includes the wait_ready Checkbox
    -    button_display = widgets.HBox([up_button, delete_button])
    -    display(widgets.VBox([button_display, wait_ready_check]), output)
    -
    -    def on_up_button_clicked(b):  # Handle the up button click event
    -        with output:
    -            output.clear_output()
    -            cluster.up()
    -
    -            # If the wait_ready Checkbox is clicked(value == True) trigger the wait_ready function
    -            if wait_ready_check.value:
    -                cluster.wait_ready()
    -
    -    def on_down_button_clicked(b):  # Handle the down button click event
    -        with output:
    -            output.clear_output()
    -            cluster.down()
    -
    -    up_button.on_click(on_up_button_clicked)
    -    delete_button.on_click(on_down_button_clicked)
    -
    -
    -def wait_ready_check_box():
    -    """
    -    The wait_ready_check_box function will return a checkbox widget used for waiting for the resource to be in the state READY.
    -    """
    -    wait_ready_check_box = widgets.Checkbox(
    -        False,
    -        description="Wait for Cluster?",
    -    )
    -    return wait_ready_check_box
    -
    -
    -def is_notebook() -> bool:
    -    """
    -    The is_notebook function checks if Jupyter Notebook environment variables exist in the given environment and return True/False based on that.
    -    """
    -    if (
    -        "PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING" in os.environ
    -        or "JPY_SESSION_NAME" in os.environ
    -    ):  # If running Jupyter NBs in VsCode or RHOAI/ODH display UI buttons
    -        return True
    -    else:
    -        return False
    -
    -
    -def view_clusters(namespace: str = None):
    -    """
    -    view_clusters function will display existing clusters with their specs, and handle user interactions.
    -    """
    -    if not is_notebook():
    -        warnings.warn(
    -            "view_clusters can only be used in a Jupyter Notebook environment."
    -        )
    -        return  # Exit function if not in Jupyter Notebook
    -
    -    from .cluster import get_current_namespace
    -
    -    if not namespace:
    -        namespace = get_current_namespace()
    -
    -    user_output = widgets.Output()
    -    raycluster_data_output = widgets.Output()
    -    url_output = widgets.Output()
    -
    -    ray_clusters_df = _fetch_cluster_data(namespace)
    -    if ray_clusters_df.empty:
    -        print(f"No clusters found in the {namespace} namespace.")
    -        return
    -
    -    classification_widget = widgets.ToggleButtons(
    -        options=ray_clusters_df["Name"].tolist(),
    -        value=ray_clusters_df["Name"].tolist()[0],
    -        description="Select an existing cluster:",
    -    )
    -    # Setting the initial value to trigger the event handler to display the cluster details.
    -    initial_value = classification_widget.value
    -    _on_cluster_click(
    -        {"new": initial_value}, raycluster_data_output, namespace, classification_widget
    -    )
    -    classification_widget.observe(
    -        lambda selection_change: _on_cluster_click(
    -            selection_change, raycluster_data_output, namespace, classification_widget
    -        ),
    -        names="value",
    -    )
    -
    -    # UI table buttons
    -    delete_button = widgets.Button(
    -        description="Delete Cluster",
    -        icon="trash",
    -        tooltip="Delete the selected cluster",
    -    )
    -    delete_button.on_click(
    -        lambda b: _on_delete_button_click(
    -            b,
    -            classification_widget,
    -            ray_clusters_df,
    -            raycluster_data_output,
    -            user_output,
    -            delete_button,
    -            list_jobs_button,
    -            ray_dashboard_button,
    -        )
    -    )
    -
    -    list_jobs_button = widgets.Button(
    -        description="View Jobs", icon="suitcase", tooltip="Open the Ray Job Dashboard"
    -    )
    -    list_jobs_button.on_click(
    -        lambda b: _on_list_jobs_button_click(
    -            b, classification_widget, ray_clusters_df, user_output, url_output
    -        )
    -    )
    -
    -    ray_dashboard_button = widgets.Button(
    -        description="Open Ray Dashboard",
    -        icon="dashboard",
    -        tooltip="Open the Ray Dashboard in a new tab",
    -        layout=widgets.Layout(width="auto"),
    -    )
    -    ray_dashboard_button.on_click(
    -        lambda b: _on_ray_dashboard_button_click(
    -            b, classification_widget, ray_clusters_df, user_output, url_output
    -        )
    -    )
    -
    -    display(widgets.VBox([classification_widget, raycluster_data_output]))
    -    display(
    -        widgets.HBox([delete_button, list_jobs_button, ray_dashboard_button]),
    -        url_output,
    -        user_output,
    -    )
    -
    -
    -def _on_cluster_click(
    -    selection_change,
    -    raycluster_data_output: widgets.Output,
    -    namespace: str,
    -    classification_widget: widgets.ToggleButtons,
    -):
    -    """
    -    _on_cluster_click handles the event when a cluster is selected from the toggle buttons, updating the output with cluster details.
    -    """
    -    new_value = selection_change["new"]
    -    raycluster_data_output.clear_output()
    -    ray_clusters_df = _fetch_cluster_data(namespace)
    -    classification_widget.options = ray_clusters_df["Name"].tolist()
    -    with raycluster_data_output:
    -        display(
    -            HTML(
    -                ray_clusters_df[ray_clusters_df["Name"] == new_value][
    -                    [
    -                        "Name",
    -                        "Namespace",
    -                        "Num Workers",
    -                        "Head GPUs",
    -                        "Head CPU Req~Lim",
    -                        "Head Memory Req~Lim",
    -                        "Worker GPUs",
    -                        "Worker CPU Req~Lim",
    -                        "Worker Memory Req~Lim",
    -                        "status",
    -                    ]
    -                ].to_html(escape=False, index=False, border=2)
    -            )
    -        )
    -
    -
    -def _on_delete_button_click(
    -    b,
    -    classification_widget: widgets.ToggleButtons,
    -    ray_clusters_df: pd.DataFrame,
    -    raycluster_data_output: widgets.Output,
    -    user_output: widgets.Output,
    -    delete_button: widgets.Button,
    -    list_jobs_button: widgets.Button,
    -    ray_dashboard_button: widgets.Button,
    -):
    -    """
    -    _on_delete_button_click handles the event when the Delete Button is clicked, deleting the selected cluster.
    -    """
    -    cluster_name = classification_widget.value
    -    namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][
    -        "Namespace"
    -    ].values[0]
    -
    -    _delete_cluster(cluster_name, namespace)
    -
    -    with user_output:
    -        user_output.clear_output()
    -        print(
    -            f"Cluster {cluster_name} in the {namespace} namespace was deleted successfully."
    -        )
    -
    -    # Refresh the dataframe
    -    new_df = _fetch_cluster_data(namespace)
    -    if new_df.empty:
    -        classification_widget.close()
    -        delete_button.close()
    -        list_jobs_button.close()
    -        ray_dashboard_button.close()
    -        with raycluster_data_output:
    -            raycluster_data_output.clear_output()
    -            print(f"No clusters found in the {namespace} namespace.")
    -    else:
    -        classification_widget.options = new_df["Name"].tolist()
    -
    -
    -def _on_ray_dashboard_button_click(
    -    b,
    -    classification_widget: widgets.ToggleButtons,
    -    ray_clusters_df: pd.DataFrame,
    -    user_output: widgets.Output,
    -    url_output: widgets.Output,
    -):
    -    """
    -    _on_ray_dashboard_button_click handles the event when the Open Ray Dashboard button is clicked, opening the Ray Dashboard in a new tab
    -    """
    -    from codeflare_sdk.cluster import Cluster
    -
    -    cluster_name = classification_widget.value
    -    namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][
    -        "Namespace"
    -    ].values[0]
    -
    -    # Suppress from Cluster Object initialisation widgets and outputs
    -    with widgets.Output(), contextlib.redirect_stdout(
    -        io.StringIO()
    -    ), contextlib.redirect_stderr(io.StringIO()):
    -        cluster = Cluster(ClusterConfiguration(cluster_name, namespace))
    -    dashboard_url = cluster.cluster_dashboard_uri()
    -
    -    with user_output:
    -        user_output.clear_output()
    -        print(f"Opening Ray Dashboard for {cluster_name} cluster:\n{dashboard_url}")
    -    with url_output:
    -        display(Javascript(f'window.open("{dashboard_url}", "_blank");'))
    -
    -
    -def _on_list_jobs_button_click(
    -    b,
    -    classification_widget: widgets.ToggleButtons,
    -    ray_clusters_df: pd.DataFrame,
    -    user_output: widgets.Output,
    -    url_output: widgets.Output,
    -):
    -    """
    -    _on_list_jobs_button_click handles the event when the View Jobs button is clicked, opening the Ray Jobs Dashboard in a new tab
    -    """
    -    from codeflare_sdk.cluster import Cluster
    -
    -    cluster_name = classification_widget.value
    -    namespace = ray_clusters_df[ray_clusters_df["Name"] == classification_widget.value][
    -        "Namespace"
    -    ].values[0]
    -
    -    # Suppress from Cluster Object initialisation widgets and outputs
    -    with widgets.Output(), contextlib.redirect_stdout(
    -        io.StringIO()
    -    ), contextlib.redirect_stderr(io.StringIO()):
    -        cluster = Cluster(ClusterConfiguration(cluster_name, namespace))
    -    dashboard_url = cluster.cluster_dashboard_uri()
    -
    -    with user_output:
    -        user_output.clear_output()
    -        print(
    -            f"Opening Ray Jobs Dashboard for {cluster_name} cluster:\n{dashboard_url}/#/jobs"
    -        )
    -    with url_output:
    -        display(Javascript(f'window.open("{dashboard_url}/#/jobs", "_blank");'))
    -
    -
    -def _delete_cluster(
    -    cluster_name: str,
    -    namespace: str,
    -    timeout: int = 5,
    -    interval: int = 1,
    -):
    -    """
    -    _delete_cluster function deletes the cluster with the given name and namespace.
    -    It optionally waits for the cluster to be deleted.
    -    """
    -    from .cluster import _check_aw_exists
    -
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -
    -        if _check_aw_exists(cluster_name, namespace):
    -            api_instance.delete_namespaced_custom_object(
    -                group="workload.codeflare.dev",
    -                version="v1beta2",
    -                namespace=namespace,
    -                plural="appwrappers",
    -                name=cluster_name,
    -            )
    -            group = "workload.codeflare.dev"
    -            version = "v1beta2"
    -            plural = "appwrappers"
    -        else:
    -            api_instance.delete_namespaced_custom_object(
    -                group="ray.io",
    -                version="v1",
    -                namespace=namespace,
    -                plural="rayclusters",
    -                name=cluster_name,
    -            )
    -            group = "ray.io"
    -            version = "v1"
    -            plural = "rayclusters"
    -
    -        # Wait for the resource to be deleted
    -        while timeout > 0:
    -            try:
    -                api_instance.get_namespaced_custom_object(
    -                    group=group,
    -                    version=version,
    -                    namespace=namespace,
    -                    plural=plural,
    -                    name=cluster_name,
    -                )
    -                # Retry if resource still exists
    -                time.sleep(interval)
    -                timeout -= interval
    -                if timeout <= 0:
    -                    raise TimeoutError(
    -                        f"Timeout waiting for {cluster_name} to be deleted."
    -                    )
    -            except ApiException as e:
    -                # Resource is deleted
    -                if e.status == 404:
    -                    break
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -
    -def _fetch_cluster_data(namespace):
    -    """
    -    _fetch_cluster_data function fetches all clusters and their spec in a given namespace and returns a DataFrame.
    -    """
    -    from .cluster import list_all_clusters
    -
    -    rayclusters = list_all_clusters(namespace, False)
    -    if not rayclusters:
    -        return pd.DataFrame()
    -    names = [item.name for item in rayclusters]
    -    namespaces = [item.namespace for item in rayclusters]
    -    num_workers = [item.num_workers for item in rayclusters]
    -    head_extended_resources = [
    -        f"{list(item.head_extended_resources.keys())[0]}: {list(item.head_extended_resources.values())[0]}"
    -        if item.head_extended_resources
    -        else "0"
    -        for item in rayclusters
    -    ]
    -    worker_extended_resources = [
    -        f"{list(item.worker_extended_resources.keys())[0]}: {list(item.worker_extended_resources.values())[0]}"
    -        if item.worker_extended_resources
    -        else "0"
    -        for item in rayclusters
    -    ]
    -    head_cpu_requests = [
    -        item.head_cpu_requests if item.head_cpu_requests else 0 for item in rayclusters
    -    ]
    -    head_cpu_limits = [
    -        item.head_cpu_limits if item.head_cpu_limits else 0 for item in rayclusters
    -    ]
    -    head_cpu_rl = [
    -        f"{requests}~{limits}"
    -        for requests, limits in zip(head_cpu_requests, head_cpu_limits)
    -    ]
    -    head_mem_requests = [
    -        item.head_mem_requests if item.head_mem_requests else 0 for item in rayclusters
    -    ]
    -    head_mem_limits = [
    -        item.head_mem_limits if item.head_mem_limits else 0 for item in rayclusters
    -    ]
    -    head_mem_rl = [
    -        f"{requests}~{limits}"
    -        for requests, limits in zip(head_mem_requests, head_mem_limits)
    -    ]
    -    worker_cpu_requests = [
    -        item.worker_cpu_requests if item.worker_cpu_requests else 0
    -        for item in rayclusters
    -    ]
    -    worker_cpu_limits = [
    -        item.worker_cpu_limits if item.worker_cpu_limits else 0 for item in rayclusters
    -    ]
    -    worker_cpu_rl = [
    -        f"{requests}~{limits}"
    -        for requests, limits in zip(worker_cpu_requests, worker_cpu_limits)
    -    ]
    -    worker_mem_requests = [
    -        item.worker_mem_requests if item.worker_mem_requests else 0
    -        for item in rayclusters
    -    ]
    -    worker_mem_limits = [
    -        item.worker_mem_limits if item.worker_mem_limits else 0 for item in rayclusters
    -    ]
    -    worker_mem_rl = [
    -        f"{requests}~{limits}"
    -        for requests, limits in zip(worker_mem_requests, worker_mem_limits)
    -    ]
    -    status = [item.status.name for item in rayclusters]
    -
    -    status = [_format_status(item.status) for item in rayclusters]
    -
    -    data = {
    -        "Name": names,
    -        "Namespace": namespaces,
    -        "Num Workers": num_workers,
    -        "Head GPUs": head_extended_resources,
    -        "Worker GPUs": worker_extended_resources,
    -        "Head CPU Req~Lim": head_cpu_rl,
    -        "Head Memory Req~Lim": head_mem_rl,
    -        "Worker CPU Req~Lim": worker_cpu_rl,
    -        "Worker Memory Req~Lim": worker_mem_rl,
    -        "status": status,
    -    }
    -    return pd.DataFrame(data)
    -
    -
    -def _format_status(status):
    -    """
    -    _format_status function formats the status enum.
    -    """
    -    status_map = {
    -        RayClusterStatus.READY: '<span style="color: green;">Ready ✓</span>',
    -        RayClusterStatus.SUSPENDED: '<span style="color: #007BFF;">Suspended ❄️</span>',
    -        RayClusterStatus.FAILED: '<span style="color: red;">Failed ✗</span>',
    -        RayClusterStatus.UNHEALTHY: '<span style="color: purple;">Unhealthy</span>',
    -        RayClusterStatus.UNKNOWN: '<span style="color: purple;">Unknown</span>',
    -    }
    -    return status_map.get(status, status)
    -
    -
    -
    -
    -
    -
    -
    -
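    Taken together, the helpers above back a single entry point for notebook users. An illustrative cell (the namespace is a placeholder, and a Jupyter environment is assumed so that is_notebook() returns True):

        from codeflare_sdk.cluster import view_clusters

        # Renders a toggle list of RayClusters in the namespace plus
        # Delete / View Jobs / Open Ray Dashboard buttons wired to the handlers above.
        view_clusters(namespace="default")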

    Functions

    -
    -
    -def cluster_up_down_buttons(cluster: codeflare_sdk.cluster.Cluster) ‑> ipywidgets.widgets.widget_button.Button -
    -
    -

    The cluster_up_down_buttons function returns two button widgets, one to create and one to delete the resource. The function uses the appwrapper bool to distinguish between resource types for the tooltip.

    -
    def cluster_up_down_buttons(cluster: "codeflare_sdk.cluster.Cluster") -> widgets.Button:
    -    """
    -    The cluster_up_down_buttons function returns two button widgets for a create and delete button.
    -    The function uses the appwrapper bool to distinguish between resource type for the tool tip.
    -    """
    -    resource = "Ray Cluster"
    -    if cluster.config.appwrapper:
    -        resource = "AppWrapper"
    -
    -    up_button = widgets.Button(
    -        description="Cluster Up",
    -        tooltip=f"Create the {resource}",
    -        icon="play",
    -    )
    -
    -    delete_button = widgets.Button(
    -        description="Cluster Down",
    -        tooltip=f"Delete the {resource}",
    -        icon="trash",
    -    )
    -
    -    wait_ready_check = wait_ready_check_box()
    -    output = widgets.Output()
    -
    -    # Display the buttons in an HBox wrapped in a VBox which includes the wait_ready Checkbox
    -    button_display = widgets.HBox([up_button, delete_button])
    -    display(widgets.VBox([button_display, wait_ready_check]), output)
    -
    -    def on_up_button_clicked(b):  # Handle the up button click event
    -        with output:
    -            output.clear_output()
    -            cluster.up()
    -
    -            # If the wait_ready Checkbox is clicked(value == True) trigger the wait_ready function
    -            if wait_ready_check.value:
    -                cluster.wait_ready()
    -
    -    def on_down_button_clicked(b):  # Handle the down button click event
    -        with output:
    -            output.clear_output()
    -            cluster.down()
    -
    -    up_button.on_click(on_up_button_clicked)
    -    delete_button.on_click(on_down_button_clicked)
    -
    -
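A minimal notebook-side sketch of how these buttons could be rendered, assuming an authenticated session; the import path, cluster name, and namespace below are illustrative and not taken from this patch.

from codeflare_sdk import Cluster, ClusterConfiguration
from codeflare_sdk.cluster.widgets import cluster_up_down_buttons  # hypothetical module path

# Placeholder configuration; any valid ClusterConfiguration works here.
cluster = Cluster(ClusterConfiguration(name="example-cluster", namespace="example-ns"))

# Renders the "Cluster Up"/"Cluster Down" buttons plus the wait_ready checkbox.
cluster_up_down_buttons(cluster)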
    -
    -def is_notebook() ‑> bool -
    -
    -

    The is_notebook function checks whether Jupyter Notebook environment variables exist in the given environment and returns True/False accordingly.

    -
    - -Expand source code - -
    def is_notebook() -> bool:
    -    """
    -    The is_notebook function checks if Jupyter Notebook environment variables exist in the given environment and return True/False based on that.
    -    """
    -    if (
    -        "PYDEVD_IPYTHON_COMPATIBLE_DEBUGGING" in os.environ
    -        or "JPY_SESSION_NAME" in os.environ
    -    ):  # If running Jupyter NBs in VsCode or RHOAI/ODH display UI buttons
    -        return True
    -    else:
    -        return False
    -
    -
    -
    -def view_clusters(namespace: str = None) -
    -
    -

    The view_clusters function displays existing clusters with their specs and handles user interactions.

    -
    - -Expand source code - -
    def view_clusters(namespace: str = None):
    -    """
    -    view_clusters function will display existing clusters with their specs, and handle user interactions.
    -    """
    -    if not is_notebook():
    -        warnings.warn(
    -            "view_clusters can only be used in a Jupyter Notebook environment."
    -        )
    -        return  # Exit function if not in Jupyter Notebook
    -
    -    from .cluster import get_current_namespace
    -
    -    if not namespace:
    -        namespace = get_current_namespace()
    -
    -    user_output = widgets.Output()
    -    raycluster_data_output = widgets.Output()
    -    url_output = widgets.Output()
    -
    -    ray_clusters_df = _fetch_cluster_data(namespace)
    -    if ray_clusters_df.empty:
    -        print(f"No clusters found in the {namespace} namespace.")
    -        return
    -
    -    classification_widget = widgets.ToggleButtons(
    -        options=ray_clusters_df["Name"].tolist(),
    -        value=ray_clusters_df["Name"].tolist()[0],
    -        description="Select an existing cluster:",
    -    )
    -    # Setting the initial value to trigger the event handler to display the cluster details.
    -    initial_value = classification_widget.value
    -    _on_cluster_click(
    -        {"new": initial_value}, raycluster_data_output, namespace, classification_widget
    -    )
    -    classification_widget.observe(
    -        lambda selection_change: _on_cluster_click(
    -            selection_change, raycluster_data_output, namespace, classification_widget
    -        ),
    -        names="value",
    -    )
    -
    -    # UI table buttons
    -    delete_button = widgets.Button(
    -        description="Delete Cluster",
    -        icon="trash",
    -        tooltip="Delete the selected cluster",
    -    )
    -    delete_button.on_click(
    -        lambda b: _on_delete_button_click(
    -            b,
    -            classification_widget,
    -            ray_clusters_df,
    -            raycluster_data_output,
    -            user_output,
    -            delete_button,
    -            list_jobs_button,
    -            ray_dashboard_button,
    -        )
    -    )
    -
    -    list_jobs_button = widgets.Button(
    -        description="View Jobs", icon="suitcase", tooltip="Open the Ray Job Dashboard"
    -    )
    -    list_jobs_button.on_click(
    -        lambda b: _on_list_jobs_button_click(
    -            b, classification_widget, ray_clusters_df, user_output, url_output
    -        )
    -    )
    -
    -    ray_dashboard_button = widgets.Button(
    -        description="Open Ray Dashboard",
    -        icon="dashboard",
    -        tooltip="Open the Ray Dashboard in a new tab",
    -        layout=widgets.Layout(width="auto"),
    -    )
    -    ray_dashboard_button.on_click(
    -        lambda b: _on_ray_dashboard_button_click(
    -            b, classification_widget, ray_clusters_df, user_output, url_output
    -        )
    -    )
    -
    -    display(widgets.VBox([classification_widget, raycluster_data_output]))
    -    display(
    -        widgets.HBox([delete_button, list_jobs_button, ray_dashboard_button]),
    -        url_output,
    -        user_output,
    -    )
    -
    -
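As a hedged usage example, run from a Jupyter notebook after authenticating to the cluster; the namespace is a placeholder, and omitting it falls back to the current namespace.

from codeflare_sdk import view_clusters

# Displays the cluster table with Delete / View Jobs / Open Ray Dashboard buttons.
view_clusters("example-namespace")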
    -
    -def wait_ready_check_box() -
    -
    -

    The wait_ready_check_box function returns a checkbox widget used to wait for the resource to reach the READY state.

    -
    - -Expand source code - -
    def wait_ready_check_box():
    -    """
    -    The wait_ready_check_box function will return a checkbox widget used for waiting for the resource to be in the state READY.
    -    """
    -    wait_ready_check_box = widgets.Checkbox(
    -        False,
    -        description="Wait for Cluster?",
    -    )
    -    return wait_ready_check_box
    -
    -
    -
    -
    -
    -
    -
    - -
diff --git a/docs/detailed-documentation/index.html b/docs/detailed-documentation/index.html
deleted file mode 100644
index 45000719..00000000
--- a/docs/detailed-documentation/index.html
+++ /dev/null
@@ -1,106 +0,0 @@
-codeflare_sdk API documentation
    -
    -
    -

    Package codeflare_sdk

    -
    -
    -
    - -Expand source code - -
    from .cluster import (
    -    Authentication,
    -    KubeConfiguration,
    -    TokenAuthentication,
    -    KubeConfigFileAuthentication,
    -    AWManager,
    -    Cluster,
    -    ClusterConfiguration,
    -    RayClusterStatus,
    -    AppWrapperStatus,
    -    CodeFlareClusterStatus,
    -    RayCluster,
    -    AppWrapper,
    -    get_cluster,
    -    list_all_queued,
    -    list_all_clusters,
    -    view_clusters,
    -)
    -
    -from .job import RayJobClient
    -
    -from .utils import generate_cert
    -from .utils.demos import copy_demo_nbs
    -
    -from importlib.metadata import version, PackageNotFoundError
    -
    -try:
    -    __version__ = version("codeflare-sdk")  # use metadata associated with built package
    -
    -except PackageNotFoundError:
    -    __version__ = "v0.0.0"
    -
    -
    -
    -
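To illustrate the public surface re-exported by this package __init__, here is a short sketch of a typical session; the token, server URL, and cluster settings are placeholders.

from codeflare_sdk import TokenAuthentication, Cluster, ClusterConfiguration

# Placeholder credentials and server address.
auth = TokenAuthentication(token="XXXX", server="https://api.example-cluster:6443", skip_tls=False)
auth.login()

# Placeholder cluster definition; bring it up and wait until it is ready.
cluster = Cluster(ClusterConfiguration(name="example-cluster", namespace="example-ns", num_workers=2))
cluster.up()
cluster.wait_ready()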

    Sub-modules

    -
    -
    codeflare_sdk.cluster
    -
    -
    -
    -
    codeflare_sdk.job
    -
    -
    -
    -
    codeflare_sdk.utils
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
diff --git a/docs/detailed-documentation/job/index.html b/docs/detailed-documentation/job/index.html
deleted file mode 100644
index ccfc679d..00000000
--- a/docs/detailed-documentation/job/index.html
+++ /dev/null
@@ -1,72 +0,0 @@
-codeflare_sdk.job API documentation
    -
    -
    -

    Module codeflare_sdk.job

    -
    -
    -
    - -Expand source code - -
    from .ray_jobs import RayJobClient
    -
    -
    -
    -

    Sub-modules

    -
    -
    codeflare_sdk.job.ray_jobs
    -
    -

    The ray_jobs sub-module contains methods needed to submit jobs and connect to Ray Clusters that were not created by CodeFlare. The SDK acts as a …

    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
diff --git a/docs/detailed-documentation/job/ray_jobs.html b/docs/detailed-documentation/job/ray_jobs.html
deleted file mode 100644
index 20002e27..00000000
--- a/docs/detailed-documentation/job/ray_jobs.html
+++ /dev/null
@@ -1,585 +0,0 @@
-codeflare_sdk.job.ray_jobs API documentation
    -
    -
    -

    Module codeflare_sdk.job.ray_jobs

    -
    -
    -

    The ray_jobs sub-module contains methods needed to submit jobs and connect to Ray Clusters that were not created by CodeFlare. The SDK acts as a wrapper for the Ray Job Submission Client.

    -
    - -Expand source code - -
    # Copyright 2022 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -The ray_jobs sub-module contains methods needed to submit jobs and connect to Ray Clusters that were not created by CodeFlare.
    -The SDK acts as a wrapper for the Ray Job Submission Client.
    -"""
    -
    -from ray.job_submission import JobSubmissionClient
    -from ray.dashboard.modules.job.pydantic_models import JobDetails
    -from typing import Iterator, Optional, Dict, Any, Union, List
    -
    -
    -class RayJobClient:
    -    """
    -    A class that functions as a wrapper for the Ray Job Submission Client.
    -
    -    parameters:
    -    address -- Either (1) the address of the Ray cluster, or (2) the HTTP address of the dashboard server on the head node, e.g. “http://<head-node-ip>:8265”. In case (1) it must be specified as an address that can be passed to ray.init(),
    -    e.g. a Ray Client address (ray://<head_node_host>:10001), or “auto”, or “localhost:<port>”. If unspecified, will try to connect to a running local Ray cluster. This argument is always overridden by the RAY_ADDRESS environment variable.
    -    create_cluster_if_needed -- Indicates whether the cluster at the specified address needs to already be running. Ray doesn't start a cluster before interacting with jobs, but third-party job managers may do so.
    -    cookies -- Cookies to use when sending requests to the HTTP job server.
    -    metadata -- Arbitrary metadata to store along with all jobs. New metadata specified per job will be merged with the global metadata provided here via a simple dict update.
    -    headers -- Headers to use when sending requests to the HTTP job server, used for cases like authentication to a remote cluster.
    -    verify -- Boolean indication to verify the server's TLS certificate or a path to a file or directory of trusted certificates. Default: True.
    -    """
    -
    -    def __init__(
    -        self,
    -        address: Optional[str] = None,
    -        create_cluster_if_needed: bool = False,
    -        cookies: Optional[Dict[str, Any]] = None,
    -        metadata: Optional[Dict[str, Any]] = None,
    -        headers: Optional[Dict[str, Any]] = None,
    -        verify: Optional[Union[str, bool]] = True,
    -    ):
    -        self.rayJobClient = JobSubmissionClient(
    -            address=address,
    -            create_cluster_if_needed=create_cluster_if_needed,
    -            cookies=cookies,
    -            metadata=metadata,
    -            headers=headers,
    -            verify=verify,
    -        )
    -
    -    def submit_job(
    -        self,
    -        entrypoint: str,
    -        job_id: Optional[str] = None,
    -        runtime_env: Optional[Dict[str, Any]] = None,
    -        metadata: Optional[Dict[str, str]] = None,
    -        submission_id: Optional[str] = None,
    -        entrypoint_num_cpus: Optional[Union[int, float]] = None,
    -        entrypoint_num_gpus: Optional[Union[int, float]] = None,
    -        entrypoint_memory: Optional[int] = None,
    -        entrypoint_resources: Optional[Dict[str, float]] = None,
    -    ) -> str:
    -        """
    -        Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.
    -
    -        Parameters:
    -        entrypoint -- The shell command to run for this job.
    -        submission_id -- A unique ID for this job.
    -        runtime_env -- The runtime environment to install and run this job in.
    -        metadata -- Arbitrary data to store along with this job.
    -        job_id -- DEPRECATED. This has been renamed to submission_id
    -        entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    -        entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    -        entrypoint_memory –- The quantity of memory to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    -        entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.
    -        """
    -        return self.rayJobClient.submit_job(
    -            entrypoint=entrypoint,
    -            job_id=job_id,
    -            runtime_env=runtime_env,
    -            metadata=metadata,
    -            submission_id=submission_id,
    -            entrypoint_num_cpus=entrypoint_num_cpus,
    -            entrypoint_num_gpus=entrypoint_num_gpus,
    -            entrypoint_memory=entrypoint_memory,
    -            entrypoint_resources=entrypoint_resources,
    -        )
    -
    -    def delete_job(self, job_id: str) -> (bool, str):
    -        """
    -        Method for deleting jobs with the job id being a mandatory field.
    -        """
    -        deletion_status = self.rayJobClient.delete_job(job_id=job_id)
    -
    -        if deletion_status:
    -            message = f"Successfully deleted Job {job_id}"
    -        else:
    -            message = f"Failed to delete Job {job_id}"
    -
    -        return deletion_status, message
    -
    -    def get_address(self) -> str:
    -        """
    -        Method for getting the address from the RayJobClient
    -        """
    -        return self.rayJobClient.get_address()
    -
    -    def get_job_info(self, job_id: str):
    -        """
    -        Method for getting the job info with the job id being a mandatory field.
    -        """
    -        return self.rayJobClient.get_job_info(job_id=job_id)
    -
    -    def get_job_logs(self, job_id: str) -> str:
    -        """
    -        Method for getting the job logs with the job id being a mandatory field.
    -        """
    -        return self.rayJobClient.get_job_logs(job_id=job_id)
    -
    -    def get_job_status(self, job_id: str) -> str:
    -        """
    -        Method for getting the job's status with the job id being a mandatory field.
    -        """
    -        return self.rayJobClient.get_job_status(job_id=job_id)
    -
    -    def list_jobs(self) -> List[JobDetails]:
    -        """
    -        Method for getting a list of current jobs in the Ray Cluster.
    -        """
    -        return self.rayJobClient.list_jobs()
    -
    -    def stop_job(self, job_id: str) -> (bool, str):
    -        """
    -        Method for stopping a job with the job id being a mandatory field.
    -        """
    -        stop_job_status = self.rayJobClient.stop_job(job_id=job_id)
    -        if stop_job_status:
    -            message = f"Successfully stopped Job {job_id}"
    -        else:
    -            message = f"Failed to stop Job, {job_id} could have already completed."
    -        return stop_job_status, message
    -
    -    def tail_job_logs(self, job_id: str) -> Iterator[str]:
    -        """
    -        Method for getting an iterator that follows the logs of a job with the job id being a mandatory field.
    -        """
    -        return self.rayJobClient.tail_job_logs(job_id=job_id)
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    Classes

    -
    -
    -class RayJobClient -(address: Optional[str] = None, create_cluster_if_needed: bool = False, cookies: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, Any]] = None, headers: Optional[Dict[str, Any]] = None, verify: Union[str, bool, ForwardRef(None)] = True) -
    -
    -

    A class that functions as a wrapper for the Ray Job Submission Client.

    -

    parameters:
    address -- Either (1) the address of the Ray cluster, or (2) the HTTP address of the dashboard server on the head node, e.g. “http://<head-node-ip>:8265”. In case (1) it must be specified as an address that can be passed to ray.init(), e.g. a Ray Client address (ray://<head_node_host>:10001), or “auto”, or “localhost:<port>”. If unspecified, will try to connect to a running local Ray cluster. This argument is always overridden by the RAY_ADDRESS environment variable.
    create_cluster_if_needed -- Indicates whether the cluster at the specified address needs to already be running. Ray doesn't start a cluster before interacting with jobs, but third-party job managers may do so.
    cookies -- Cookies to use when sending requests to the HTTP job server.
    metadata -- Arbitrary metadata to store along with all jobs. New metadata specified per job will be merged with the global metadata provided here via a simple dict update.
    headers -- Headers to use when sending requests to the HTTP job server, used for cases like authentication to a remote cluster.
    verify -- Boolean indication to verify the server's TLS certificate or a path to a file or directory of trusted certificates. Default: True.

    -
    - -Expand source code - -
    class RayJobClient:
    -    """
    -    A class that functions as a wrapper for the Ray Job Submission Client.
    -
    -    parameters:
    -    address -- Either (1) the address of the Ray cluster, or (2) the HTTP address of the dashboard server on the head node, e.g. “http://<head-node-ip>:8265”. In case (1) it must be specified as an address that can be passed to ray.init(),
    -    e.g. a Ray Client address (ray://<head_node_host>:10001), or “auto”, or “localhost:<port>”. If unspecified, will try to connect to a running local Ray cluster. This argument is always overridden by the RAY_ADDRESS environment variable.
    -    create_cluster_if_needed -- Indicates whether the cluster at the specified address needs to already be running. Ray doesn't start a cluster before interacting with jobs, but third-party job managers may do so.
    -    cookies -- Cookies to use when sending requests to the HTTP job server.
    -    metadata -- Arbitrary metadata to store along with all jobs. New metadata specified per job will be merged with the global metadata provided here via a simple dict update.
    -    headers -- Headers to use when sending requests to the HTTP job server, used for cases like authentication to a remote cluster.
    -    verify -- Boolean indication to verify the server's TLS certificate or a path to a file or directory of trusted certificates. Default: True.
    -    """
    -
    -    def __init__(
    -        self,
    -        address: Optional[str] = None,
    -        create_cluster_if_needed: bool = False,
    -        cookies: Optional[Dict[str, Any]] = None,
    -        metadata: Optional[Dict[str, Any]] = None,
    -        headers: Optional[Dict[str, Any]] = None,
    -        verify: Optional[Union[str, bool]] = True,
    -    ):
    -        self.rayJobClient = JobSubmissionClient(
    -            address=address,
    -            create_cluster_if_needed=create_cluster_if_needed,
    -            cookies=cookies,
    -            metadata=metadata,
    -            headers=headers,
    -            verify=verify,
    -        )
    -
    -    def submit_job(
    -        self,
    -        entrypoint: str,
    -        job_id: Optional[str] = None,
    -        runtime_env: Optional[Dict[str, Any]] = None,
    -        metadata: Optional[Dict[str, str]] = None,
    -        submission_id: Optional[str] = None,
    -        entrypoint_num_cpus: Optional[Union[int, float]] = None,
    -        entrypoint_num_gpus: Optional[Union[int, float]] = None,
    -        entrypoint_memory: Optional[int] = None,
    -        entrypoint_resources: Optional[Dict[str, float]] = None,
    -    ) -> str:
    -        """
    -        Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.
    -
    -        Parameters:
    -        entrypoint -- The shell command to run for this job.
    -        submission_id -- A unique ID for this job.
    -        runtime_env -- The runtime environment to install and run this job in.
    -        metadata -- Arbitrary data to store along with this job.
    -        job_id -- DEPRECATED. This has been renamed to submission_id
    -        entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    -        entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    -        entrypoint_memory –- The quantity of memory to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    -        entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.
    -        """
    -        return self.rayJobClient.submit_job(
    -            entrypoint=entrypoint,
    -            job_id=job_id,
    -            runtime_env=runtime_env,
    -            metadata=metadata,
    -            submission_id=submission_id,
    -            entrypoint_num_cpus=entrypoint_num_cpus,
    -            entrypoint_num_gpus=entrypoint_num_gpus,
    -            entrypoint_memory=entrypoint_memory,
    -            entrypoint_resources=entrypoint_resources,
    -        )
    -
    -    def delete_job(self, job_id: str) -> (bool, str):
    -        """
    -        Method for deleting jobs with the job id being a mandatory field.
    -        """
    -        deletion_status = self.rayJobClient.delete_job(job_id=job_id)
    -
    -        if deletion_status:
    -            message = f"Successfully deleted Job {job_id}"
    -        else:
    -            message = f"Failed to delete Job {job_id}"
    -
    -        return deletion_status, message
    -
    -    def get_address(self) -> str:
    -        """
    -        Method for getting the address from the RayJobClient
    -        """
    -        return self.rayJobClient.get_address()
    -
    -    def get_job_info(self, job_id: str):
    -        """
    -        Method for getting the job info with the job id being a mandatory field.
    -        """
    -        return self.rayJobClient.get_job_info(job_id=job_id)
    -
    -    def get_job_logs(self, job_id: str) -> str:
    -        """
    -        Method for getting the job logs with the job id being a mandatory field.
    -        """
    -        return self.rayJobClient.get_job_logs(job_id=job_id)
    -
    -    def get_job_status(self, job_id: str) -> str:
    -        """
    -        Method for getting the job's status with the job id being a mandatory field.
    -        """
    -        return self.rayJobClient.get_job_status(job_id=job_id)
    -
    -    def list_jobs(self) -> List[JobDetails]:
    -        """
    -        Method for getting a list of current jobs in the Ray Cluster.
    -        """
    -        return self.rayJobClient.list_jobs()
    -
    -    def stop_job(self, job_id: str) -> (bool, str):
    -        """
    -        Method for stopping a job with the job id being a mandatory field.
    -        """
    -        stop_job_status = self.rayJobClient.stop_job(job_id=job_id)
    -        if stop_job_status:
    -            message = f"Successfully stopped Job {job_id}"
    -        else:
    -            message = f"Failed to stop Job, {job_id} could have already completed."
    -        return stop_job_status, message
    -
    -    def tail_job_logs(self, job_id: str) -> Iterator[str]:
    -        """
    -        Method for getting an iterator that follows the logs of a job with the job id being a mandatory field.
    -        """
    -        return self.rayJobClient.tail_job_logs(job_id=job_id)
    -
    -
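A hedged end-to-end sketch of this client follows; the dashboard address, bearer token, and training script are placeholders, and the Authorization header is only needed on clusters that enforce authentication.

from codeflare_sdk import RayJobClient

client = RayJobClient(
    address="https://ray-dashboard-example.apps.example.com",  # placeholder dashboard URL
    headers={"Authorization": "Bearer XXXX"},                   # placeholder token
    verify=False,
)

# Submit a job and inspect its status and logs.
submission_id = client.submit_job(
    entrypoint="python train.py",                       # placeholder script
    runtime_env={"working_dir": "./", "pip": ["torch"]},
)

print(client.get_job_status(submission_id))
print(client.get_job_logs(submission_id))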

    Methods

    -
    -
    -def delete_job(self, job_id: str) ‑> () -
    -
    -

    Method for deleting jobs with the job id being a mandatory field.

    -
    - -Expand source code - -
    def delete_job(self, job_id: str) -> (bool, str):
    -    """
    -    Method for deleting jobs with the job id being a mandatory field.
    -    """
    -    deletion_status = self.rayJobClient.delete_job(job_id=job_id)
    -
    -    if deletion_status:
    -        message = f"Successfully deleted Job {job_id}"
    -    else:
    -        message = f"Failed to delete Job {job_id}"
    -
    -    return deletion_status, message
    -
    -
    -
    -def get_address(self) ‑> str -
    -
    -

    Method for getting the address from the RayJobClient

    -
    - -Expand source code - -
    def get_address(self) -> str:
    -    """
    -    Method for getting the address from the RayJobClient
    -    """
    -    return self.rayJobClient.get_address()
    -
    -
    -
    -def get_job_info(self, job_id: str) -
    -
    -

    Method for getting the job info with the job id being a mandatory field.

    -
    - -Expand source code - -
    def get_job_info(self, job_id: str):
    -    """
    -    Method for getting the job info with the job id being a mandatory field.
    -    """
    -    return self.rayJobClient.get_job_info(job_id=job_id)
    -
    -
    -
    -def get_job_logs(self, job_id: str) ‑> str -
    -
    -

    Method for getting the job logs with the job id being a mandatory field.

    -
    - -Expand source code - -
    def get_job_logs(self, job_id: str) -> str:
    -    """
    -    Method for getting the job logs with the job id being a mandatory field.
    -    """
    -    return self.rayJobClient.get_job_logs(job_id=job_id)
    -
    -
    -
    -def get_job_status(self, job_id: str) ‑> str -
    -
    -

    Method for getting the job's status with the job id being a mandatory field.

    -
    - -Expand source code - -
    def get_job_status(self, job_id: str) -> str:
    -    """
    -    Method for getting the job's status with the job id being a mandatory field.
    -    """
    -    return self.rayJobClient.get_job_status(job_id=job_id)
    -
    -
    -
    -def list_jobs(self) ‑> List[ray.dashboard.modules.job.pydantic_models.JobDetails] -
    -
    -

    Method for getting a list of current jobs in the Ray Cluster.

    -
    - -Expand source code - -
    def list_jobs(self) -> List[JobDetails]:
    -    """
    -    Method for getting a list of current jobs in the Ray Cluster.
    -    """
    -    return self.rayJobClient.list_jobs()
    -
    -
    -
    -def stop_job(self, job_id: str) ‑> () -
    -
    -

    Method for stopping a job with the job id being a mandatory field.

    -
    - -Expand source code - -
    def stop_job(self, job_id: str) -> (bool, str):
    -    """
    -    Method for stopping a job with the job id being a mandatory field.
    -    """
    -    stop_job_status = self.rayJobClient.stop_job(job_id=job_id)
    -    if stop_job_status:
    -        message = f"Successfully stopped Job {job_id}"
    -    else:
    -        message = f"Failed to stop Job, {job_id} could have already completed."
    -    return stop_job_status, message
    -
    -
    -
    -def submit_job(self, entrypoint: str, job_id: Optional[str] = None, runtime_env: Optional[Dict[str, Any]] = None, metadata: Optional[Dict[str, str]] = None, submission_id: Optional[str] = None, entrypoint_num_cpus: Union[int, float, ForwardRef(None)] = None, entrypoint_num_gpus: Union[int, float, ForwardRef(None)] = None, entrypoint_memory: Optional[int] = None, entrypoint_resources: Optional[Dict[str, float]] = None) ‑> str -
    -
    -

    Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.

    -

    Parameters:
    entrypoint -- The shell command to run for this job.
    submission_id -- A unique ID for this job.
    runtime_env -- The runtime environment to install and run this job in.
    metadata -- Arbitrary data to store along with this job.
    job_id -- DEPRECATED. This has been renamed to submission_id.
    entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    entrypoint_memory -- The quantity of memory to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.

    -
    - -Expand source code - -
    def submit_job(
    -    self,
    -    entrypoint: str,
    -    job_id: Optional[str] = None,
    -    runtime_env: Optional[Dict[str, Any]] = None,
    -    metadata: Optional[Dict[str, str]] = None,
    -    submission_id: Optional[str] = None,
    -    entrypoint_num_cpus: Optional[Union[int, float]] = None,
    -    entrypoint_num_gpus: Optional[Union[int, float]] = None,
    -    entrypoint_memory: Optional[int] = None,
    -    entrypoint_resources: Optional[Dict[str, float]] = None,
    -) -> str:
    -    """
    -    Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field.
    -
    -    Parameters:
    -    entrypoint -- The shell command to run for this job.
    -    submission_id -- A unique ID for this job.
    -    runtime_env -- The runtime environment to install and run this job in.
    -    metadata -- Arbitrary data to store along with this job.
    -    job_id -- DEPRECATED. This has been renamed to submission_id
    -    entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    -    entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    -    entrypoint_memory –- The quantity of memory to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0.
    -    entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it.
    -    """
    -    return self.rayJobClient.submit_job(
    -        entrypoint=entrypoint,
    -        job_id=job_id,
    -        runtime_env=runtime_env,
    -        metadata=metadata,
    -        submission_id=submission_id,
    -        entrypoint_num_cpus=entrypoint_num_cpus,
    -        entrypoint_num_gpus=entrypoint_num_gpus,
    -        entrypoint_memory=entrypoint_memory,
    -        entrypoint_resources=entrypoint_resources,
    -    )
    -
    -
    -
    -def tail_job_logs(self, job_id: str) ‑> Iterator[str] -
    -
    -

    Method for getting an iterator that follows the logs of a job with the job id being a mandatory field.

    -
    - -Expand source code - -
    def tail_job_logs(self, job_id: str) -> Iterator[str]:
    -    """
    -    Method for getting an iterator that follows the logs of a job with the job id being a mandatory field.
    -    """
    -    return self.rayJobClient.tail_job_logs(job_id=job_id)
    -
    -
    -
    -
    -
    -
    -
    - -
diff --git a/docs/detailed-documentation/utils/demos.html b/docs/detailed-documentation/utils/demos.html
deleted file mode 100644
index e0dc5a8e..00000000
--- a/docs/detailed-documentation/utils/demos.html
+++ /dev/null
@@ -1,138 +0,0 @@
-codeflare_sdk.utils.demos API documentation
    -
    -
    -

    Module codeflare_sdk.utils.demos

    -
    -
    -
    - -Expand source code - -
    import pathlib
    -import shutil
    -
    -package_dir = pathlib.Path(__file__).parent.parent.resolve()
    -demo_dir = f"{package_dir}/demo-notebooks"
    -
    -
    -def copy_demo_nbs(dir: str = "./demo-notebooks", overwrite: bool = False):
    -    """
    -    Copy the demo notebooks from the package to the current working directory
    -
    -    overwrite=True will overwrite any files that exactly match files written by copy_demo_nbs in the target directory.
    -    Any files that exist in the directory that don't match these values will remain untouched.
    -
    -    Args:
    -        dir (str): The directory to copy the demo notebooks to. Defaults to "./demo-notebooks". overwrite (bool):
    -        overwrite (bool): Whether to overwrite files in the directory if it already exists. Defaults to False.
    -    Raises:
    -        FileExistsError: If the directory already exists.
    -    """
    -    # does dir exist already?
    -    if overwrite is False and pathlib.Path(dir).exists():
    -        raise FileExistsError(
    -            f"Directory {dir} already exists. Please remove it or provide a different location."
    -        )
    -
    -    shutil.copytree(demo_dir, dir, dirs_exist_ok=True)
    -
    -
    -
    -
    -
    -
    -
    -

    Functions

    -
    -
    -def copy_demo_nbs(dir: str = './demo-notebooks', overwrite: bool = False) -
    -
    -

    Copy the demo notebooks from the package to the current working directory

    -

    overwrite=True will overwrite any files that exactly match files written by copy_demo_nbs in the target directory. Any files that exist in the directory that don't match these values will remain untouched.

    -

    Args

    -
    -
    dir : str
    -
    The directory to copy the demo notebooks to. Defaults to "./demo-notebooks".
    -
    overwrite : bool
    -
    Whether to overwrite files in the directory if it already exists. Defaults to False.
    -
    -

    Raises

    -
    -
    FileExistsError
    -
    If the directory already exists.
    -
    -
    - -Expand source code - -
    def copy_demo_nbs(dir: str = "./demo-notebooks", overwrite: bool = False):
    -    """
    -    Copy the demo notebooks from the package to the current working directory
    -
    -    overwrite=True will overwrite any files that exactly match files written by copy_demo_nbs in the target directory.
    -    Any files that exist in the directory that don't match these values will remain untouched.
    -
    -    Args:
    -        dir (str): The directory to copy the demo notebooks to. Defaults to "./demo-notebooks". overwrite (bool):
    -        overwrite (bool): Whether to overwrite files in the directory if it already exists. Defaults to False.
    -    Raises:
    -        FileExistsError: If the directory already exists.
    -    """
    -    # does dir exist already?
    -    if overwrite is False and pathlib.Path(dir).exists():
    -        raise FileExistsError(
    -            f"Directory {dir} already exists. Please remove it or provide a different location."
    -        )
    -
    -    shutil.copytree(demo_dir, dir, dirs_exist_ok=True)
    -
    -
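A small usage sketch for this helper; the destination directory is arbitrary.

from codeflare_sdk import copy_demo_nbs

# Copies the packaged demo notebooks into ./demo-notebooks;
# pass overwrite=True to refresh an existing copy, or dir="..." for another location.
copy_demo_nbs()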
    -
    -
    -
    -
    -
    - -
diff --git a/docs/detailed-documentation/utils/generate_cert.html b/docs/detailed-documentation/utils/generate_cert.html
deleted file mode 100644
index 01084d84..00000000
--- a/docs/detailed-documentation/utils/generate_cert.html
+++ /dev/null
@@ -1,451 +0,0 @@
-codeflare_sdk.utils.generate_cert API documentation
    -
    -
    -

    Module codeflare_sdk.utils.generate_cert

    -
    -
    -
    - -Expand source code - -
    # Copyright 2022 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -import base64
    -import os
    -from cryptography.hazmat.primitives import serialization, hashes
    -from cryptography.hazmat.primitives.asymmetric import rsa
    -from cryptography import x509
    -from cryptography.x509.oid import NameOID
    -import datetime
    -from ..cluster.auth import config_check, get_api_client
    -from kubernetes import client, config
    -from .kube_api_helpers import _kube_api_error_handling
    -
    -
    -def generate_ca_cert(days: int = 30):
    -    # Generate base64 encoded ca.key and ca.cert
    -    # Similar to:
    -    # openssl req -x509 -nodes -newkey rsa:2048 -keyout ca.key -days 1826 -out ca.crt -subj '/CN=root-ca'
    -    # base64 -i ca.crt -i ca.key
    -
    -    private_key = rsa.generate_private_key(
    -        public_exponent=65537,
    -        key_size=2048,
    -    )
    -
    -    key = base64.b64encode(
    -        private_key.private_bytes(
    -            serialization.Encoding.PEM,
    -            serialization.PrivateFormat.PKCS8,
    -            serialization.NoEncryption(),
    -        )
    -    ).decode("utf-8")
    -
    -    # Generate Certificate
    -    one_day = datetime.timedelta(1, 0, 0)
    -    public_key = private_key.public_key()
    -    builder = (
    -        x509.CertificateBuilder()
    -        .subject_name(
    -            x509.Name(
    -                [
    -                    x509.NameAttribute(NameOID.COMMON_NAME, "root-ca"),
    -                ]
    -            )
    -        )
    -        .issuer_name(
    -            x509.Name(
    -                [
    -                    x509.NameAttribute(NameOID.COMMON_NAME, "root-ca"),
    -                ]
    -            )
    -        )
    -        .not_valid_before(datetime.datetime.today() - one_day)
    -        .not_valid_after(datetime.datetime.today() + (one_day * days))
    -        .serial_number(x509.random_serial_number())
    -        .public_key(public_key)
    -    )
    -    certificate = base64.b64encode(
    -        builder.sign(private_key=private_key, algorithm=hashes.SHA256()).public_bytes(
    -            serialization.Encoding.PEM
    -        )
    -    ).decode("utf-8")
    -    return key, certificate
    -
    -
    -def get_secret_name(cluster_name, namespace, api_instance):
    -    label_selector = f"ray.openshift.ai/cluster-name={cluster_name}"
    -    try:
    -        secrets = api_instance.list_namespaced_secret(
    -            namespace, label_selector=label_selector
    -        )
    -        for secret in secrets.items:
    -            if (
    -                f"{cluster_name}-ca-secret-" in secret.metadata.name
    -            ):  # Oauth secret share the same label this conditional is to make things more specific
    -                return secret.metadata.name
    -            else:
    -                continue
    -        raise KeyError(f"Unable to gather secret name for {cluster_name}")
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -
    -def generate_tls_cert(cluster_name, namespace, days=30):
    -    # Create a folder tls-<cluster>-<namespace> and store three files: ca.crt, tls.crt, and tls.key
    -    tls_dir = os.path.join(os.getcwd(), f"tls-{cluster_name}-{namespace}")
    -    if not os.path.exists(tls_dir):
    -        os.makedirs(tls_dir)
    -
    -    # Similar to:
    -    # oc get secret ca-secret-<cluster-name> -o template='{{index .data "ca.key"}}'
    -    # oc get secret ca-secret-<cluster-name> -o template='{{index .data "ca.crt"}}'|base64 -d > ${TLSDIR}/ca.crt
    -    config_check()
    -    v1 = client.CoreV1Api(get_api_client())
    -
    -    # Secrets have a suffix appended to the end so we must list them and gather the secret that includes cluster_name-ca-secret-
    -    secret_name = get_secret_name(cluster_name, namespace, v1)
    -    secret = v1.read_namespaced_secret(secret_name, namespace).data
    -
    -    ca_cert = secret.get("ca.crt")
    -    ca_key = secret.get("ca.key")
    -
    -    with open(os.path.join(tls_dir, "ca.crt"), "w") as f:
    -        f.write(base64.b64decode(ca_cert).decode("utf-8"))
    -
    -    # Generate tls.key and signed tls.cert locally for ray client
    -    # Similar to running these commands:
    -    # openssl req -nodes -newkey rsa:2048 -keyout ${TLSDIR}/tls.key -out ${TLSDIR}/tls.csr -subj '/CN=local'
    -    # cat <<EOF >${TLSDIR}/domain.ext
    -    # authorityKeyIdentifier=keyid,issuer
    -    # basicConstraints=CA:FALSE
    -    # subjectAltName = @alt_names
    -    # [alt_names]
    -    # DNS.1 = 127.0.0.1
    -    # DNS.2 = localhost
    -    # EOF
    -    # openssl x509 -req -CA ${TLSDIR}/ca.crt -CAkey ${TLSDIR}/ca.key -in ${TLSDIR}/tls.csr -out ${TLSDIR}/tls.crt -days 365 -CAcreateserial -extfile ${TLSDIR}/domain.ext
    -    key = rsa.generate_private_key(
    -        public_exponent=65537,
    -        key_size=2048,
    -    )
    -
    -    tls_key = key.private_bytes(
    -        serialization.Encoding.PEM,
    -        serialization.PrivateFormat.PKCS8,
    -        serialization.NoEncryption(),
    -    )
    -    with open(os.path.join(tls_dir, "tls.key"), "w") as f:
    -        f.write(tls_key.decode("utf-8"))
    -
    -    one_day = datetime.timedelta(1, 0, 0)
    -    tls_cert = (
    -        x509.CertificateBuilder()
    -        .issuer_name(
    -            x509.Name(
    -                [
    -                    x509.NameAttribute(NameOID.COMMON_NAME, "root-ca"),
    -                ]
    -            )
    -        )
    -        .subject_name(
    -            x509.Name(
    -                [
    -                    x509.NameAttribute(NameOID.COMMON_NAME, "local"),
    -                ]
    -            )
    -        )
    -        .public_key(key.public_key())
    -        .not_valid_before(datetime.datetime.today() - one_day)
    -        .not_valid_after(datetime.datetime.today() + (one_day * days))
    -        .serial_number(x509.random_serial_number())
    -        .add_extension(
    -            x509.SubjectAlternativeName(
    -                [x509.DNSName("localhost"), x509.DNSName("127.0.0.1")]
    -            ),
    -            False,
    -        )
    -        .sign(
    -            serialization.load_pem_private_key(base64.b64decode(ca_key), None),
    -            hashes.SHA256(),
    -        )
    -    )
    -
    -    with open(os.path.join(tls_dir, "tls.crt"), "w") as f:
    -        f.write(tls_cert.public_bytes(serialization.Encoding.PEM).decode("utf-8"))
    -
    -
    -def export_env(cluster_name, namespace):
    -    tls_dir = os.path.join(os.getcwd(), f"tls-{cluster_name}-{namespace}")
    -    os.environ["RAY_USE_TLS"] = "1"
    -    os.environ["RAY_TLS_SERVER_CERT"] = os.path.join(tls_dir, "tls.crt")
    -    os.environ["RAY_TLS_SERVER_KEY"] = os.path.join(tls_dir, "tls.key")
    -    os.environ["RAY_TLS_CA_CERT"] = os.path.join(tls_dir, "ca.crt")
    -
    -
    -
    -
    -
    -
    -
    -

    Functions

    -
    -
    -def export_env(cluster_name, namespace) -
    -
    -
    -
    - -Expand source code - -
    def export_env(cluster_name, namespace):
    -    tls_dir = os.path.join(os.getcwd(), f"tls-{cluster_name}-{namespace}")
    -    os.environ["RAY_USE_TLS"] = "1"
    -    os.environ["RAY_TLS_SERVER_CERT"] = os.path.join(tls_dir, "tls.crt")
    -    os.environ["RAY_TLS_SERVER_KEY"] = os.path.join(tls_dir, "tls.key")
    -    os.environ["RAY_TLS_CA_CERT"] = os.path.join(tls_dir, "ca.crt")
    -
    -
    -
    -def generate_ca_cert(days: int = 30) -
    -
    -
    -
    - -Expand source code - -
    def generate_ca_cert(days: int = 30):
    -    # Generate base64 encoded ca.key and ca.cert
    -    # Similar to:
    -    # openssl req -x509 -nodes -newkey rsa:2048 -keyout ca.key -days 1826 -out ca.crt -subj '/CN=root-ca'
    -    # base64 -i ca.crt -i ca.key
    -
    -    private_key = rsa.generate_private_key(
    -        public_exponent=65537,
    -        key_size=2048,
    -    )
    -
    -    key = base64.b64encode(
    -        private_key.private_bytes(
    -            serialization.Encoding.PEM,
    -            serialization.PrivateFormat.PKCS8,
    -            serialization.NoEncryption(),
    -        )
    -    ).decode("utf-8")
    -
    -    # Generate Certificate
    -    one_day = datetime.timedelta(1, 0, 0)
    -    public_key = private_key.public_key()
    -    builder = (
    -        x509.CertificateBuilder()
    -        .subject_name(
    -            x509.Name(
    -                [
    -                    x509.NameAttribute(NameOID.COMMON_NAME, "root-ca"),
    -                ]
    -            )
    -        )
    -        .issuer_name(
    -            x509.Name(
    -                [
    -                    x509.NameAttribute(NameOID.COMMON_NAME, "root-ca"),
    -                ]
    -            )
    -        )
    -        .not_valid_before(datetime.datetime.today() - one_day)
    -        .not_valid_after(datetime.datetime.today() + (one_day * days))
    -        .serial_number(x509.random_serial_number())
    -        .public_key(public_key)
    -    )
    -    certificate = base64.b64encode(
    -        builder.sign(private_key=private_key, algorithm=hashes.SHA256()).public_bytes(
    -            serialization.Encoding.PEM
    -        )
    -    ).decode("utf-8")
    -    return key, certificate
    -
    -
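A quick local illustration of this helper (no cluster access required); both returned values are base64-encoded PEM blobs suitable for placing in a Kubernetes secret.

from codeflare_sdk.utils.generate_cert import generate_ca_cert

key, certificate = generate_ca_cert(days=60)
print(key[:40], "...")          # base64-encoded PKCS8 private key
print(certificate[:40], "...")  # base64-encoded self-signed CA certificate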
    -
    -def generate_tls_cert(cluster_name, namespace, days=30) -
    -
    -
    -
    - -Expand source code - -
    def generate_tls_cert(cluster_name, namespace, days=30):
    -    # Create a folder tls-<cluster>-<namespace> and store three files: ca.crt, tls.crt, and tls.key
    -    tls_dir = os.path.join(os.getcwd(), f"tls-{cluster_name}-{namespace}")
    -    if not os.path.exists(tls_dir):
    -        os.makedirs(tls_dir)
    -
    -    # Similar to:
    -    # oc get secret ca-secret-<cluster-name> -o template='{{index .data "ca.key"}}'
    -    # oc get secret ca-secret-<cluster-name> -o template='{{index .data "ca.crt"}}'|base64 -d > ${TLSDIR}/ca.crt
    -    config_check()
    -    v1 = client.CoreV1Api(get_api_client())
    -
    -    # Secrets have a suffix appended to the end so we must list them and gather the secret that includes cluster_name-ca-secret-
    -    secret_name = get_secret_name(cluster_name, namespace, v1)
    -    secret = v1.read_namespaced_secret(secret_name, namespace).data
    -
    -    ca_cert = secret.get("ca.crt")
    -    ca_key = secret.get("ca.key")
    -
    -    with open(os.path.join(tls_dir, "ca.crt"), "w") as f:
    -        f.write(base64.b64decode(ca_cert).decode("utf-8"))
    -
    -    # Generate tls.key and signed tls.cert locally for ray client
    -    # Similar to running these commands:
    -    # openssl req -nodes -newkey rsa:2048 -keyout ${TLSDIR}/tls.key -out ${TLSDIR}/tls.csr -subj '/CN=local'
    -    # cat <<EOF >${TLSDIR}/domain.ext
    -    # authorityKeyIdentifier=keyid,issuer
    -    # basicConstraints=CA:FALSE
    -    # subjectAltName = @alt_names
    -    # [alt_names]
    -    # DNS.1 = 127.0.0.1
    -    # DNS.2 = localhost
    -    # EOF
    -    # openssl x509 -req -CA ${TLSDIR}/ca.crt -CAkey ${TLSDIR}/ca.key -in ${TLSDIR}/tls.csr -out ${TLSDIR}/tls.crt -days 365 -CAcreateserial -extfile ${TLSDIR}/domain.ext
    -    key = rsa.generate_private_key(
    -        public_exponent=65537,
    -        key_size=2048,
    -    )
    -
    -    tls_key = key.private_bytes(
    -        serialization.Encoding.PEM,
    -        serialization.PrivateFormat.PKCS8,
    -        serialization.NoEncryption(),
    -    )
    -    with open(os.path.join(tls_dir, "tls.key"), "w") as f:
    -        f.write(tls_key.decode("utf-8"))
    -
    -    one_day = datetime.timedelta(1, 0, 0)
    -    tls_cert = (
    -        x509.CertificateBuilder()
    -        .issuer_name(
    -            x509.Name(
    -                [
    -                    x509.NameAttribute(NameOID.COMMON_NAME, "root-ca"),
    -                ]
    -            )
    -        )
    -        .subject_name(
    -            x509.Name(
    -                [
    -                    x509.NameAttribute(NameOID.COMMON_NAME, "local"),
    -                ]
    -            )
    -        )
    -        .public_key(key.public_key())
    -        .not_valid_before(datetime.datetime.today() - one_day)
    -        .not_valid_after(datetime.datetime.today() + (one_day * days))
    -        .serial_number(x509.random_serial_number())
    -        .add_extension(
    -            x509.SubjectAlternativeName(
    -                [x509.DNSName("localhost"), x509.DNSName("127.0.0.1")]
    -            ),
    -            False,
    -        )
    -        .sign(
    -            serialization.load_pem_private_key(base64.b64decode(ca_key), None),
    -            hashes.SHA256(),
    -        )
    -    )
    -
    -    with open(os.path.join(tls_dir, "tls.crt"), "w") as f:
    -        f.write(tls_cert.public_bytes(serialization.Encoding.PEM).decode("utf-8"))
    -
    -
    -
    -def get_secret_name(cluster_name, namespace, api_instance) -
    -
    -
    -
    - -Expand source code - -
    def get_secret_name(cluster_name, namespace, api_instance):
    -    label_selector = f"ray.openshift.ai/cluster-name={cluster_name}"
    -    try:
    -        secrets = api_instance.list_namespaced_secret(
    -            namespace, label_selector=label_selector
    -        )
    -        for secret in secrets.items:
    -            if (
    -                f"{cluster_name}-ca-secret-" in secret.metadata.name
    -            ):  # Oauth secret share the same label this conditional is to make things more specific
    -                return secret.metadata.name
    -            else:
    -                continue
    -        raise KeyError(f"Unable to gather secret name for {cluster_name}")
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -
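Putting the helpers above together, a hedged sketch of the client-side TLS workflow; the cluster name and namespace are placeholders, and the calls assume the Ray cluster's CA secret already exists in that namespace.

from codeflare_sdk.utils.generate_cert import generate_tls_cert, export_env

# Writes ca.crt, tls.crt, and tls.key into ./tls-example-cluster-example-ns
generate_tls_cert("example-cluster", "example-ns")

# Points RAY_USE_TLS and the RAY_TLS_* variables at those files so ray.init() can use them.
export_env("example-cluster", "example-ns")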
    -
    -
    -
    -
    -
    - -
diff --git a/docs/detailed-documentation/utils/generate_yaml.html b/docs/detailed-documentation/utils/generate_yaml.html
deleted file mode 100644
index c2a7bb34..00000000
--- a/docs/detailed-documentation/utils/generate_yaml.html
+++ /dev/null
@@ -1,951 +0,0 @@
-codeflare_sdk.utils.generate_yaml API documentation
    -
    -
    -

    Module codeflare_sdk.utils.generate_yaml

    -
    -
    -

    This sub-module exists primarily to be used internally by the Cluster object (in the cluster sub-module) for AppWrapper generation.

    -
    - -Expand source code - -
    # Copyright 2022 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -This sub-module exists primarily to be used internally by the Cluster object
    -(in the cluster sub-module) for AppWrapper generation.
    -"""
    -
    -import json
    -from typing import Optional
    -import typing
    -import yaml
    -import sys
    -import os
    -import argparse
    -import uuid
    -from kubernetes import client, config
    -from .kube_api_helpers import _kube_api_error_handling
    -from ..cluster.auth import get_api_client, config_check
    -from os import urandom
    -from base64 import b64encode
    -from urllib3.util import parse_url
    -from kubernetes.client.exceptions import ApiException
    -import codeflare_sdk
    -
    -
    -def read_template(template):
    -    with open(template, "r") as stream:
    -        try:
    -            return yaml.safe_load(stream)
    -        except yaml.YAMLError as exc:
    -            print(exc)
    -
    -
    -def gen_names(name):
    -    if not name:
    -        gen_id = str(uuid.uuid4())
    -        appwrapper_name = "appwrapper-" + gen_id
    -        cluster_name = "cluster-" + gen_id
    -        return appwrapper_name, cluster_name
    -    else:
    -        return name, name
    -
    -
    -# Check if the routes api exists
    -def is_openshift_cluster():
    -    try:
    -        config_check()
    -        for api in client.ApisApi(get_api_client()).get_api_versions().groups:
    -            for v in api.versions:
    -                if "route.openshift.io/v1" in v.group_version:
    -                    return True
    -        else:
    -            return False
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -
    -def is_kind_cluster():
    -    try:
    -        config_check()
    -        v1 = client.CoreV1Api()
    -        label_selector = "kubernetes.io/hostname=kind-control-plane"
    -        nodes = v1.list_node(label_selector=label_selector)
    -        # If we find one or more nodes with the label, assume it's a KinD cluster
    -        return len(nodes.items) > 0
    -    except Exception as e:
    -        print(f"Error checking if cluster is KinD: {e}")
    -        return False
    -
    -
    -def update_names(
    -    cluster_yaml: dict,
    -    cluster: "codeflare_sdk.cluster.Cluster",
    -):
    -    metadata = cluster_yaml.get("metadata")
    -    metadata["name"] = cluster.config.name
    -    metadata["namespace"] = cluster.config.namespace
    -
    -
    -def update_image(spec, image):
    -    containers = spec.get("containers")
    -    if image != "":
    -        for container in containers:
    -            container["image"] = image
    -
    -
    -def update_image_pull_secrets(spec, image_pull_secrets):
    -    template_secrets = spec.get("imagePullSecrets", [])
    -    spec["imagePullSecrets"] = template_secrets + [
    -        {"name": x} for x in image_pull_secrets
    -    ]
    -
    -
    -def update_env(spec, env):
    -    containers = spec.get("containers")
    -    for container in containers:
    -        if env:
    -            if "env" in container:
    -                container["env"].extend(env)
    -            else:
    -                container["env"] = env
    -
    -
    -def update_resources(
    -    spec,
    -    cpu_requests,
    -    cpu_limits,
    -    memory_requests,
    -    memory_limits,
    -    custom_resources,
    -):
    -    container = spec.get("containers")
    -    for resource in container:
    -        requests = resource.get("resources").get("requests")
    -        if requests is not None:
    -            requests["cpu"] = cpu_requests
    -            requests["memory"] = memory_requests
    -        limits = resource.get("resources").get("limits")
    -        if limits is not None:
    -            limits["cpu"] = cpu_limits
    -            limits["memory"] = memory_limits
    -        for k in custom_resources.keys():
    -            limits[k] = custom_resources[k]
    -            requests[k] = custom_resources[k]
    -
    -
    -def head_worker_gpu_count_from_cluster(
    -    cluster: "codeflare_sdk.cluster.Cluster",
    -) -> typing.Tuple[int, int]:
    -    head_gpus = 0
    -    worker_gpus = 0
    -    for k in cluster.config.head_extended_resource_requests.keys():
    -        resource_type = cluster.config.extended_resource_mapping[k]
    -        if resource_type == "GPU":
    -            head_gpus += int(cluster.config.head_extended_resource_requests[k])
    -    for k in cluster.config.worker_extended_resource_requests.keys():
    -        resource_type = cluster.config.extended_resource_mapping[k]
    -        if resource_type == "GPU":
    -            worker_gpus += int(cluster.config.worker_extended_resource_requests[k])
    -
    -    return head_gpus, worker_gpus
    -
    -
    -FORBIDDEN_CUSTOM_RESOURCE_TYPES = ["GPU", "CPU", "memory"]
    -
    -
    -def head_worker_resources_from_cluster(
    -    cluster: "codeflare_sdk.cluster.Cluster",
    -) -> typing.Tuple[dict, dict]:
    -    to_return = {}, {}
    -    for k in cluster.config.head_extended_resource_requests.keys():
    -        resource_type = cluster.config.extended_resource_mapping[k]
    -        if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES:
    -            continue
    -        to_return[0][resource_type] = cluster.config.head_extended_resource_requests[
    -            k
    -        ] + to_return[0].get(resource_type, 0)
    -
    -    for k in cluster.config.worker_extended_resource_requests.keys():
    -        resource_type = cluster.config.extended_resource_mapping[k]
    -        if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES:
    -            continue
    -        to_return[1][resource_type] = cluster.config.worker_extended_resource_requests[
    -            k
    -        ] + to_return[1].get(resource_type, 0)
    -    return to_return
    -
    -
    -def update_nodes(
    -    ray_cluster_dict: dict,
    -    cluster: "codeflare_sdk.cluster.Cluster",
    -):
    -    head = ray_cluster_dict.get("spec").get("headGroupSpec")
    -    worker = ray_cluster_dict.get("spec").get("workerGroupSpecs")[0]
    -    head_gpus, worker_gpus = head_worker_gpu_count_from_cluster(cluster)
    -    head_resources, worker_resources = head_worker_resources_from_cluster(cluster)
    -    head_resources = json.dumps(head_resources).replace('"', '\\"')
    -    head_resources = f'"{head_resources}"'
    -    worker_resources = json.dumps(worker_resources).replace('"', '\\"')
    -    worker_resources = f'"{worker_resources}"'
    -    head["rayStartParams"]["num-gpus"] = str(head_gpus)
    -    head["rayStartParams"]["resources"] = head_resources
    -
    -    # Head counts as first worker
    -    worker["replicas"] = cluster.config.num_workers
    -    worker["minReplicas"] = cluster.config.num_workers
    -    worker["maxReplicas"] = cluster.config.num_workers
    -    worker["groupName"] = "small-group-" + cluster.config.name
    -    worker["rayStartParams"]["num-gpus"] = str(worker_gpus)
    -    worker["rayStartParams"]["resources"] = worker_resources
    -
    -    for comp in [head, worker]:
    -        spec = comp.get("template").get("spec")
    -        update_image_pull_secrets(spec, cluster.config.image_pull_secrets)
    -        update_image(spec, cluster.config.image)
    -        update_env(spec, cluster.config.envs)
    -        if comp == head:
    -            # TODO: Eventually add head node configuration outside of template
    -            update_resources(
    -                spec,
    -                cluster.config.head_cpu_requests,
    -                cluster.config.head_cpu_limits,
    -                cluster.config.head_memory_requests,
    -                cluster.config.head_memory_limits,
    -                cluster.config.head_extended_resource_requests,
    -            )
    -        else:
    -            update_resources(
    -                spec,
    -                cluster.config.worker_cpu_requests,
    -                cluster.config.worker_cpu_limits,
    -                cluster.config.worker_memory_requests,
    -                cluster.config.worker_memory_limits,
    -                cluster.config.worker_extended_resource_requests,
    -            )
    -
    -
    -def del_from_list_by_name(l: list, target: typing.List[str]) -> list:
    -    return [x for x in l if x["name"] not in target]
    -
    -
    -def get_default_kueue_name(namespace: str):
    -    # If the local queue is set, use it. Otherwise, try to use the default queue.
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        local_queues = api_instance.list_namespaced_custom_object(
    -            group="kueue.x-k8s.io",
    -            version="v1beta1",
    -            namespace=namespace,
    -            plural="localqueues",
    -        )
    -    except ApiException as e:  # pragma: no cover
    -        if e.status == 404 or e.status == 403:
    -            return
    -        else:
    -            return _kube_api_error_handling(e)
    -    for lq in local_queues["items"]:
    -        if (
    -            "annotations" in lq["metadata"]
    -            and "kueue.x-k8s.io/default-queue" in lq["metadata"]["annotations"]
    -            and lq["metadata"]["annotations"]["kueue.x-k8s.io/default-queue"].lower()
    -            == "true"
    -        ):
    -            return lq["metadata"]["name"]
    -
    -
    -def local_queue_exists(namespace: str, local_queue_name: str):
    -    # get all local queues in the namespace
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        local_queues = api_instance.list_namespaced_custom_object(
    -            group="kueue.x-k8s.io",
    -            version="v1beta1",
    -            namespace=namespace,
    -            plural="localqueues",
    -        )
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -    # check if local queue with the name provided in cluster config exists
    -    for lq in local_queues["items"]:
    -        if lq["metadata"]["name"] == local_queue_name:
    -            return True
    -    return False
    -
    -
    -def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]):
    -    lq_name = local_queue or get_default_kueue_name(namespace)
    -    if lq_name == None:
    -        return
    -    elif not local_queue_exists(namespace, lq_name):
    -        raise ValueError(
    -            "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration"
    -        )
    -    if not "labels" in item["metadata"]:
    -        item["metadata"]["labels"] = {}
    -    item["metadata"]["labels"].update({"kueue.x-k8s.io/queue-name": lq_name})
    -
    -
    -def augment_labels(item: dict, labels: dict):
    -    if not "labels" in item["metadata"]:
    -        item["metadata"]["labels"] = {}
    -    item["metadata"]["labels"].update(labels)
    -
    -
    -def notebook_annotations(item: dict):
    -    nb_prefix = os.environ.get("NB_PREFIX")
    -    if nb_prefix:
    -        if not "annotations" in item["metadata"]:
    -            item["metadata"]["annotations"] = {}
    -        item["metadata"]["annotations"].update(
    -            {"app.kubernetes.io/managed-by": nb_prefix}
    -        )
    -
    -
    -def wrap_cluster(cluster_yaml: dict, appwrapper_name: str, namespace: str):
    -    return {
    -        "apiVersion": "workload.codeflare.dev/v1beta2",
    -        "kind": "AppWrapper",
    -        "metadata": {"name": appwrapper_name, "namespace": namespace},
    -        "spec": {"components": [{"template": cluster_yaml}]},
    -    }
    -
    -
    -def write_user_yaml(user_yaml, output_file_name):
    -    # Create the directory if it doesn't exist
    -    directory_path = os.path.dirname(output_file_name)
    -    if not os.path.exists(directory_path):
    -        os.makedirs(directory_path)
    -
    -    with open(output_file_name, "w") as outfile:
    -        yaml.dump(user_yaml, outfile, default_flow_style=False)
    -
    -    print(f"Written to: {output_file_name}")
    -
    -
    -def generate_appwrapper(cluster: "codeflare_sdk.cluster.Cluster"):
    -    cluster_yaml = read_template(cluster.config.template)
    -    appwrapper_name, _ = gen_names(cluster.config.name)
    -    update_names(
    -        cluster_yaml,
    -        cluster,
    -    )
    -    update_nodes(cluster_yaml, cluster)
    -    augment_labels(cluster_yaml, cluster.config.labels)
    -    notebook_annotations(cluster_yaml)
    -    user_yaml = (
    -        wrap_cluster(cluster_yaml, appwrapper_name, cluster.config.namespace)
    -        if cluster.config.appwrapper
    -        else cluster_yaml
    -    )
    -
    -    add_queue_label(user_yaml, cluster.config.namespace, cluster.config.local_queue)
    -
    -    if cluster.config.write_to_file:
    -        directory_path = os.path.expanduser("~/.codeflare/resources/")
    -        outfile = os.path.join(directory_path, appwrapper_name + ".yaml")
    -        write_user_yaml(user_yaml, outfile)
    -        return outfile
    -    else:
    -        user_yaml = yaml.dump(user_yaml)
    -        print(f"Yaml resources loaded for {cluster.config.name}")
    -        return user_yaml
    -
    -
    -
    -
    -
    -
    -
    -
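For orientation, the following is a rough, hedged sketch of how `generate_appwrapper` is typically driven. It is illustration only: it assumes the SDK's `Cluster`/`ClusterConfiguration` objects expose the configuration fields referenced in the source above (`name`, `namespace`, `num_workers`, `write_to_file`, `appwrapper`, ...) and that a reachable kubeconfig exists so the Kueue local-queue lookup in `add_queue_label()` can succeed.

``` python
# Hedged sketch, not part of the module source above.
from codeflare_sdk.cluster.cluster import Cluster
from codeflare_sdk.cluster.config import ClusterConfiguration
from codeflare_sdk.utils.generate_yaml import generate_appwrapper

config = ClusterConfiguration(
    name="raytest",
    namespace="default",
    num_workers=2,
    write_to_file=False,  # return YAML as a string instead of writing ~/.codeflare/resources/<name>.yaml
)
cluster = Cluster(config)

# Reads the template, applies the configuration, optionally wraps the RayCluster
# in an AppWrapper, and returns either a YAML string or the written file path.
resource = generate_appwrapper(cluster)
print(resource)
```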

    Functions

    -
    -
    -def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]) -
    -
    -
    -
    def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]):
    -    lq_name = local_queue or get_default_kueue_name(namespace)
    -    if lq_name == None:
    -        return
    -    elif not local_queue_exists(namespace, lq_name):
    -        raise ValueError(
    -            "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration"
    -        )
    -    if not "labels" in item["metadata"]:
    -        item["metadata"]["labels"] = {}
    -    item["metadata"]["labels"].update({"kueue.x-k8s.io/queue-name": lq_name})
    -
    -
    -
    -def augment_labels(item: dict, labels: dict) -
    -
    -
    -
    def augment_labels(item: dict, labels: dict):
    -    if not "labels" in item["metadata"]:
    -        item["metadata"]["labels"] = {}
    -    item["metadata"]["labels"].update(labels)
    -
    -
    -
    -def del_from_list_by_name(l: list, target: List[str]) ‑> list -
    -
    -
    -
    def del_from_list_by_name(l: list, target: typing.List[str]) -> list:
    -    return [x for x in l if x["name"] not in target]
    -
    -
    -
    -def gen_names(name) -
    -
    -
    -
    def gen_names(name):
    -    if not name:
    -        gen_id = str(uuid.uuid4())
    -        appwrapper_name = "appwrapper-" + gen_id
    -        cluster_name = "cluster-" + gen_id
    -        return appwrapper_name, cluster_name
    -    else:
    -        return name, name
    -
    -
    -
    -def generate_appwrapper(cluster: codeflare_sdk.cluster.Cluster) -
    -
    -
    -
    def generate_appwrapper(cluster: "codeflare_sdk.cluster.Cluster"):
    -    cluster_yaml = read_template(cluster.config.template)
    -    appwrapper_name, _ = gen_names(cluster.config.name)
    -    update_names(
    -        cluster_yaml,
    -        cluster,
    -    )
    -    update_nodes(cluster_yaml, cluster)
    -    augment_labels(cluster_yaml, cluster.config.labels)
    -    notebook_annotations(cluster_yaml)
    -    user_yaml = (
    -        wrap_cluster(cluster_yaml, appwrapper_name, cluster.config.namespace)
    -        if cluster.config.appwrapper
    -        else cluster_yaml
    -    )
    -
    -    add_queue_label(user_yaml, cluster.config.namespace, cluster.config.local_queue)
    -
    -    if cluster.config.write_to_file:
    -        directory_path = os.path.expanduser("~/.codeflare/resources/")
    -        outfile = os.path.join(directory_path, appwrapper_name + ".yaml")
    -        write_user_yaml(user_yaml, outfile)
    -        return outfile
    -    else:
    -        user_yaml = yaml.dump(user_yaml)
    -        print(f"Yaml resources loaded for {cluster.config.name}")
    -        return user_yaml
    -
    -
    -
    -def get_default_kueue_name(namespace: str) -
    -
    -
    -
    def get_default_kueue_name(namespace: str):
    -    # If the local queue is set, use it. Otherwise, try to use the default queue.
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        local_queues = api_instance.list_namespaced_custom_object(
    -            group="kueue.x-k8s.io",
    -            version="v1beta1",
    -            namespace=namespace,
    -            plural="localqueues",
    -        )
    -    except ApiException as e:  # pragma: no cover
    -        if e.status == 404 or e.status == 403:
    -            return
    -        else:
    -            return _kube_api_error_handling(e)
    -    for lq in local_queues["items"]:
    -        if (
    -            "annotations" in lq["metadata"]
    -            and "kueue.x-k8s.io/default-queue" in lq["metadata"]["annotations"]
    -            and lq["metadata"]["annotations"]["kueue.x-k8s.io/default-queue"].lower()
    -            == "true"
    -        ):
    -            return lq["metadata"]["name"]
    -
    -
    -
    -def head_worker_gpu_count_from_cluster(cluster: codeflare_sdk.cluster.Cluster) ‑> Tuple[int, int] -
    -
    -
    -
    def head_worker_gpu_count_from_cluster(
    -    cluster: "codeflare_sdk.cluster.Cluster",
    -) -> typing.Tuple[int, int]:
    -    head_gpus = 0
    -    worker_gpus = 0
    -    for k in cluster.config.head_extended_resource_requests.keys():
    -        resource_type = cluster.config.extended_resource_mapping[k]
    -        if resource_type == "GPU":
    -            head_gpus += int(cluster.config.head_extended_resource_requests[k])
    -    for k in cluster.config.worker_extended_resource_requests.keys():
    -        resource_type = cluster.config.extended_resource_mapping[k]
    -        if resource_type == "GPU":
    -            worker_gpus += int(cluster.config.worker_extended_resource_requests[k])
    -
    -    return head_gpus, worker_gpus
    -
    -
    -
    -def head_worker_resources_from_cluster(cluster: codeflare_sdk.cluster.Cluster) ‑> Tuple[dict, dict] -
    -
    -
    -
    def head_worker_resources_from_cluster(
    -    cluster: "codeflare_sdk.cluster.Cluster",
    -) -> typing.Tuple[dict, dict]:
    -    to_return = {}, {}
    -    for k in cluster.config.head_extended_resource_requests.keys():
    -        resource_type = cluster.config.extended_resource_mapping[k]
    -        if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES:
    -            continue
    -        to_return[0][resource_type] = cluster.config.head_extended_resource_requests[
    -            k
    -        ] + to_return[0].get(resource_type, 0)
    -
    -    for k in cluster.config.worker_extended_resource_requests.keys():
    -        resource_type = cluster.config.extended_resource_mapping[k]
    -        if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES:
    -            continue
    -        to_return[1][resource_type] = cluster.config.worker_extended_resource_requests[
    -            k
    -        ] + to_return[1].get(resource_type, 0)
    -    return to_return
    -
    -
    -
    -def is_kind_cluster() -
    -
    -
    -
    def is_kind_cluster():
    -    try:
    -        config_check()
    -        v1 = client.CoreV1Api()
    -        label_selector = "kubernetes.io/hostname=kind-control-plane"
    -        nodes = v1.list_node(label_selector=label_selector)
    -        # If we find one or more nodes with the label, assume it's a KinD cluster
    -        return len(nodes.items) > 0
    -    except Exception as e:
    -        print(f"Error checking if cluster is KinD: {e}")
    -        return False
    -
    -
    -
    -def is_openshift_cluster() -
    -
    -
    -
    def is_openshift_cluster():
    -    try:
    -        config_check()
    -        for api in client.ApisApi(get_api_client()).get_api_versions().groups:
    -            for v in api.versions:
    -                if "route.openshift.io/v1" in v.group_version:
    -                    return True
    -        else:
    -            return False
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -
    -
    -
    -def local_queue_exists(namespace: str, local_queue_name: str) -
    -
    -
    -
    def local_queue_exists(namespace: str, local_queue_name: str):
    -    # get all local queues in the namespace
    -    try:
    -        config_check()
    -        api_instance = client.CustomObjectsApi(get_api_client())
    -        local_queues = api_instance.list_namespaced_custom_object(
    -            group="kueue.x-k8s.io",
    -            version="v1beta1",
    -            namespace=namespace,
    -            plural="localqueues",
    -        )
    -    except Exception as e:  # pragma: no cover
    -        return _kube_api_error_handling(e)
    -    # check if local queue with the name provided in cluster config exists
    -    for lq in local_queues["items"]:
    -        if lq["metadata"]["name"] == local_queue_name:
    -            return True
    -    return False
    -
    -
    -
    -def notebook_annotations(item: dict) -
    -
    -
    -
    def notebook_annotations(item: dict):
    -    nb_prefix = os.environ.get("NB_PREFIX")
    -    if nb_prefix:
    -        if not "annotations" in item["metadata"]:
    -            item["metadata"]["annotations"] = {}
    -        item["metadata"]["annotations"].update(
    -            {"app.kubernetes.io/managed-by": nb_prefix}
    -        )
    -
    -
    -
    -def read_template(template) -
    -
    -
    -
    def read_template(template):
    -    with open(template, "r") as stream:
    -        try:
    -            return yaml.safe_load(stream)
    -        except yaml.YAMLError as exc:
    -            print(exc)
    -
    -
    -
    -def update_env(spec, env) -
    -
    -
    -
    def update_env(spec, env):
    -    containers = spec.get("containers")
    -    for container in containers:
    -        if env:
    -            if "env" in container:
    -                container["env"].extend(env)
    -            else:
    -                container["env"] = env
    -
    -
    -
    -def update_image(spec, image) -
    -
    -
    -
    def update_image(spec, image):
    -    containers = spec.get("containers")
    -    if image != "":
    -        for container in containers:
    -            container["image"] = image
    -
    -
    -
    -def update_image_pull_secrets(spec, image_pull_secrets) -
    -
    -
    -
    def update_image_pull_secrets(spec, image_pull_secrets):
    -    template_secrets = spec.get("imagePullSecrets", [])
    -    spec["imagePullSecrets"] = template_secrets + [
    -        {"name": x} for x in image_pull_secrets
    -    ]
    -
    -
    -
    -def update_names(cluster_yaml: dict, cluster: codeflare_sdk.cluster.Cluster) -
    -
    -
    -
    def update_names(
    -    cluster_yaml: dict,
    -    cluster: "codeflare_sdk.cluster.Cluster",
    -):
    -    metadata = cluster_yaml.get("metadata")
    -    metadata["name"] = cluster.config.name
    -    metadata["namespace"] = cluster.config.namespace
    -
    -
    -
    -def update_nodes(ray_cluster_dict: dict, cluster: codeflare_sdk.cluster.Cluster) -
    -
    -
    -
    def update_nodes(
    -    ray_cluster_dict: dict,
    -    cluster: "codeflare_sdk.cluster.Cluster",
    -):
    -    head = ray_cluster_dict.get("spec").get("headGroupSpec")
    -    worker = ray_cluster_dict.get("spec").get("workerGroupSpecs")[0]
    -    head_gpus, worker_gpus = head_worker_gpu_count_from_cluster(cluster)
    -    head_resources, worker_resources = head_worker_resources_from_cluster(cluster)
    -    head_resources = json.dumps(head_resources).replace('"', '\\"')
    -    head_resources = f'"{head_resources}"'
    -    worker_resources = json.dumps(worker_resources).replace('"', '\\"')
    -    worker_resources = f'"{worker_resources}"'
    -    head["rayStartParams"]["num-gpus"] = str(head_gpus)
    -    head["rayStartParams"]["resources"] = head_resources
    -
    -    # Head counts as first worker
    -    worker["replicas"] = cluster.config.num_workers
    -    worker["minReplicas"] = cluster.config.num_workers
    -    worker["maxReplicas"] = cluster.config.num_workers
    -    worker["groupName"] = "small-group-" + cluster.config.name
    -    worker["rayStartParams"]["num-gpus"] = str(worker_gpus)
    -    worker["rayStartParams"]["resources"] = worker_resources
    -
    -    for comp in [head, worker]:
    -        spec = comp.get("template").get("spec")
    -        update_image_pull_secrets(spec, cluster.config.image_pull_secrets)
    -        update_image(spec, cluster.config.image)
    -        update_env(spec, cluster.config.envs)
    -        if comp == head:
    -            # TODO: Eventually add head node configuration outside of template
    -            update_resources(
    -                spec,
    -                cluster.config.head_cpu_requests,
    -                cluster.config.head_cpu_limits,
    -                cluster.config.head_memory_requests,
    -                cluster.config.head_memory_limits,
    -                cluster.config.head_extended_resource_requests,
    -            )
    -        else:
    -            update_resources(
    -                spec,
    -                cluster.config.worker_cpu_requests,
    -                cluster.config.worker_cpu_limits,
    -                cluster.config.worker_memory_requests,
    -                cluster.config.worker_memory_limits,
    -                cluster.config.worker_extended_resource_requests,
    -            )
    -
    -
    -
    -def update_resources(spec, cpu_requests, cpu_limits, memory_requests, memory_limits, custom_resources) -
    -
    -
    -
    def update_resources(
    -    spec,
    -    cpu_requests,
    -    cpu_limits,
    -    memory_requests,
    -    memory_limits,
    -    custom_resources,
    -):
    -    container = spec.get("containers")
    -    for resource in container:
    -        requests = resource.get("resources").get("requests")
    -        if requests is not None:
    -            requests["cpu"] = cpu_requests
    -            requests["memory"] = memory_requests
    -        limits = resource.get("resources").get("limits")
    -        if limits is not None:
    -            limits["cpu"] = cpu_limits
    -            limits["memory"] = memory_limits
    -        for k in custom_resources.keys():
    -            limits[k] = custom_resources[k]
    -            requests[k] = custom_resources[k]
    -
    -
    -
    -def wrap_cluster(cluster_yaml: dict, appwrapper_name: str, namespace: str) -
    -
    -
    -
    def wrap_cluster(cluster_yaml: dict, appwrapper_name: str, namespace: str):
    -    return {
    -        "apiVersion": "workload.codeflare.dev/v1beta2",
    -        "kind": "AppWrapper",
    -        "metadata": {"name": appwrapper_name, "namespace": namespace},
    -        "spec": {"components": [{"template": cluster_yaml}]},
    -    }
    -
    -
    -
    -def write_user_yaml(user_yaml, output_file_name) -
    -
    -
    -
    def write_user_yaml(user_yaml, output_file_name):
    -    # Create the directory if it doesn't exist
    -    directory_path = os.path.dirname(output_file_name)
    -    if not os.path.exists(directory_path):
    -        os.makedirs(directory_path)
    -
    -    with open(output_file_name, "w") as outfile:
    -        yaml.dump(user_yaml, outfile, default_flow_style=False)
    -
    -    print(f"Written to: {output_file_name}")
    -
    -
    -
    -
    -
    -
    -
    - -
-
-
-
diff --git a/docs/detailed-documentation/utils/index.html b/docs/detailed-documentation/utils/index.html
deleted file mode 100644
index 4a65cc39..00000000
--- a/docs/detailed-documentation/utils/index.html
+++ /dev/null
@@ -1,88 +0,0 @@
-codeflare_sdk.utils API documentation
    -
    -
    -

    Module codeflare_sdk.utils

    -
    -
    -
    -
    -

    Sub-modules

    -
    -
    codeflare_sdk.utils.demos
    -
    -
    -
    -
    codeflare_sdk.utils.generate_cert
    -
    -
    -
    -
    codeflare_sdk.utils.generate_yaml
    -
    -

This sub-module exists primarily to be used internally by the Cluster object (in the cluster sub-module) for AppWrapper generation.

    -
    -
    codeflare_sdk.utils.kube_api_helpers
    -
    -

This sub-module exists primarily to be used internally for any Kubernetes API error handling or wrapping.

    -
    -
    codeflare_sdk.utils.pretty_print
    -
    -

This sub-module exists primarily to be used internally by the Cluster object (in the cluster sub-module) for pretty-printing cluster status and details.

    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
-
-
-
diff --git a/docs/detailed-documentation/utils/kube_api_helpers.html b/docs/detailed-documentation/utils/kube_api_helpers.html
deleted file mode 100644
index 6bf6fe81..00000000
--- a/docs/detailed-documentation/utils/kube_api_helpers.html
+++ /dev/null
@@ -1,112 +0,0 @@
-codeflare_sdk.utils.kube_api_helpers API documentation
    -
    -
    -

    Module codeflare_sdk.utils.kube_api_helpers

    -
    -
    -

This sub-module exists primarily to be used internally for any Kubernetes API error handling or wrapping.

    -
    # Copyright 2022 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -This sub-module exists primarily to be used internally for any Kubernetes
    -API error handling or wrapping.
    -"""
    -
    -import executing
    -from kubernetes import client, config
    -from urllib3.util import parse_url
    -
    -
    -# private methods
    -def _kube_api_error_handling(
    -    e: Exception, print_error: bool = True
    -):  # pragma: no cover
    -    perm_msg = (
    -        "Action not permitted, have you put in correct/up-to-date auth credentials?"
    -    )
    -    nf_msg = "No instances found, nothing to be done."
    -    exists_msg = "Resource with this name already exists."
    -    if type(e) == config.ConfigException:
    -        raise PermissionError(perm_msg)
    -    if type(e) == executing.executing.NotOneValueFound:
    -        if print_error:
    -            print(nf_msg)
    -        return
    -    if type(e) == client.ApiException:
    -        if e.reason == "Not Found":
    -            if print_error:
    -                print(nf_msg)
    -            return
    -        elif e.reason == "Unauthorized" or e.reason == "Forbidden":
    -            if print_error:
    -                print(perm_msg)
    -            return
    -        elif e.reason == "Conflict":
    -            raise FileExistsError(exists_msg)
    -    raise e
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
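A short, hedged sketch of how this handler wraps Kubernetes API calls elsewhere in the SDK. The `config_check()`/`get_api_client()` helpers come from the SDK's auth module (as imported in generate_yaml above); the `ray.io/v1` "rayclusters" CRD coordinates are assumed here for illustration only.

``` python
# Hedged illustration, not part of the module source above.
from kubernetes import client
from codeflare_sdk.cluster.auth import config_check, get_api_client
from codeflare_sdk.utils.kube_api_helpers import _kube_api_error_handling


def list_ray_clusters(namespace: str):
    try:
        config_check()
        api = client.CustomObjectsApi(get_api_client())
        return api.list_namespaced_custom_object(
            group="ray.io",        # assumed KubeRay CRD group/version for this example
            version="v1",
            namespace=namespace,
            plural="rayclusters",
        )
    except Exception as e:
        # Converts ApiException/ConfigException into a friendly message or re-raises.
        return _kube_api_error_handling(e)
```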
    - -
-
-
-
diff --git a/docs/detailed-documentation/utils/pretty_print.html b/docs/detailed-documentation/utils/pretty_print.html
deleted file mode 100644
index f2a8d7db..00000000
--- a/docs/detailed-documentation/utils/pretty_print.html
+++ /dev/null
@@ -1,491 +0,0 @@
-codeflare_sdk.utils.pretty_print API documentation
    -
    -
    -

    Module codeflare_sdk.utils.pretty_print

    -
    -
    -

This sub-module exists primarily to be used internally by the Cluster object (in the cluster sub-module) for pretty-printing cluster status and details.

    -
    # Copyright 2022 IBM, Red Hat
    -#
    -# Licensed under the Apache License, Version 2.0 (the "License");
    -# you may not use this file except in compliance with the License.
    -# You may obtain a copy of the License at
    -#
    -#      http://www.apache.org/licenses/LICENSE-2.0
    -#
    -# Unless required by applicable law or agreed to in writing, software
    -# distributed under the License is distributed on an "AS IS" BASIS,
    -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    -# See the License for the specific language governing permissions and
    -# limitations under the License.
    -
    -"""
    -This sub-module exists primarily to be used internally by the Cluster object
    -(in the cluster sub-module) for pretty-printing cluster status and details.
    -"""
    -
    -from rich import print
    -from rich.table import Table
    -from rich.console import Console
    -from rich.layout import Layout
    -from rich.panel import Panel
    -from rich import box
    -from typing import List
    -from ..cluster.model import RayCluster, AppWrapper, RayClusterStatus
    -
    -
    -def print_no_resources_found():
    -    console = Console()
    -    console.print(Panel("[red]No resources found, have you run cluster.up() yet?"))
    -
    -
    -def print_app_wrappers_status(app_wrappers: List[AppWrapper], starting: bool = False):
    -    if not app_wrappers:
    -        print_no_resources_found()
    -        return  # shortcircuit
    -
    -    console = Console()
    -    table = Table(
    -        box=box.ASCII_DOUBLE_HEAD,
    -        title="[bold] :rocket: Cluster Queue Status :rocket:",
    -    )
    -    table.add_column("Name", style="cyan", no_wrap=True)
    -    table.add_column("Status", style="magenta")
    -
    -    for app_wrapper in app_wrappers:
    -        name = app_wrapper.name
    -        status = app_wrapper.status.value
    -        if starting:
    -            status += " (starting)"
    -        table.add_row(name, status)
    -        table.add_row("")  # empty row for spacing
    -
    -    console.print(Panel.fit(table))
    -
    -
    -def print_ray_clusters_status(app_wrappers: List[AppWrapper], starting: bool = False):
    -    if not app_wrappers:
    -        print_no_resources_found()
    -        return  # shortcircuit
    -
    -    console = Console()
    -    table = Table(
    -        box=box.ASCII_DOUBLE_HEAD,
    -        title="[bold] :rocket: Cluster Queue Status :rocket:",
    -    )
    -    table.add_column("Name", style="cyan", no_wrap=True)
    -    table.add_column("Status", style="magenta")
    -
    -    for app_wrapper in app_wrappers:
    -        name = app_wrapper.name
    -        status = app_wrapper.status.value
    -        if starting:
    -            status += " (starting)"
    -        table.add_row(name, status)
    -        table.add_row("")  # empty row for spacing
    -
    -    console.print(Panel.fit(table))
    -
    -
    -def print_cluster_status(cluster: RayCluster):
    -    "Pretty prints the status of a passed-in cluster"
    -    if not cluster:
    -        print_no_resources_found()
    -        return
    -
    -    console = Console()
    -    status = (
    -        "Active :white_heavy_check_mark:"
    -        if cluster.status == RayClusterStatus.READY
    -        else "Inactive :x:"
    -    )
    -    name = cluster.name
    -    dashboard = cluster.dashboard
    -
    -    #'table0' to display the cluster name, status, url, and dashboard link
    -    table0 = Table(box=None, show_header=False)
    -
    -    table0.add_row("[white on green][bold]Name")
    -    table0.add_row("[bold underline]" + name, status)
    -    table0.add_row()
-    # fixme hardcoded to default for now
    -    table0.add_row(
    -        f"[bold]URI:[/bold] ray://{cluster.name}-head-svc.{cluster.namespace}.svc:10001"
    -    )  # format that is used to generate the name of the service
    -    table0.add_row()
    -    table0.add_row(f"[link={dashboard} blue underline]Dashboard:link:[/link]")
    -    table0.add_row("")  # empty row for spacing
    -
    -    # table4 to display table0 and table3, one below the other
    -    table4 = Table(box=None, show_header=False)
    -    table4.add_row(table0)
    -
    -    # Encompass all details of the cluster in a single panel
    -    table5 = Table(box=None, title="[bold] :rocket: CodeFlare Cluster Status :rocket:")
    -    table5.add_row(Panel.fit(table4))
    -    console.print(table5)
    -
    -
    -def print_clusters(clusters: List[RayCluster]):
    -    if not clusters:
    -        print_no_resources_found()
    -        return  # shortcircuit
    -
    -    console = Console()
    -    title_printed = False
    -
    -    for cluster in clusters:
    -        status = (
    -            "Active :white_heavy_check_mark:"
    -            if cluster.status == RayClusterStatus.READY
    -            else "Inactive :x:"
    -        )
    -        name = cluster.name
    -        dashboard = cluster.dashboard
    -        workers = str(cluster.num_workers)
    -        memory = f"{cluster.worker_mem_requests}~{cluster.worker_mem_limits}"
    -        cpu = f"{cluster.worker_cpu_requests}~{cluster.worker_cpu_limits}"
    -        gpu = str(cluster.worker_extended_resources.get("nvidia.com/gpu", 0))
    -
    -        #'table0' to display the cluster name, status, url, and dashboard link
    -        table0 = Table(box=None, show_header=False)
    -
    -        table0.add_row("[white on green][bold]Name")
    -        table0.add_row("[bold underline]" + name, status)
    -        table0.add_row()
-        # fixme hardcoded to default for now
    -        table0.add_row(
    -            f"[bold]URI:[/bold] ray://{cluster.name}-head-svc.{cluster.namespace}.svc:10001"
    -        )  # format that is used to generate the name of the service
    -        table0.add_row()
    -        table0.add_row(f"[link={dashboard} blue underline]Dashboard:link:[/link]")
    -        table0.add_row("")  # empty row for spacing
    -
    -        #'table1' to display the worker counts
    -        table1 = Table(box=None)
    -        table1.add_row()
    -        table1.add_column("# Workers", style="magenta")
    -        table1.add_row()
    -        table1.add_row(workers)
    -        table1.add_row()
    -
    -        #'table2' to display the worker resources
    -        table2 = Table(box=None)
    -        table2.add_column("Memory", style="cyan", no_wrap=True, min_width=10)
    -        table2.add_column("CPU", style="magenta", min_width=10)
    -        table2.add_column("GPU", style="magenta", min_width=10)
    -        table2.add_row()
    -        table2.add_row(memory, cpu, gpu)
    -        table2.add_row()
    -
    -        # panels to encompass table1 and table2 into separate cards
    -        panel_1 = Panel.fit(table1, title="Workers")
    -        panel_2 = Panel.fit(table2, title="Worker specs(each)")
    -
    -        # table3 to display panel_1 and panel_2 side-by-side in a single row
    -        table3 = Table(box=None, show_header=False, title="Cluster Resources")
    -        table3.add_row(panel_1, panel_2)
    -
    -        # table4 to display table0 and table3, one below the other
    -        table4 = Table(box=None, show_header=False)
    -        table4.add_row(table0)
    -        table4.add_row(table3)
    -
    -        # Encompass all details of the cluster in a single panel
    -        if not title_printed:
    -            # If first cluster in the list, then create a table with title "Codeflare clusters".
    -            # This is done to ensure the title is center aligned on the cluster display tables, rather
    -            # than being center aligned on the console/terminal if we simply use console.print(title)
    -
    -            table5 = Table(
    -                box=None, title="[bold] :rocket: CodeFlare Cluster Details :rocket:"
    -            )
    -            table5.add_row(Panel.fit(table4))
    -            console.print(table5)
    -            title_printed = True
    -        else:
    -            console.print(Panel.fit(table4))
    -
    -
    -
    -
    -
    -
    -
    -
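These printers can also be exercised directly. The snippet below is a hedged sketch only: it assumes `AppWrapper` in `codeflare_sdk.cluster.model` is a simple model type whose only fields used by the printer are `name` and `status` (an `AppWrapperStatus` enum value); in normal use these objects are produced internally by the Cluster object.

``` python
# Hedged sketch, not part of the module source above.
from codeflare_sdk.cluster.model import AppWrapper, AppWrapperStatus
from codeflare_sdk.utils.pretty_print import print_app_wrappers_status

wrappers = [AppWrapper(name="raytest", status=AppWrapperStatus.RUNNING)]
print_app_wrappers_status(wrappers, starting=True)  # renders the queue-status table

print_app_wrappers_status([])  # falls back to print_no_resources_found()
```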

    Functions

    -
    -
    -def print_app_wrappers_status(app_wrappers: List[AppWrapper], starting: bool = False) -
    -
    -
    -
    def print_app_wrappers_status(app_wrappers: List[AppWrapper], starting: bool = False):
    -    if not app_wrappers:
    -        print_no_resources_found()
    -        return  # shortcircuit
    -
    -    console = Console()
    -    table = Table(
    -        box=box.ASCII_DOUBLE_HEAD,
    -        title="[bold] :rocket: Cluster Queue Status :rocket:",
    -    )
    -    table.add_column("Name", style="cyan", no_wrap=True)
    -    table.add_column("Status", style="magenta")
    -
    -    for app_wrapper in app_wrappers:
    -        name = app_wrapper.name
    -        status = app_wrapper.status.value
    -        if starting:
    -            status += " (starting)"
    -        table.add_row(name, status)
    -        table.add_row("")  # empty row for spacing
    -
    -    console.print(Panel.fit(table))
    -
    -
    -
    -def print_cluster_status(cluster: RayCluster) -
    -
    -

    Pretty prints the status of a passed-in cluster

    -
    def print_cluster_status(cluster: RayCluster):
    -    "Pretty prints the status of a passed-in cluster"
    -    if not cluster:
    -        print_no_resources_found()
    -        return
    -
    -    console = Console()
    -    status = (
    -        "Active :white_heavy_check_mark:"
    -        if cluster.status == RayClusterStatus.READY
    -        else "Inactive :x:"
    -    )
    -    name = cluster.name
    -    dashboard = cluster.dashboard
    -
    -    #'table0' to display the cluster name, status, url, and dashboard link
    -    table0 = Table(box=None, show_header=False)
    -
    -    table0.add_row("[white on green][bold]Name")
    -    table0.add_row("[bold underline]" + name, status)
    -    table0.add_row()
-    # fixme hardcoded to default for now
    -    table0.add_row(
    -        f"[bold]URI:[/bold] ray://{cluster.name}-head-svc.{cluster.namespace}.svc:10001"
    -    )  # format that is used to generate the name of the service
    -    table0.add_row()
    -    table0.add_row(f"[link={dashboard} blue underline]Dashboard:link:[/link]")
    -    table0.add_row("")  # empty row for spacing
    -
    -    # table4 to display table0 and table3, one below the other
    -    table4 = Table(box=None, show_header=False)
    -    table4.add_row(table0)
    -
    -    # Encompass all details of the cluster in a single panel
    -    table5 = Table(box=None, title="[bold] :rocket: CodeFlare Cluster Status :rocket:")
    -    table5.add_row(Panel.fit(table4))
    -    console.print(table5)
    -
    -
    -
    -def print_clusters(clusters: List[RayCluster]) -
    -
    -
    -
    def print_clusters(clusters: List[RayCluster]):
    -    if not clusters:
    -        print_no_resources_found()
    -        return  # shortcircuit
    -
    -    console = Console()
    -    title_printed = False
    -
    -    for cluster in clusters:
    -        status = (
    -            "Active :white_heavy_check_mark:"
    -            if cluster.status == RayClusterStatus.READY
    -            else "Inactive :x:"
    -        )
    -        name = cluster.name
    -        dashboard = cluster.dashboard
    -        workers = str(cluster.num_workers)
    -        memory = f"{cluster.worker_mem_requests}~{cluster.worker_mem_limits}"
    -        cpu = f"{cluster.worker_cpu_requests}~{cluster.worker_cpu_limits}"
    -        gpu = str(cluster.worker_extended_resources.get("nvidia.com/gpu", 0))
    -
    -        #'table0' to display the cluster name, status, url, and dashboard link
    -        table0 = Table(box=None, show_header=False)
    -
    -        table0.add_row("[white on green][bold]Name")
    -        table0.add_row("[bold underline]" + name, status)
    -        table0.add_row()
-        # fixme hardcoded to default for now
    -        table0.add_row(
    -            f"[bold]URI:[/bold] ray://{cluster.name}-head-svc.{cluster.namespace}.svc:10001"
    -        )  # format that is used to generate the name of the service
    -        table0.add_row()
    -        table0.add_row(f"[link={dashboard} blue underline]Dashboard:link:[/link]")
    -        table0.add_row("")  # empty row for spacing
    -
    -        #'table1' to display the worker counts
    -        table1 = Table(box=None)
    -        table1.add_row()
    -        table1.add_column("# Workers", style="magenta")
    -        table1.add_row()
    -        table1.add_row(workers)
    -        table1.add_row()
    -
    -        #'table2' to display the worker resources
    -        table2 = Table(box=None)
    -        table2.add_column("Memory", style="cyan", no_wrap=True, min_width=10)
    -        table2.add_column("CPU", style="magenta", min_width=10)
    -        table2.add_column("GPU", style="magenta", min_width=10)
    -        table2.add_row()
    -        table2.add_row(memory, cpu, gpu)
    -        table2.add_row()
    -
    -        # panels to encompass table1 and table2 into separate cards
    -        panel_1 = Panel.fit(table1, title="Workers")
    -        panel_2 = Panel.fit(table2, title="Worker specs(each)")
    -
    -        # table3 to display panel_1 and panel_2 side-by-side in a single row
    -        table3 = Table(box=None, show_header=False, title="Cluster Resources")
    -        table3.add_row(panel_1, panel_2)
    -
    -        # table4 to display table0 and table3, one below the other
    -        table4 = Table(box=None, show_header=False)
    -        table4.add_row(table0)
    -        table4.add_row(table3)
    -
    -        # Encompass all details of the cluster in a single panel
    -        if not title_printed:
    -            # If first cluster in the list, then create a table with title "Codeflare clusters".
    -            # This is done to ensure the title is center aligned on the cluster display tables, rather
    -            # than being center aligned on the console/terminal if we simply use console.print(title)
    -
    -            table5 = Table(
    -                box=None, title="[bold] :rocket: CodeFlare Cluster Details :rocket:"
    -            )
    -            table5.add_row(Panel.fit(table4))
    -            console.print(table5)
    -            title_printed = True
    -        else:
    -            console.print(Panel.fit(table4))
    -
    -
    -
    -def print_no_resources_found() -
    -
    -
    -
    def print_no_resources_found():
    -    console = Console()
    -    console.print(Panel("[red]No resources found, have you run cluster.up() yet?"))
    -
    -
    -
    -def print_ray_clusters_status(app_wrappers: List[AppWrapper], starting: bool = False) -
    -
    -
    -
    def print_ray_clusters_status(app_wrappers: List[AppWrapper], starting: bool = False):
    -    if not app_wrappers:
    -        print_no_resources_found()
    -        return  # shortcircuit
    -
    -    console = Console()
    -    table = Table(
    -        box=box.ASCII_DOUBLE_HEAD,
    -        title="[bold] :rocket: Cluster Queue Status :rocket:",
    -    )
    -    table.add_column("Name", style="cyan", no_wrap=True)
    -    table.add_column("Status", style="magenta")
    -
    -    for app_wrapper in app_wrappers:
    -        name = app_wrapper.name
    -        status = app_wrapper.status.value
    -        if starting:
    -            status += " (starting)"
    -        table.add_row(name, status)
    -        table.add_row("")  # empty row for spacing
    -
    -    console.print(Panel.fit(table))
    -
    -
    -
    -
    -
    -
    -
    - -
    - - - diff --git a/docs/e2e.md b/docs/e2e.md deleted file mode 100644 index 83d8ae4e..00000000 --- a/docs/e2e.md +++ /dev/null @@ -1,133 +0,0 @@ -# Running e2e tests locally -#### Pre-requisites -- We recommend using Python 3.9, along with Poetry. - -## On KinD clusters -Pre-requisite for KinD clusters: please add in your local `/etc/hosts` file `127.0.0.1 kind`. This will map your localhost IP address to the KinD cluster's hostname. This is already performed on [GitHub Actions](https://github.com/project-codeflare/codeflare-common/blob/1edd775e2d4088a5a0bfddafb06ff3a773231c08/github-actions/kind/action.yml#L70-L72) - -If the system you run on contains NVidia GPU then you can enable the GPU support in KinD, this will allow you to run also GPU tests. -To enable GPU on KinD follow [these instructions](https://www.substratus.ai/blog/kind-with-gpus). - -- Setup Phase: - - Pull the [codeflare-operator repo](https://github.com/project-codeflare/codeflare-operator) and run the following make targets: - ``` - make kind-e2e - export CLUSTER_HOSTNAME=kind - make setup-e2e - make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.3.0 - - For running tests locally on Kind cluster, we need to disable `rayDashboardOAuthEnabled` in `codeflare-operator-config` ConfigMap and then restart CodeFlare Operator - ``` - - - **(Optional)** - Create and add `sdk-user` with limited permissions to the cluster to run through the e2e tests: - ``` - # Get KinD certificates - docker cp kind-control-plane:/etc/kubernetes/pki/ca.crt . - docker cp kind-control-plane:/etc/kubernetes/pki/ca.key . - - # Generate certificates for new user - openssl genrsa -out user.key 2048 - openssl req -new -key user.key -out user.csr -subj '/CN=sdk-user/O=tenant' - openssl x509 -req -in user.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out user.crt -days 360 - - # Add generated certificated to KinD context - user_crt=$(base64 --wrap=0 user.crt) - user_key=$(base64 --wrap=0 user.key) - yq eval -i ".contexts += {\"context\": {\"cluster\": \"kind-kind\", \"user\": \"sdk-user\"}, \"name\": \"sdk-user\"}" $HOME/.kube/config - yq eval -i ".users += {\"name\": \"sdk-user\", \"user\": {\"client-certificate-data\": \"$user_crt\", \"client-key-data\": \"$user_key\"}}" $HOME/.kube/config - cat $HOME/.kube/config - - # Cleanup - rm ca.crt - rm ca.srl - rm ca.key - rm user.crt - rm user.key - rm user.csr - - # Add RBAC permissions to sdk-user - kubectl create clusterrole list-ingresses --verb=get,list --resource=ingresses - kubectl create clusterrolebinding sdk-user-list-ingresses --clusterrole=list-ingresses --user=sdk-user - kubectl create clusterrole appwrapper-creator --verb=get,list,create,delete,patch --resource=appwrappers - kubectl create clusterrolebinding sdk-user-appwrapper-creator --clusterrole=appwrapper-creator --user=sdk-user - kubectl create clusterrole namespace-creator --verb=get,list,create,delete,patch --resource=namespaces - kubectl create clusterrolebinding sdk-user-namespace-creator --clusterrole=namespace-creator --user=sdk-user - kubectl create clusterrole list-rayclusters --verb=get,list --resource=rayclusters - kubectl create clusterrolebinding sdk-user-list-rayclusters --clusterrole=list-rayclusters --user=sdk-user - kubectl config use-context sdk-user - - ``` - - - Install the latest development version of kueue - ``` - kubectl apply --server-side -k "github.com/opendatahub-io/kueue/config/rhoai?ref=dev" - ``` - -- Test Phase: - - Once we have the codeflare-operator, 
kuberay-operator and kueue running and ready, we can run the e2e test on the codeflare-sdk repository: - ``` - poetry install --with test,docs - poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_kind_test.py - ``` - - If the cluster doesn't have NVidia GPU support then we need to disable NVidia GPU tests by providing proper marker: - ``` - poetry install --with test,docs - poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_kind_test.py -m 'kind and not nvidia_gpu' - ``` - - -## On OpenShift clusters -- Setup Phase: - - Pull the [codeflare-operator repo](https://github.com/project-codeflare/codeflare-operator) and run the following make targets: - ``` - - make setup-e2e - make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.3.0 - ``` - - - Install the latest development version of kueue - ``` - kubectl apply --server-side -k "github.com/opendatahub-io/kueue/config/rhoai?ref=dev" - ``` - -If the system you run on contains NVidia GPU then you can enable the GPU support on OpenShift, this will allow you to run also GPU tests. -To enable GPU on OpenShift follow [these instructions](https://docs.nvidia.com/datacenter/cloud-native/openshift/latest/introduction.html). -Currently the SDK doesn't support tolerations, so e2e tests can't be executed on nodes with taint (i.e. GPU taint). - -- Test Phase: - - Once we have the codeflare-operator, kuberay-operator and kueue running and ready, we can run the e2e test on the codeflare-sdk repository: - ``` - poetry install --with test,docs - poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py - ``` - - To run the multiple tests based on the cluster environment, we can run the e2e tests by marking -m with cluster environment (kind or openshift) - ``` - poetry run pytest -v -s ./tests/e2e -m openshift - ``` - - By default tests configured with timeout of `15 minutes`. If necessary, we can override the timeout using `--timeout` option - ``` - poetry run pytest -v -s ./tests/e2e -m openshift --timeout=1200 - ``` - -## On OpenShift Disconnected clusters - -- In addition to setup phase mentioned above in case of Openshift cluster, Disconnected environment requires following pre-requisites : - - Mirror Image registry : - - Image mirror registry is used to host set of container images required locally for the applications and services. This ensures to pull images without needing an external network connection. It also ensures continuous operation and deployment capabilities in a network-isolated environment. - - PYPI Mirror Index : - - When trying to install Python packages in a disconnected environment, the pip command might fail because the connection cannot install packages from external URLs. This issue can be resolved by setting up PIP Mirror Index on separate endpoint in same environment. - - S3 compatible storage : - - Some of our distributed training examples require an external storage solution so that all nodes can access the same data in disconnected environment (For example: common-datasets and model files). - - Minio S3 compatible storage type instance can be deployed in disconnected environment using `/tests/e2e/minio_deployment.yaml` or using support methods in e2e test suite. - - The following are environment variables for configuring PIP index URl for accessing the common-python packages required and the S3 or Minio storage for your Ray Train script or interactive session. 
- ``` - export RAY_IMAGE=quay.io/project-codeflare/ray@sha256: (prefer image digest over image tag in disocnnected environment) - PIP_INDEX_URL=https:///root/pypi/+simple/ \ - PIP_TRUSTED_HOST= \ - AWS_DEFAULT_ENDPOINT= \ - AWS_ACCESS_KEY_ID= \ - AWS_SECRET_ACCESS_KEY= \ - AWS_STORAGE_BUCKET= - AWS_STORAGE_BUCKET_MNIST_DIR= - ``` - Note : When using the Python Minio client to connect to a minio storage bucket, the `AWS_DEFAULT_ENDPOINT` environment variable by default expects secure endpoint where user can use endpoint url with https/http prefix for autodetection of secure/insecure endpoint. diff --git a/docs/generate-documentation.md b/docs/generate-documentation.md new file mode 100644 index 00000000..75b5c7c6 --- /dev/null +++ b/docs/generate-documentation.md @@ -0,0 +1,14 @@ +# Generate CodeFlare Documentation with Sphinx +The following is a short guide on how you can use Sphinx to auto-generate code documentation. Documentation for the latest SDK release can be found [here](https://project-codeflare.github.io/codeflare-sdk/index.html). + +1. Clone the CodeFlare SDK +``` bash +git clone https://github.com/project-codeflare/codeflare-sdk.git +``` +2. [Install Sphinx](https://www.sphinx-doc.org/en/master/usage/installation.html) +3. Run the below command to generate code documentation +``` bash +sphinx-apidoc -o docs/sphinx src/codeflare_sdk "**/*test_*" --force # Generates RST files +make html -C docs/sphinx # Builds HTML files +``` +4. You can access the docs locally at `docs/sphinx/_build/html/index.html` diff --git a/docs/s3-compatible-storage.md b/docs/s3-compatible-storage.md deleted file mode 100644 index 919ce815..00000000 --- a/docs/s3-compatible-storage.md +++ /dev/null @@ -1,61 +0,0 @@ -# S3 compatible storage with Ray Train examples -Some of our distributed training examples require an external storage solution so that all nodes can access the same data.
    -The following are examples for configuring S3 or Minio storage for your Ray Train script or interactive session. - -## S3 Bucket -In your Python Script add the following environment variables: -``` python -os.environ["AWS_ACCESS_KEY_ID"] = "XXXXXXXX" -os.environ["AWS_SECRET_ACCESS_KEY"] = "XXXXXXXX" -os.environ["AWS_DEFAULT_REGION"] = "XXXXXXXX" -``` -Alternatively you can specify these variables in your runtime environment on Job Submission. -``` python -submission_id = client.submit_job( - entrypoint=..., - runtime_env={ - "env_vars": { - "AWS_ACCESS_KEY_ID": os.environ.get('AWS_ACCESS_KEY_ID'), - "AWS_SECRET_ACCESS_KEY": os.environ.get('AWS_SECRET_ACCESS_KEY'), - "AWS_DEFAULT_REGION": os.environ.get('AWS_DEFAULT_REGION') - }, - } -) -``` -In your Trainer configuration you can specify a `run_config` which will utilise your external storage. -``` python -trainer = TorchTrainer( - train_func_distributed, - scaling_config=scaling_config, - run_config = ray.train.RunConfig(storage_path="s3://BUCKET_NAME/SUB_PATH/", name="unique_run_name") -) -``` -To learn more about Amazon S3 Storage you can find information [here](https://docs.aws.amazon.com/AmazonS3/latest/userguide/creating-bucket.html). - -## Minio Bucket -In your Python Script add the following function for configuring your run_config: -``` python -import s3fs -import pyarrow - -def get_minio_run_config(): - s3_fs = s3fs.S3FileSystem( - key = os.getenv('MINIO_ACCESS_KEY', "XXXXX"), - secret = os.getenv('MINIO_SECRET_ACCESS_KEY', "XXXXX"), - endpoint_url = os.getenv('MINIO_URL', "XXXXX") - ) - custom_fs = pyarrow.fs.PyFileSystem(pyarrow.fs.FSSpecHandler(s3_fs)) - run_config = ray.train.RunConfig(storage_path='training', storage_filesystem=custom_fs) - return run_config -``` -You can update the `run_config` to further suit your needs above. -Lastly the new `run_config` must be added to the Trainer: -``` python -trainer = TorchTrainer( - train_func_distributed, - scaling_config=scaling_config, - run_config = get_minio_run_config() -) -``` -To find more information on creating a Minio Bucket compatible with RHOAI you can refer to this [documentation](https://ai-on-openshift.io/tools-and-applications/minio/minio/).
    -Note: You must have `sf3s` and `pyarrow` installed in your environment for this method. diff --git a/docs/setup-kueue.md b/docs/setup-kueue.md deleted file mode 100644 index c8fffa10..00000000 --- a/docs/setup-kueue.md +++ /dev/null @@ -1,66 +0,0 @@ -# Basic Kueue Resources configuration - -## Introduction: - -This document is designed for administrators who have Kueue installed on their cluster. We will walk through the process of setting up essential Kueue resources, namely Cluster Queue, Resource Flavor, and Local Queue. - -## 1. Resource Flavor: -Resource Flavors allow the cluster admin to define different types of resources with specific characteristics, such as CPU, memory, GPU, etc. These can then be assigned to workloads to ensure they are executed on appropriate resources. - -The YAML configuration provided below creates an empty Resource Flavor named default-flavor. It serves as a starting point and does not specify any detailed resource characteristics. -```yaml -apiVersion: kueue.x-k8s.io/v1beta1 -kind: ResourceFlavor -metadata: - name: default-flavor -``` -For more detailed information on Resource Flavor configuration options, refer to the Kueue documentation: [Resource Flavor Configuration](https://kueue.sigs.k8s.io/docs/concepts/resource_flavor/) - -## 2. Cluster Queue: -A Cluster Queue represents a shared queue across the entire cluster. It allows the cluster admin to define global settings for workload prioritization and resource allocation. - -When setting up a Cluster Queue in Kueue, it's crucial that the resource specifications match the actual capacities and operational requirements of your cluster. The example provided outlines a basic setup; however, each cluster may have different resource availabilities and needs. -```yaml -apiVersion: kueue.x-k8s.io/v1beta1 -kind: ClusterQueue -metadata: - name: "cluster-queue" -spec: - namespaceSelector: {} # match all. - resourceGroups: - - coveredResources: ["cpu", "memory", "pods", "nvidia.com/gpu"] - flavors: - - name: "default-flavor" - resources: - - name: "cpu" - nominalQuota: 9 - - name: "memory" - nominalQuota: 36Gi - - name: "pods" - nominalQuota: 5 - - name: "nvidia.com/gpu" - nominalQuota: '0' -``` - -For more detailed information on Cluster Queue configuration options, refer to the Kueue documentation: [Cluster Queue Configuration](https://kueue.sigs.k8s.io/docs/concepts/cluster_queue/) - -## 3. Local Queue (With Default Annotation): -A Local Queue represents a queue associated with a specific namespace within the cluster. It allows namespace-level control over workload prioritization and resource allocation. -```yaml -apiVersion: kueue.x-k8s.io/v1beta1 -kind: LocalQueue -metadata: - namespace: team-a - name: team-a-queue - annotations: - kueue.x-k8s.io/default-queue: "true" -spec: - clusterQueue: cluster-queue -``` - -In the LocalQueue configuration provided above, the annotations field specifies `kueue.x-k8s.io/default-queue: "true"`. This annotation indicates that the team-a-queue is designated as the default queue for the team-a namespace. When this is set, any workloads submitted to the team-a namespace without explicitly specifying a queue will automatically be routed to the team-a-queue. 
- -For more detailed information on Local Queue configuration options, refer to the Kueue documentation: [Local Queue Configuration](https://kueue.sigs.k8s.io/docs/concepts/local_queue/) - -## Conclusion: -By following the steps outlined in this document, the cluster admin can successfully create the basic Kueue resources necessary for workload management in the cluster. For more advanced configurations and features, please refer to the comprehensive [Kueue documentation](https://kueue.sigs.k8s.io/docs/concepts/). diff --git a/docs/sphinx/Makefile b/docs/sphinx/Makefile new file mode 100644 index 00000000..d4bb2cbb --- /dev/null +++ b/docs/sphinx/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/sphinx/conf.py b/docs/sphinx/conf.py new file mode 100644 index 00000000..75f6f16f --- /dev/null +++ b/docs/sphinx/conf.py @@ -0,0 +1,38 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +import os +import sys + +sys.path.insert(0, os.path.abspath("..")) + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "CodeFlare SDK" +copyright = "2024, Project CodeFlare" +author = "Project CodeFlare" +release = "v0.21.1" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "sphinx.ext.autosummary", + "sphinx_rtd_theme", +] + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "sphinx_rtd_theme" +html_static_path = ["_static"] diff --git a/docs/sphinx/index.rst b/docs/sphinx/index.rst new file mode 100644 index 00000000..fdf4c15b --- /dev/null +++ b/docs/sphinx/index.rst @@ -0,0 +1,32 @@ +.. CodeFlare SDK documentation master file, created by + sphinx-quickstart on Thu Oct 10 11:27:58 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +CodeFlare SDK documentation +=========================== + +The CodeFlare SDK is an intuitive, easy-to-use python interface for batch resource requesting, access, job submission, and observation. Simplifying the developer's life while enabling access to high-performance compute resources, either in the cloud or on-prem. + + +.. toctree:: + :maxdepth: 2 + :caption: Code Documentation: + + modules + +.. 
toctree:: + :maxdepth: 2 + :caption: User Documentation: + + user-docs/authentication + user-docs/cluster-configuration + user-docs/e2e + user-docs/s3-compatible-storage + user-docs/setup-kueue + +Quick Links +=========== +- `PyPi `__ +- `GitHub `__ +- `OpenShift AI Documentation `__ diff --git a/docs/sphinx/make.bat b/docs/sphinx/make.bat new file mode 100644 index 00000000..32bb2452 --- /dev/null +++ b/docs/sphinx/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/sphinx/user-docs/authentication.rst b/docs/sphinx/user-docs/authentication.rst new file mode 100644 index 00000000..d07063d9 --- /dev/null +++ b/docs/sphinx/user-docs/authentication.rst @@ -0,0 +1,66 @@ +Authentication via the CodeFlare SDK +==================================== + +Currently there are four ways of authenticating to your cluster via the +SDK. Authenticating with your cluster allows you to perform actions such +as creating Ray Clusters and Job Submission. + +Method 1 Token Authentication +----------------------------- + +This is how a typical user would authenticate to their cluster using +``TokenAuthentication``. + +:: + + from codeflare_sdk import TokenAuthentication + + auth = TokenAuthentication( + token = "XXXXX", + server = "XXXXX", + skip_tls=False, + # ca_cert_path="/path/to/cert" + ) + auth.login() + # log out with auth.logout() + +Setting ``skip_tls=True`` allows interaction with an HTTPS server +bypassing the server certificate checks although this is not secure. You +can pass a custom certificate to ``TokenAuthentication`` by using +``ca_cert_path="/path/to/cert"`` when authenticating provided +``skip_tls=False``. Alternatively you can set the environment variable +``CF_SDK_CA_CERT_PATH`` to the path of your custom certificate. + +Method 2 Kubernetes Config File Authentication (Default location) +----------------------------------------------------------------- + +If a user has authenticated to their cluster by alternate means e.g. run +a login command like ``oc login --token= --server=`` +their kubernetes config file should have updated. If the user has not +specifically authenticated through the SDK by other means such as +``TokenAuthentication`` then the SDK will try to use their default +Kubernetes config file located at ``"/HOME/.kube/config"``. + +Method 3 Specifying a Kubernetes Config File +-------------------------------------------- + +A user can specify a config file via a different authentication class +``KubeConfigFileAuthentication`` for authenticating with the SDK. This +is what loading a custom config file would typically look like. 
+ +:: + + from codeflare_sdk import KubeConfigFileAuthentication + + auth = KubeConfigFileAuthentication( + kube_config_path="/path/to/config", + ) + auth.load_kube_config() + # log out with auth.logout() + +Method 4 In-Cluster Authentication +---------------------------------- + +If a user does not authenticate by any of the means detailed above and +does not have a config file at ``"/HOME/.kube/config"`` the SDK will try +to authenticate with the in-cluster configuration file. diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst new file mode 100644 index 00000000..1fe28c64 --- /dev/null +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -0,0 +1,72 @@ +Ray Cluster Configuration +========================= + +To create Ray Clusters using the CodeFlare SDK a cluster configuration +needs to be created first. This is what a typical cluster configuration +would look like; Note: The values for CPU and Memory are at the minimum +requirements for creating the Ray Cluster. + +.. code:: python + + from codeflare_sdk import Cluster, ClusterConfiguration + + cluster = Cluster(ClusterConfiguration( + name='ray-example', # Mandatory Field + namespace='default', # Default None + head_cpu_requests=1, # Default 2 + head_cpu_limits=1, # Default 2 + head_memory_requests=1, # Default 8 + head_memory_limits=1, # Default 8 + head_extended_resource_requests={'nvidia.com/gpu':0}, # Default 0 + worker_extended_resource_requests={'nvidia.com/gpu':0}, # Default 0 + num_workers=1, # Default 1 + worker_cpu_requests=1, # Default 1 + worker_cpu_limits=1, # Default 1 + worker_memory_requests=2, # Default 2 + worker_memory_limits=2, # Default 2 + # image="", # Optional Field + machine_types=["m5.xlarge", "g4dn.xlarge"], + labels={"exampleLabel": "example", "secondLabel": "example"}, + )) + +Note: ‘quay.io/modh/ray:2.35.0-py39-cu121’ is the default image used by +the CodeFlare SDK for creating a RayCluster resource. If you have your +own Ray image which suits your purposes, specify it in image field to +override the default image. If you are using ROCm compatible GPUs you +can use ‘quay.io/modh/ray:2.35.0-py39-rocm61’. You can also find +documentation on building a custom image +`here `__. + +The ``labels={"exampleLabel": "example"}`` parameter can be used to +apply additional labels to the RayCluster resource. + +After creating their ``cluster``, a user can call ``cluster.up()`` and +``cluster.down()`` to respectively create or remove the Ray Cluster. + +Deprecating Parameters +---------------------- + +The following parameters of the ``ClusterConfiguration`` are being deprecated. + +.. list-table:: + :header-rows: 1 + :widths: auto + + * - Deprecated Parameter + - Replaced By + * - ``head_cpus`` + - ``head_cpu_requests``, ``head_cpu_limits`` + * - ``head_memory`` + - ``head_memory_requests``, ``head_memory_limits`` + * - ``min_cpus`` + - ``worker_cpu_requests`` + * - ``max_cpus`` + - ``worker_cpu_limits`` + * - ``min_memory`` + - ``worker_memory_requests`` + * - ``max_memory`` + - ``worker_memory_limits`` + * - ``head_gpus`` + - ``head_extended_resource_requests`` + * - ``num_gpus`` + - ``worker_extended_resource_requests`` diff --git a/docs/sphinx/user-docs/e2e.rst b/docs/sphinx/user-docs/e2e.rst new file mode 100644 index 00000000..e64032e2 --- /dev/null +++ b/docs/sphinx/user-docs/e2e.rst @@ -0,0 +1,210 @@ +Running e2e tests locally +========================= + +Pre-requisites +^^^^^^^^^^^^^^ + +- We recommend using Python 3.9, along with Poetry. 
+ +On KinD clusters +---------------- + +Pre-requisite for KinD clusters: please add in your local ``/etc/hosts`` +file ``127.0.0.1 kind``. This will map your localhost IP address to the +KinD cluster’s hostname. This is already performed on `GitHub +Actions `__ + +If the system you run on contains NVidia GPU then you can enable the GPU +support in KinD, this will allow you to run also GPU tests. To enable +GPU on KinD follow `these +instructions `__. + +- Setup Phase: + + - Pull the `codeflare-operator + repo `__ + and run the following make targets: + + :: + + make kind-e2e + export CLUSTER_HOSTNAME=kind + make setup-e2e + make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.3.0 + + For running tests locally on Kind cluster, we need to disable `rayDashboardOAuthEnabled` in `codeflare-operator-config` ConfigMap and then restart CodeFlare Operator + + - **(Optional)** - Create and add ``sdk-user`` with limited + permissions to the cluster to run through the e2e tests: + + :: + + # Get KinD certificates + docker cp kind-control-plane:/etc/kubernetes/pki/ca.crt . + docker cp kind-control-plane:/etc/kubernetes/pki/ca.key . + + # Generate certificates for new user + openssl genrsa -out user.key 2048 + openssl req -new -key user.key -out user.csr -subj '/CN=sdk-user/O=tenant' + openssl x509 -req -in user.csr -CA ca.crt -CAkey ca.key -CAcreateserial -out user.crt -days 360 + + # Add generated certificated to KinD context + user_crt=$(base64 --wrap=0 user.crt) + user_key=$(base64 --wrap=0 user.key) + yq eval -i ".contexts += {\"context\": {\"cluster\": \"kind-kind\", \"user\": \"sdk-user\"}, \"name\": \"sdk-user\"}" $HOME/.kube/config + yq eval -i ".users += {\"name\": \"sdk-user\", \"user\": {\"client-certificate-data\": \"$user_crt\", \"client-key-data\": \"$user_key\"}}" $HOME/.kube/config + cat $HOME/.kube/config + + # Cleanup + rm ca.crt + rm ca.srl + rm ca.key + rm user.crt + rm user.key + rm user.csr + + # Add RBAC permissions to sdk-user + kubectl create clusterrole list-ingresses --verb=get,list --resource=ingresses + kubectl create clusterrolebinding sdk-user-list-ingresses --clusterrole=list-ingresses --user=sdk-user + kubectl create clusterrole appwrapper-creator --verb=get,list,create,delete,patch --resource=appwrappers + kubectl create clusterrolebinding sdk-user-appwrapper-creator --clusterrole=appwrapper-creator --user=sdk-user + kubectl create clusterrole namespace-creator --verb=get,list,create,delete,patch --resource=namespaces + kubectl create clusterrolebinding sdk-user-namespace-creator --clusterrole=namespace-creator --user=sdk-user + kubectl create clusterrole list-rayclusters --verb=get,list --resource=rayclusters + kubectl create clusterrolebinding sdk-user-list-rayclusters --clusterrole=list-rayclusters --user=sdk-user + kubectl config use-context sdk-user + + - Install the latest development version of kueue + + :: + + kubectl apply --server-side -k "github.com/opendatahub-io/kueue/config/rhoai?ref=dev" + +- Test Phase: + + - Once we have the codeflare-operator, kuberay-operator and kueue + running and ready, we can run the e2e test on the codeflare-sdk + repository: + + :: + + poetry install --with test,docs + poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_kind_test.py + + - If the cluster doesn’t have NVidia GPU support then we need to + disable NVidia GPU tests by providing proper marker: + + :: + + poetry install --with test,docs + poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_kind_test.py -m 'kind and not 
nvidia_gpu'
+
+On OpenShift clusters
+---------------------
+
+- Setup Phase:
+
+  - Pull the `codeflare-operator
+    repo `__
+    and run the following make targets:
+
+    ::
+
+
+      make setup-e2e
+      make deploy -e IMG=quay.io/project-codeflare/codeflare-operator:v1.3.0
+
+  - Install the latest development version of kueue
+
+    ::
+
+      kubectl apply --server-side -k "github.com/opendatahub-io/kueue/config/rhoai?ref=dev"
+
+If the system you run on contains NVidia GPU then you can enable the GPU
+support on OpenShift, this will allow you to run also GPU tests. To
+enable GPU on OpenShift follow `these
+instructions `__.
+Currently the SDK doesn’t support tolerations, so e2e tests can’t be
+executed on nodes with taint (i.e. GPU taint).
+
+- Test Phase:
+
+  - Once we have the codeflare-operator, kuberay-operator and kueue
+    running and ready, we can run the e2e test on the codeflare-sdk
+    repository:
+
+    ::
+
+      poetry install --with test,docs
+      poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_test.py
+
+  - To run multiple tests based on the cluster environment, we can
+    run the e2e tests by marking -m with the cluster environment (kind
+    or openshift)
+
+    ::
+
+      poetry run pytest -v -s ./tests/e2e -m openshift
+
+  - By default tests are configured with a timeout of ``15 minutes``. If
+    necessary, we can override the timeout using the ``--timeout`` option
+
+    ::
+
+      poetry run pytest -v -s ./tests/e2e -m openshift --timeout=1200
+
+On OpenShift Disconnected clusters
+----------------------------------
+
+- In addition to the setup phase mentioned above for an OpenShift
+  cluster, a disconnected environment requires the following
+  pre-requisites:
+
+  - Mirror Image registry:
+
+    - An image mirror registry is used to host the set of container
+      images required locally for the applications and services. This
+      ensures images can be pulled without needing an external network
+      connection. It also ensures continuous operation and deployment
+      capabilities in a network-isolated environment.
+
+  - PyPI Mirror Index:
+
+    - When trying to install Python packages in a disconnected
+      environment, the pip command might fail because the connection
+      cannot install packages from external URLs. This issue can be
+      resolved by setting up a PIP Mirror Index on a separate endpoint
+      in the same environment.
+
+  - S3 compatible storage:
+
+    - Some of our distributed training examples require an external
+      storage solution so that all nodes can access the same data in a
+      disconnected environment (for example: common-datasets and
+      model files).
+
+    - A Minio S3 compatible storage instance can be deployed in a
+      disconnected environment using
+      ``/tests/e2e/minio_deployment.yaml`` or using the support methods
+      in the e2e test suite.
+
+    - The following are environment variables for configuring the PIP
+      index URL for accessing the required common Python packages and
+      the S3 or Minio storage for your Ray Train script or
+      interactive session.
+
+      ::
+
+        export RAY_IMAGE=quay.io/project-codeflare/ray@sha256: (prefer image digest over image tag in disconnected environment)
+        PIP_INDEX_URL=https:///root/pypi/+simple/ \
+        PIP_TRUSTED_HOST= \
+        AWS_DEFAULT_ENDPOINT= \
+        AWS_ACCESS_KEY_ID= \
+        AWS_SECRET_ACCESS_KEY= \
+        AWS_STORAGE_BUCKET=
+        AWS_STORAGE_BUCKET_MNIST_DIR=
+
+      Note : When using the Python Minio client to connect to a minio
+      storage bucket, the ``AWS_DEFAULT_ENDPOINT`` environment
+      variable by default expects secure endpoint where user can use
+      endpoint url with https/http prefix for autodetection of
+      secure/insecure endpoint.
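The note above can be illustrated with a short, hedged sketch. It assumes the ``minio`` client package is installed and that the environment variables listed earlier are exported; the helper name is illustrative only, not part of the test suite.

.. code:: python

   import os
   from urllib.parse import urlparse

   from minio import Minio


   def minio_client_from_env() -> Minio:
       # AWS_DEFAULT_ENDPOINT may or may not carry an http:// or https:// prefix;
       # parse it so the scheme decides whether the connection is secure.
       raw = os.environ["AWS_DEFAULT_ENDPOINT"]
       parsed = urlparse(raw if "://" in raw else "https://" + raw)
       return Minio(
           parsed.netloc,
           access_key=os.environ["AWS_ACCESS_KEY_ID"],
           secret_key=os.environ["AWS_SECRET_ACCESS_KEY"],
           secure=(parsed.scheme == "https"),
       )


   # Example usage: check that the bucket used by the tests is reachable.
   client = minio_client_from_env()
   print(client.bucket_exists(os.environ["AWS_STORAGE_BUCKET"]))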
diff --git a/docs/sphinx/user-docs/s3-compatible-storage.rst b/docs/sphinx/user-docs/s3-compatible-storage.rst
new file mode 100644
index 00000000..60937441
--- /dev/null
+++ b/docs/sphinx/user-docs/s3-compatible-storage.rst
@@ -0,0 +1,86 @@
+S3 compatible storage with Ray Train examples
+=============================================
+
+Some of our distributed training examples require an external storage
+solution so that all nodes can access the same data. The following are
+examples for configuring S3 or Minio storage for your Ray Train script
+or interactive session.
+
+S3 Bucket
+---------
+
+In your Python Script add the following environment variables:
+
+.. code:: python
+
+   os.environ["AWS_ACCESS_KEY_ID"] = "XXXXXXXX"
+   os.environ["AWS_SECRET_ACCESS_KEY"] = "XXXXXXXX"
+   os.environ["AWS_DEFAULT_REGION"] = "XXXXXXXX"
+
+Alternatively you can specify these variables in your runtime
+environment on Job Submission.
+
+.. code:: python
+
+   submission_id = client.submit_job(
+       entrypoint=...,
+       runtime_env={
+           "env_vars": {
+               "AWS_ACCESS_KEY_ID": os.environ.get('AWS_ACCESS_KEY_ID'),
+               "AWS_SECRET_ACCESS_KEY": os.environ.get('AWS_SECRET_ACCESS_KEY'),
+               "AWS_DEFAULT_REGION": os.environ.get('AWS_DEFAULT_REGION')
+           },
+       }
+   )
+
+In your Trainer configuration you can specify a ``run_config`` which
+will utilise your external storage.
+
+.. code:: python
+
+   trainer = TorchTrainer(
+       train_func_distributed,
+       scaling_config=scaling_config,
+       run_config = ray.train.RunConfig(storage_path="s3://BUCKET_NAME/SUB_PATH/", name="unique_run_name")
+   )
+
+To learn more about Amazon S3 Storage you can find information
+`here <https://docs.aws.amazon.com/AmazonS3/latest/userguide/creating-bucket.html>`__.
+
+Minio Bucket
+------------
+
+In your Python Script add the following function for configuring your
+run_config:
+
+.. code:: python
+
+   import s3fs
+   import pyarrow
+
+   def get_minio_run_config():
+       s3_fs = s3fs.S3FileSystem(
+           key = os.getenv('MINIO_ACCESS_KEY', "XXXXX"),
+           secret = os.getenv('MINIO_SECRET_ACCESS_KEY', "XXXXX"),
+           endpoint_url = os.getenv('MINIO_URL', "XXXXX")
+       )
+       custom_fs = pyarrow.fs.PyFileSystem(pyarrow.fs.FSSpecHandler(s3_fs))
+       run_config = ray.train.RunConfig(storage_path='training', storage_filesystem=custom_fs)
+       return run_config
+
+You can update the ``run_config`` to further suit your needs above.
+Lastly the new ``run_config`` must be added to the Trainer:
+
+.. code:: python
+
+   trainer = TorchTrainer(
+       train_func_distributed,
+       scaling_config=scaling_config,
+       run_config = get_minio_run_config()
+   )
+
+To find more information on creating a Minio Bucket compatible with
+RHOAI you can refer to this
+`documentation <https://ai-on-openshift.io/tools-and-applications/minio/minio/>`__.
+Note: You must have ``s3fs`` and ``pyarrow`` installed in your
+environment for this method.
diff --git a/docs/sphinx/user-docs/setup-kueue.rst b/docs/sphinx/user-docs/setup-kueue.rst
new file mode 100644
index 00000000..86956e01
--- /dev/null
+++ b/docs/sphinx/user-docs/setup-kueue.rst
@@ -0,0 +1,109 @@
+Basic Kueue Resources configuration
+===================================
+
+Introduction:
+-------------
+
+This document is designed for administrators who have Kueue installed on
+their cluster. We will walk through the process of setting up essential
+Kueue resources, namely Cluster Queue, Resource Flavor, and Local Queue.
+
+1. Resource Flavor:
+-------------------
+
+Resource Flavors allow the cluster admin to define different types of
+resources with specific characteristics, such as CPU, memory, GPU, etc.
+These can then be assigned to workloads to ensure they are executed on
+appropriate resources.
+
+The YAML configuration provided below creates an empty Resource Flavor
+named default-flavor. It serves as a starting point and does not specify
+any detailed resource characteristics.
+
+.. code:: yaml
+
+   apiVersion: kueue.x-k8s.io/v1beta1
+   kind: ResourceFlavor
+   metadata:
+     name: default-flavor
+
+For more detailed information on Resource Flavor configuration options,
+refer to the Kueue documentation: `Resource Flavor
+Configuration <https://kueue.sigs.k8s.io/docs/concepts/resource_flavor/>`__
+
+2. Cluster Queue:
+-----------------
+
+A Cluster Queue represents a shared queue across the entire cluster. It
+allows the cluster admin to define global settings for workload
+prioritization and resource allocation.
+
+When setting up a Cluster Queue in Kueue, it’s crucial that the resource
+specifications match the actual capacities and operational requirements
+of your cluster. The example provided outlines a basic setup; however,
+each cluster may have different resource availabilities and needs.
+
+.. code:: yaml
+
+   apiVersion: kueue.x-k8s.io/v1beta1
+   kind: ClusterQueue
+   metadata:
+     name: "cluster-queue"
+   spec:
+     namespaceSelector: {} # match all.
+     resourceGroups:
+     - coveredResources: ["cpu", "memory", "pods", "nvidia.com/gpu"]
+       flavors:
+       - name: "default-flavor"
+         resources:
+         - name: "cpu"
+           nominalQuota: 9
+         - name: "memory"
+           nominalQuota: 36Gi
+         - name: "pods"
+           nominalQuota: 5
+         - name: "nvidia.com/gpu"
+           nominalQuota: '0'
+
+For more detailed information on Cluster Queue configuration options,
+refer to the Kueue documentation: `Cluster Queue
+Configuration <https://kueue.sigs.k8s.io/docs/concepts/cluster_queue/>`__
+
+3. Local Queue (With Default Annotation):
+-----------------------------------------
+
+A Local Queue represents a queue associated with a specific namespace
+within the cluster. It allows namespace-level control over workload
+prioritization and resource allocation.
+
+.. code:: yaml
+
+   apiVersion: kueue.x-k8s.io/v1beta1
+   kind: LocalQueue
+   metadata:
+     namespace: team-a
+     name: team-a-queue
+     annotations:
+       kueue.x-k8s.io/default-queue: "true"
+   spec:
+     clusterQueue: cluster-queue
+
+In the LocalQueue configuration provided above, the annotations field
+specifies ``kueue.x-k8s.io/default-queue: "true"``. This annotation
+indicates that the team-a-queue is designated as the default queue for
+the team-a namespace. When this is set, any workloads submitted to the
+team-a namespace without explicitly specifying a queue will
+automatically be routed to the team-a-queue.
+
+For more detailed information on Local Queue configuration options,
+refer to the Kueue documentation: `Local Queue
+Configuration <https://kueue.sigs.k8s.io/docs/concepts/local_queue/>`__
+
+Conclusion:
+-----------
+
+By following the steps outlined in this document, the cluster admin can
+successfully create the basic Kueue resources necessary for workload
+management in the cluster. For more advanced configurations and
+features, please refer to the comprehensive `Kueue
+documentation <https://kueue.sigs.k8s.io/docs/concepts/>`__.
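Admins who prefer to script the steps above can also create the same resources programmatically. The sketch below is illustrative only: it assumes the ``kubernetes`` Python client is installed, that Kueue's CRDs are already present on the cluster, and that the active kubeconfig context has sufficient privileges; it mirrors the LocalQueue manifest shown earlier.

.. code:: python

   from kubernetes import client, config

   # Load credentials from the active kubeconfig context.
   config.load_kube_config()
   api = client.CustomObjectsApi()

   local_queue = {
       "apiVersion": "kueue.x-k8s.io/v1beta1",
       "kind": "LocalQueue",
       "metadata": {
           "namespace": "team-a",
           "name": "team-a-queue",
           "annotations": {"kueue.x-k8s.io/default-queue": "true"},
       },
       "spec": {"clusterQueue": "cluster-queue"},
   }

   # LocalQueue is a namespaced custom resource, so use the namespaced call.
   api.create_namespaced_custom_object(
       group="kueue.x-k8s.io",
       version="v1beta1",
       namespace="team-a",
       plural="localqueues",
       body=local_queue,
   )

ResourceFlavor and ClusterQueue are cluster-scoped, so they would go through ``create_cluster_custom_object`` instead.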
diff --git a/poetry.lock b/poetry.lock index 3b65c16b..5c4ce93f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -151,6 +151,17 @@ files = [ [package.dependencies] frozenlist = ">=1.1.0" +[[package]] +name = "alabaster" +version = "0.7.16" +description = "A light, configurable Sphinx theme" +optional = false +python-versions = ">=3.9" +files = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] + [[package]] name = "anyio" version = "4.6.0" @@ -862,6 +873,17 @@ files = [ {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] +[[package]] +name = "docutils" +version = "0.20.1" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, +] + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -1257,6 +1279,17 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + [[package]] name = "importlib-metadata" version = "8.5.0" @@ -1744,43 +1777,6 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] -[[package]] -name = "mako" -version = "1.3.5" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markdown" -version = "3.7" -description = "Python implementation of John Gruber's Markdown." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"}, - {file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] -testing = ["coverage", "pyyaml"] - [[package]] name = "markupsafe" version = "2.1.5" @@ -2484,21 +2480,6 @@ files = [ qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] testing = ["docopt", "pytest"] -[[package]] -name = "pdoc3" -version = "0.10.0" -description = "Auto-generate API documentation for Python projects." -optional = false -python-versions = ">= 3.6" -files = [ - {file = "pdoc3-0.10.0-py3-none-any.whl", hash = "sha256:ba45d1ada1bd987427d2bf5cdec30b2631a3ff5fb01f6d0e77648a572ce6028b"}, - {file = "pdoc3-0.10.0.tar.gz", hash = "sha256:5f22e7bcb969006738e1aa4219c75a32f34c2d62d46dc9d2fb2d3e0b0287e4b7"}, -] - -[package.dependencies] -mako = "*" -markdown = ">=3.0" - [[package]] name = "pexpect" version = "4.9.0" @@ -3556,6 +3537,17 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + [[package]] name = "soupsieve" version = "2.6" @@ -3567,6 +3559,169 @@ files = [ {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, ] +[[package]] +name = "sphinx" +version = "7.4.7" +description = "Python documentation generator" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, +] + +[package.dependencies] +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +snowballstemmer = ">=2.2" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = {version = ">=2", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", 
"pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] + +[[package]] +name = "sphinx-rtd-theme" +version = "2.0.0" +description = "Read the Docs theme for Sphinx" +optional = false +python-versions = ">=3.6" +files = [ + {file = "sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586"}, + {file = "sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"}, +] + +[package.dependencies] +docutils = "<0.21" +sphinx = ">=5,<8" +sphinxcontrib-jquery = ">=4,<5" + +[package.extras] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +description = "Extension to include jQuery on newer Sphinx releases" +optional = false +python-versions = ">=2.7" +files = [ + {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, + {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, +] + +[package.dependencies] +Sphinx = ">=1.8" + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + 
+[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, +] + +[package.extras] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + [[package]] name = "stack-data" version = "0.6.3" @@ -4025,4 +4180,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "a54e3ebe29255d397651cea6d849ada39f03565a1a7bf13084092be3600a77f0" +content-hash = "4463099e8d145fd823f523b134f18d48766038cc3d2ad466864e5a2debcc3479" diff --git a/pyproject.toml b/pyproject.toml index 37eb17a4..17b59880 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,8 @@ ipywidgets = "8.1.2" optional = true [tool.poetry.group.docs.dependencies] -pdoc3 = "0.10.0" +sphinx = "7.4.7" +sphinx-rtd-theme = "2.0.0" [tool.poetry.group.test] optional = true From 878638257f7f50b92d8848140ca3941ea9883659 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Mon, 14 Oct 2024 14:30:36 +0100 Subject: [PATCH 377/496] ci: add publish documentation workflow update release workflow --- .github/workflows/publish-documentation.yaml | 45 ++++++++++++++++++++ .github/workflows/release.yaml | 29 ++----------- .gitignore | 1 + 3 files changed, 50 insertions(+), 25 deletions(-) create mode 100644 .github/workflows/publish-documentation.yaml diff --git a/.github/workflows/publish-documentation.yaml b/.github/workflows/publish-documentation.yaml new file mode 100644 index 00000000..80afe7d6 --- /dev/null +++ b/.github/workflows/publish-documentation.yaml @@ -0,0 +1,45 @@ +name: Publish Documentation + +on: + workflow_dispatch: + inputs: + codeflare_sdk_release_version: + type: string + required: true + description: 'Version number (for example: 0.1.0)' + +permissions: + contents: write + +jobs: + docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Install Python + uses: actions/setup-python@v5 + with: + python-version: 3.9 + - name: Install Sphinx + run: | + sudo apt-get update + sudo apt-get install python3-sphinx + - name: Install Poetry + uses: abatilo/actions-poetry@v2 + with: + poetry-version: 1.8.3 + - name: Create new documentation + run: | + python3 -m venv .venv + source .venv/bin/activate + poetry install --with docs + sed -i 's/release = "v[0-9]\+\.[0-9]\+\.[0-9]\+"/release = "${{ github.event.inputs.codeflare_sdk_release_version }}"/' docs/sphinx/conf.py + sphinx-apidoc 
-o docs/sphinx src/codeflare_sdk "**/*test_*" --force # Generate docs but ignore test files + make html -C docs/sphinx + - name: Deploy to GitHub Pages + uses: peaceiris/actions-gh-pages@v3 + with: + publish_branch: gh-pages + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: docs/sphinx/_build/html + force_orphan: true diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 6e56a3f8..16b5aac4 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -33,8 +33,6 @@ jobs: permissions: contents: write id-token: write # This permission is required for trusted publishing - env: - PR_BRANCH_NAME: adjustments-release-${{ github.event.inputs.release-version }} steps: - name: Checkout the repository uses: actions/checkout@v4 @@ -52,35 +50,16 @@ jobs: run: poetry install --with docs - name: Create new documentation run: | - sphinx-apidoc -o docs/sphinx src/codeflare_sdk "**/*test_*" --force - make clean -C docs/sphinx - make html -C docs/sphinx + gh workflow run publish-documentation.yaml \ + --repo ${{ github.event.inputs.codeflare-repository-organization }}/codeflare-sdk \ + --ref ${{ github.ref }} \ + --field codeflare_sdk_release_version=${{ github.event.inputs.release-version }} - name: Copy demo notebooks into SDK package run: cp -r demo-notebooks src/codeflare_sdk/demo-notebooks - name: Run poetry build run: poetry build - - name: Commit changes in docs - uses: stefanzweifel/git-auto-commit-action@v4 - with: - file_pattern: 'docs' - commit_message: "Changes in docs for release: v${{ github.event.inputs.release-version }}" - create_branch: true - branch: ${{ env.PR_BRANCH_NAME }} - - name: Create a PR with code changes - run: | - if git branch -a | grep "${{ env.PR_BRANCH_NAME }}"; then - GIT_BRANCH=${GITHUB_REF#refs/heads/} - gh pr create --base "$GIT_BRANCH" --fill --head "${{ env.PR_BRANCH_NAME }}" --label "lgtm" --label "approved" - fi env: GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} - - name: Wait until PR with code changes is merged - run: | - if git branch -a | grep "${{ env.PR_BRANCH_NAME }}"; then - timeout 3600 bash -c 'until [[ $(gh pr view '${{ env.PR_BRANCH_NAME }}' --json state --jq .state) == "MERGED" ]]; do sleep 5 && echo "$(gh pr view '${{ env.PR_BRANCH_NAME }}' --json state --jq .state)"; done' - fi - env: - GITHUB_TOKEN: ${{ github.TOKEN }} - name: Create Github release uses: ncipollo/release-action@v1 with: diff --git a/.gitignore b/.gitignore index 2940f885..9ac5d687 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ node_modules ui-tests/playwright-report ui-tests/test-results /src/codeflare_sdk.egg-info/ +docs/sphinx/_build From 9288016496266c7b999ca8057ef8b5c32a0bd0ee Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 15 Oct 2024 10:44:30 +0100 Subject: [PATCH 378/496] docs: add new documentation and fix existing docs --- README.md | 5 +- docs/sphinx/index.rst | 4 +- docs/sphinx/user-docs/authentication.rst | 4 +- .../user-docs/cluster-configuration.rst | 18 ++-- docs/sphinx/user-docs/e2e.rst | 19 ++-- docs/sphinx/user-docs/images/ui-buttons.png | Bin 0 -> 22385 bytes .../user-docs/images/ui-view-clusters.png | Bin 0 -> 28767 bytes .../user-docs/ray-cluster-interaction.rst | 90 ++++++++++++++++++ .../user-docs/s3-compatible-storage.rst | 2 +- docs/sphinx/user-docs/setup-kueue.rst | 7 +- docs/sphinx/user-docs/ui-widgets.rst | 55 +++++++++++ 11 files changed, 176 insertions(+), 28 deletions(-) create mode 100644 docs/sphinx/user-docs/images/ui-buttons.png create mode 100644 
docs/sphinx/user-docs/images/ui-view-clusters.png create mode 100644 docs/sphinx/user-docs/ray-cluster-interaction.rst create mode 100644 docs/sphinx/user-docs/ui-widgets.rst diff --git a/README.md b/README.md index e166b4f5..ffc22626 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ For guided demos and basics walkthroughs, check out the following links: - these demos can be copied into your current working directory when using the `codeflare-sdk` by using the `codeflare_sdk.copy_demo_nbs()` function - Additionally, we have a [video walkthrough](https://www.youtube.com/watch?v=U76iIfd9EmE) of these basic demos from June, 2023 -Full documentation can be found [here](https://project-codeflare.github.io/codeflare-sdk/detailed-documentation) +Full documentation can be found [here](https://project-codeflare.github.io/codeflare-sdk/index.html) ## Installation @@ -32,11 +32,10 @@ It is possible to use the Release Github workflow to do the release. This is gen The following instructions apply when doing release manually. This may be required in instances where the automation is failing. - Check and update the version in "pyproject.toml" file. -- Generate new documentation. -`pdoc --html -o docs src/codeflare_sdk && pushd docs && rm -rf cluster job utils && mv codeflare_sdk/* . && rm -rf codeflare_sdk && popd && find docs -type f -name "*.html" -exec bash -c "echo '' >> {}" \;` (it is possible to install **pdoc** using the following command `poetry install --with docs`) - Commit all the changes to the repository. - Create Github release (). - Build the Python package. `poetry build` - If not present already, add the API token to Poetry. `poetry config pypi-token.pypi API_TOKEN` - Publish the Python package. `poetry publish` +- Trigger the [Publish Documentation](https://github.com/project-codeflare/codeflare-sdk/actions/workflows/publish-documentation.yaml) workflow diff --git a/docs/sphinx/index.rst b/docs/sphinx/index.rst index fdf4c15b..3c6fe876 100644 --- a/docs/sphinx/index.rst +++ b/docs/sphinx/index.rst @@ -16,14 +16,16 @@ The CodeFlare SDK is an intuitive, easy-to-use python interface for batch resour modules .. toctree:: - :maxdepth: 2 + :maxdepth: 1 :caption: User Documentation: user-docs/authentication user-docs/cluster-configuration + user-docs/ray-cluster-interaction user-docs/e2e user-docs/s3-compatible-storage user-docs/setup-kueue + user-docs/ui-widgets Quick Links =========== diff --git a/docs/sphinx/user-docs/authentication.rst b/docs/sphinx/user-docs/authentication.rst index d07063d9..82441d56 100644 --- a/docs/sphinx/user-docs/authentication.rst +++ b/docs/sphinx/user-docs/authentication.rst @@ -39,7 +39,7 @@ a login command like ``oc login --token= --server=`` their kubernetes config file should have updated. If the user has not specifically authenticated through the SDK by other means such as ``TokenAuthentication`` then the SDK will try to use their default -Kubernetes config file located at ``"/HOME/.kube/config"``. +Kubernetes config file located at ``"$HOME/.kube/config"``. Method 3 Specifying a Kubernetes Config File -------------------------------------------- @@ -62,5 +62,5 @@ Method 4 In-Cluster Authentication ---------------------------------- If a user does not authenticate by any of the means detailed above and -does not have a config file at ``"/HOME/.kube/config"`` the SDK will try +does not have a config file at ``"$HOME/.kube/config"`` the SDK will try to authenticate with the in-cluster configuration file. 
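As a rough illustration of that in-cluster fallback, the standard Kubernetes client exposes the same detection pattern. This is only an approximation for context, not the SDK's internal code, and it assumes the ``kubernetes`` Python client is available and, for the in-cluster branch, that a service-account token is mounted in the pod.

.. code:: python

   from kubernetes import config
   from kubernetes.config import ConfigException

   try:
       # Running inside a pod: use the mounted service-account token and CA bundle.
       config.load_incluster_config()
   except ConfigException:
       # Running outside a pod: fall back to the default $HOME/.kube/config file.
       config.load_kube_config()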
diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index 1fe28c64..6d27b0f4 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -29,13 +29,14 @@ requirements for creating the Ray Cluster. labels={"exampleLabel": "example", "secondLabel": "example"}, )) -Note: ‘quay.io/modh/ray:2.35.0-py39-cu121’ is the default image used by -the CodeFlare SDK for creating a RayCluster resource. If you have your -own Ray image which suits your purposes, specify it in image field to -override the default image. If you are using ROCm compatible GPUs you -can use ‘quay.io/modh/ray:2.35.0-py39-rocm61’. You can also find -documentation on building a custom image -`here `__. +.. note:: + `quay.io/modh/ray:2.35.0-py39-cu121` is the default image used by + the CodeFlare SDK for creating a RayCluster resource. If you have your + own Ray image which suits your purposes, specify it in image field to + override the default image. If you are using ROCm compatible GPUs you + can use `quay.io/modh/ray:2.35.0-py39-rocm61`. You can also find + documentation on building a custom image + `here `__. The ``labels={"exampleLabel": "example"}`` parameter can be used to apply additional labels to the RayCluster resource. @@ -46,7 +47,8 @@ After creating their ``cluster``, a user can call ``cluster.up()`` and Deprecating Parameters ---------------------- -The following parameters of the ``ClusterConfiguration`` are being deprecated. +The following parameters of the ``ClusterConfiguration`` are being +deprecated. .. list-table:: :header-rows: 1 diff --git a/docs/sphinx/user-docs/e2e.rst b/docs/sphinx/user-docs/e2e.rst index e64032e2..846536f1 100644 --- a/docs/sphinx/user-docs/e2e.rst +++ b/docs/sphinx/user-docs/e2e.rst @@ -11,7 +11,7 @@ On KinD clusters Pre-requisite for KinD clusters: please add in your local ``/etc/hosts`` file ``127.0.0.1 kind``. This will map your localhost IP address to the -KinD cluster’s hostname. This is already performed on `GitHub +KinD cluster's hostname. This is already performed on `GitHub Actions `__ If the system you run on contains NVidia GPU then you can enable the GPU @@ -91,7 +91,7 @@ instructions `__. poetry install --with test,docs poetry run pytest -v -s ./tests/e2e/mnist_raycluster_sdk_kind_test.py - - If the cluster doesn’t have NVidia GPU support then we need to + - If the cluster doesn't have NVidia GPU support then we need to disable NVidia GPU tests by providing proper marker: :: @@ -124,8 +124,8 @@ If the system you run on contains NVidia GPU then you can enable the GPU support on OpenShift, this will allow you to run also GPU tests. To enable GPU on OpenShift follow `these instructions `__. -Currently the SDK doesn’t support tolerations, so e2e tests can’t be -executed on nodes with taint (i.e. GPU taint). +Currently the SDK doesn't support tolerations, so e2e tests can't be +executed on nodes with taint (i.e. GPU taint). - Test Phase: @@ -203,8 +203,9 @@ On OpenShift Disconnected clusters AWS_STORAGE_BUCKET= AWS_STORAGE_BUCKET_MNIST_DIR= - Note : When using the Python Minio client to connect to a minio - storage bucket, the ``AWS_DEFAULT_ENDPOINT`` environment - variable by default expects secure endpoint where user can use - endpoint url with https/http prefix for autodetection of - secure/insecure endpoint. + .. 
note:: + When using the Python Minio client to connect to a minio + storage bucket, the ``AWS_DEFAULT_ENDPOINT`` environment + variable by default expects secure endpoint where user can use + endpoint url with https/http prefix for autodetection of + secure/insecure endpoint. diff --git a/docs/sphinx/user-docs/images/ui-buttons.png b/docs/sphinx/user-docs/images/ui-buttons.png new file mode 100644 index 0000000000000000000000000000000000000000..a274929203e5cbad91aa2a3dc21a08363f9a9b73 GIT binary patch literal 22385 zcmeIac{rAB`!;&3P|_fYM2ZFt5|TtTpinZ;88c_dJkyMlCR9R^GGv}-r6OZw$V@`! zd7i%ge1G5iXKmYWec$@y`)jRd+urwi((Ssh>painIQD%%_T%!Em%F%$j+Kr=p=^@A zB%w&5tjWWF53Z-d|36HS+>d{)vpX-XydM9!ufOhtKku}cRJT{MHnw-tw>6@eSXo;d z@z@#K8W~yHnOfVAt}PV9hYpYrU9dIMw>Pu4+M{e{X+%+WFxtZ>v`5LvW)J^y{u6un z_)iN8pFS=a?e^W2LfJ!+mN=*E{AQ@lNnN>QMSN^l@qOysHMgUEqNCRzmOSxY=F?}L z$Ye?X{jU3iSn8tHvKxLIz7M+gcq?{8MW|@9azdDv_^qKlwra zckf{GfhpGi?GH#%E135F@=IL^_OXwY=T2K$99hLRFDc!k&7*hqosfDbe^#Pyr9R^k zuA6q`i8r$|l8|^|M}3kP!$jF*x!;3Fp zzTEC$=Od}~mO%(6j8KL_i2 z`krx{88cH;-Z$07rj_T+r<3n`FR@@jX8Q28oD;@XZ#3$zaB*^;DHh4g-pP`|PAwV+bZO-IM?J`7wo{k8j*KdCcN^z`&7T#>W7 zu`4lsRXydwU-&~R>)n!4QU-rZ zpGIHHJz21_V3C!T^*86!wOmL2nW3iRCr(H#FHF~@7|Fi>__2FodhmCO(MHFi-?wgd ze}5+zdU`&SYP9Cv1zP%DCm&HA=xK85@OY4vbUZyx^5G8sng~f&mj|2KV;2eo1DVcW zxRA;d(zm&@ukXQC@#XE5C$C@oy?n{?=G4tKxUJhCKOQ`A<@LHKMGbDty_^d_k9Src zJa{$iYmCyzpRp=Oruu);W@KcXj_26D`>{r<@#m(doxeU_y%?hyRTp#-&oK3`X8nc@ zpZ%DQ$VW){F5M_lRZvjy2?(gpoKRF$l>elj?0zIb?3k|CvY_Kg>wJ6Xw56Ds*xa9o zyeXB%c72=F)YQ_<>t%Q>JBx1p&T|piaZKBXY>QoA)xGoYN;4f97(9-$jP-Ice16O> zV$)efWk1-kDfF~M*?3U~wsZ~Uz~z^3$M0>TFdbGdddX*KGdt3D)a$jV!@&0Mk9OXv ziM$x;GT9qD_2R|$6V~l)Jep}m(F&0i%7zUa)RPUU<-$aL@w`Q?t!zw3R3!p-pXu!H z_Z75g&>R2rjp8l+`~a2rKwn2)o$@D*R2tgNY(>Qnw&)uh(`?zYg)EMhl~t^3;qr`@ zsMiYHg$J9zJ=!TSHqj${qb*19V)#Y5>({UE*|UdY*_?Lk!2?>=L>>Qs|4h}@)!*tD z-=A9;OcYKUTl=TCmj>%lzlh_!_t2pYnbsXr#rHSeT$nO@`TBK)!8gBCo=c}FML5)g z=5?hRtzX2PIJvlz-REt!vu&xX!_M4MjFE`fO#hjvEB?Ewsi!>q*L!KkH0zGtSfTF| zJ>^|JJyC~cnd$DWVHa`yiX)$7*>aLi*mm9YV51sO2TuOVT=$k%#1TGXR30Q5a?1 zN8->UV!lR5J{+5F)GaP4xyW?+@|#l(AFqZ>1s}U6pK5&NYe|VALYL*VJ-6q|yh;5@ zmR-BL>*Lhalk}+8tX-Q65)l z$;s#J?N6v82fcl}=aY6ezit&f7Z+D+j>ATMef@tOh2n2QLnrG-@NIf68J4G9r=kyc zUQ4;YfgBe^f23NXjy(7F->D{Bj4NN?o11gCA8ltZC@45((@C*v&AL?e+x(0 zM@v*h>b31_NOs*RV5~RUTiLYvMy}K7ZV8F$dx}3v0_^%ioo4on=GLuSvG(HP;x}5e z7!Dmel$4&X;vpS;ScMMZN1?nC^I$Cr;3)B?TKhBILajarfka{|$>1AZeJJ;Z3m3kA z{o0v3TA;M+<%^EpV(vcKc73s)u_HB+v&n%84;$n#s1C9#9c= zo1s5V{f;SR$&TC z>N|PvtxQb!uH&Trkh)0e|*i9uRgS~K|I!GmilM~@z* zi#qeokLixT|E86dMQf5?`}_MT6gxXR)D78b>i9~eR0;*5aZX0YZ@|dR%=fTLY;?g? 
literal 0
HcmV?d00001

diff --git a/docs/sphinx/user-docs/ray-cluster-interaction.rst b/docs/sphinx/user-docs/ray-cluster-interaction.rst
new file mode 100644
index 00000000..8e7929b4
--- /dev/null
+++ b/docs/sphinx/user-docs/ray-cluster-interaction.rst
@@ -0,0 +1,90 @@
+Ray Cluster Interaction
+=======================
+
+The CodeFlare SDK offers multiple ways to interact with Ray Clusters
+including the below methods.
+
+get_cluster()
+-------------
+
+The ``get_cluster()`` function is used to initialise a ``Cluster``
+object from a pre-existing Ray Cluster/AppWrapper.
Below is an example +of it's usage: + +:: + + from codeflare_sdk import get_cluster + cluster = get_cluster(cluster_name="raytest", namespace="example", is_appwrapper=False, write_to_file=False) + -> output: Yaml resources loaded for raytest + cluster.status() + -> output: + 🚀 CodeFlare Cluster Status 🚀 + ╭─────────────────────────────────────────────────────────────────╮ + │ Name │ + │ raytest Active ✅ │ + │ │ + │ URI: ray://raytest-head-svc.example.svc:10001 │ + │ │ + │ Dashboard🔗 │ + │ │ + ╰─────────────────────────────────────────────────────────────────╯ + (, True) + cluster.down() + cluster.up() # This function will create an exact copy of the retrieved Ray Cluster only if the Ray Cluster has been previously deleted. + +| These are the parameters the ``get_cluster()`` function accepts: +| ``cluster_name: str # Required`` -> The name of the Ray Cluster. +| ``namespace: str # Default: "default"`` -> The namespace of the Ray Cluster. +| ``is_appwrapper: bool # Default: False`` -> When set to +| ``True`` the function will attempt to retrieve an AppWrapper instead of a Ray Cluster. +| ``write_to_file: bool # Default: False`` -> When set to ``True`` the Ray Cluster/AppWrapper will be written to a file similar to how it is done in ``ClusterConfiguration``. + +list_all_queued() +----------------- + +| The ``list_all_queued()`` function returns (and prints by default) a list of all currently queued-up Ray Clusters in a given namespace. +| It accepts the following parameters: +| ``namespace: str # Required`` -> The namespace you want to retrieve the list from. +| ``print_to_console: bool # Default: True`` -> Allows the user to print the list to their console. +| ``appwrapper: bool # Default: False`` -> When set to ``True`` allows the user to list queued AppWrappers. + +list_all_clusters() +------------------- + +| The ``list_all_clusters()`` function will return a list of detailed descriptions of Ray Clusters to the console by default. +| It accepts the following parameters: +| ``namespace: str # Required`` -> The namespace you want to retrieve the list from. +| ``print_to_console: bool # Default: True`` -> A boolean that allows the user to print the list to their console. + +.. note:: + + The following methods require a ``Cluster`` object to be + initialized. See :doc:`./cluster-configuration` + +cluster.up() +------------ + +| The ``cluster.up()`` function creates a Ray Cluster in the given namespace. + +cluster.down() +-------------- + +| The ``cluster.down()`` function deletes the Ray Cluster in the given namespace. + +cluster.status() +---------------- + +| The ``cluster.status()`` function prints out the status of the Ray Cluster's state with a link to the Ray Dashboard. + +cluster.details() +----------------- + +| The ``cluster.details()`` function prints out a detailed description of the Ray Cluster's status, worker resources and a link to the Ray Dashboard. + +cluster.wait_ready() +-------------------- + +| The ``cluster.wait_ready()`` function waits for the requested cluster to be ready, up to an optional timeout and checks every 5 seconds. +| It accepts the following parameters: +| ``timeout: Optional[int] # Default: None`` -> Allows the user to define a timeout for the ``wait_ready()`` function. +| ``dashboard_check: bool # Default: True`` -> If enabled the ``wait_ready()`` function will wait until the Ray Dashboard is ready too. 
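As an illustrative sketch of how these interaction methods combine, assuming a pre-existing Ray Cluster named ``raytest`` in the ``example`` namespace, and assuming ``list_all_clusters`` can be imported from the top-level ``codeflare_sdk`` package in the same way as ``get_cluster``, a typical session might look like this:

.. code:: python

   from codeflare_sdk import get_cluster, list_all_clusters

   # Retrieve the existing Ray Cluster and block until it (and its dashboard) is ready
   cluster = get_cluster(cluster_name="raytest", namespace="example")
   cluster.wait_ready(timeout=300, dashboard_check=True)

   # Print a detailed description of the cluster, then list every cluster in the namespace
   cluster.details()
   list_all_clusters(namespace="example", print_to_console=True)

   # Tear the Ray Cluster down once the work is finished
   cluster.down()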
diff --git a/docs/sphinx/user-docs/s3-compatible-storage.rst b/docs/sphinx/user-docs/s3-compatible-storage.rst index 60937441..0ca2cc0d 100644 --- a/docs/sphinx/user-docs/s3-compatible-storage.rst +++ b/docs/sphinx/user-docs/s3-compatible-storage.rst @@ -82,5 +82,5 @@ Lastly the new ``run_config`` must be added to the Trainer: To find more information on creating a Minio Bucket compatible with RHOAI you can refer to this `documentation `__. -Note: You must have ``sf3s`` and ``pyarrow`` installed in your +Note: You must have ``s3fs`` and ``pyarrow`` installed in your environment for this method. diff --git a/docs/sphinx/user-docs/setup-kueue.rst b/docs/sphinx/user-docs/setup-kueue.rst index 86956e01..1f2bdc04 100644 --- a/docs/sphinx/user-docs/setup-kueue.rst +++ b/docs/sphinx/user-docs/setup-kueue.rst @@ -11,10 +11,9 @@ Kueue resources, namely Cluster Queue, Resource Flavor, and Local Queue. 1. Resource Flavor: ------------------- -Resource Flavors allow the cluster admin to define different types of -resources with specific characteristics, such as CPU, memory, GPU, etc. -These can then be assigned to workloads to ensure they are executed on -appropriate resources. +Resource Flavors allow the cluster admin to reflect differing resource capabilities +of nodes within a clusters, such as CPU, memory, GPU, etc. These can then be assigned +to workloads to ensure they are executed on nodes with appropriate resources. The YAML configuration provided below creates an empty Resource Flavor named default-flavor. It serves as a starting point and does not specify diff --git a/docs/sphinx/user-docs/ui-widgets.rst b/docs/sphinx/user-docs/ui-widgets.rst new file mode 100644 index 00000000..6c797e04 --- /dev/null +++ b/docs/sphinx/user-docs/ui-widgets.rst @@ -0,0 +1,55 @@ +Jupyter UI Widgets +================== + +Below are some examples of the Jupyter UI Widgets that are included in +the CodeFlare SDK. + +.. note:: + To use the widgets functionality you must be using the CodeFlare SDK in a Jupyter Notebook environment. + +Cluster Up/Down Buttons +----------------------- + +The Cluster Up/Down buttons appear after successfully initialising your +`ClusterConfiguration `__. +There are two buttons and a checkbox ``Cluster Up``, ``Cluster Down`` +and ``Wait for Cluster?`` which mimic the +`cluster.up() `__, +`cluster.down() `__ and +`cluster.wait_ready() `__ +functionality. + +After initialising their ``ClusterConfiguration`` a user can select the +``Wait for Cluster?`` checkbox then click the ``Cluster Up`` button to +create their Ray Cluster and wait until it is ready. The cluster can be +deleted by clicking the ``Cluster Down`` button. + +.. image:: images/ui-buttons.png + :alt: An image of the up/down ui buttons + +View Clusters UI Table +---------------------- + +The View Clusters UI Table allows a user to see a list of Ray Clusters +with information on their configuration including number of workers, CPU +requests and limits along with the clusters status. + +.. image:: images/ui-view-clusters.png + :alt: An image of the view clusters ui table + +Above is a list of two Ray Clusters ``raytest`` and ``raytest2`` each of +those headings is clickable and will update the table to view the +selected Cluster's information. There are three buttons under the table +``Cluster Down``, ``View Jobs`` and ``Open Ray Dashboard``. \* The +``Cluster Down`` button will delete the selected Cluster. \* The +``View Jobs`` button will try to open the Ray Dashboard's Jobs view in a +Web Browser. 
The link will also be printed to the console. \* The +``Open Ray Dashboard`` button will try to open the Ray Dashboard view in +a Web Browser. The link will also be printed to the console. + +The UI Table can be viewed by calling the following function. + +.. code:: python + + from codeflare_sdk import view_clusters + view_clusters() # Accepts namespace parameter but will try to gather the namespace from the current context From e8432bf0d196dfea50c267a49b155accd28dbd55 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 16 Oct 2024 12:33:45 +0100 Subject: [PATCH 379/496] Intuit python version for compatible Ray image --- .../ray/cluster/generate_yaml.py | 20 ++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/src/codeflare_sdk/ray/cluster/generate_yaml.py b/src/codeflare_sdk/ray/cluster/generate_yaml.py index 0b174650..83a2f958 100755 --- a/src/codeflare_sdk/ray/cluster/generate_yaml.py +++ b/src/codeflare_sdk/ray/cluster/generate_yaml.py @@ -18,6 +18,7 @@ """ import json +import sys import typing import yaml import os @@ -31,6 +32,11 @@ ) import codeflare_sdk +SUPPORTED_PYTHON_VERSIONS = { + "3.9": "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", + "3.11": "quay.io/modh/ray:2.35.0-py311-cu121", +} + def read_template(template): with open(template, "r") as stream: @@ -88,9 +94,17 @@ def update_names( def update_image(spec, image): containers = spec.get("containers") - if image != "": - for container in containers: - container["image"] = image + if not image: + python_version = f"{sys.version_info.major}.{sys.version_info.minor}" + try: + if python_version in SUPPORTED_PYTHON_VERSIONS: + image = SUPPORTED_PYTHON_VERSIONS[python_version] + except Exception: # pragma: no cover + print( + f"Python version '{python_version}' is not supported. Only {', '.join(SUPPORTED_PYTHON_VERSIONS.keys())} are supported." + ) + for container in containers: + container["image"] = image def update_image_pull_secrets(spec, image_pull_secrets): From 2c7bcfa5ac5a9506292687bfc4a3cc2920fffcd6 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Thu, 17 Oct 2024 13:43:32 +0100 Subject: [PATCH 380/496] Adjust demo notebooks with new ray image defaults --- .../additional-demos/hf_interactive.ipynb | 8 +++- .../additional-demos/local_interactive.ipynb | 8 +++- .../additional-demos/ray_job_client.ipynb | 8 +++- demo-notebooks/guided-demos/0_basic_ray.ipynb | 8 +++- .../guided-demos/1_cluster_job_client.ipynb | 8 +++- .../guided-demos/2_basic_interactive.ipynb | 8 +++- .../guided-demos/3_widget_example.ipynb | 8 +++- .../notebook-ex-outputs/0_basic_ray.ipynb | 8 +++- .../1_cluster_job_client.ipynb | 8 +++- .../2_basic_interactive.ipynb | 8 +++- .../preview_nbs/0_basic_ray.ipynb | 8 +++- .../preview_nbs/1_cluster_job_client.ipynb | 8 +++- .../preview_nbs/2_basic_interactive.ipynb | 8 +++- .../ray/cluster/generate_yaml.py | 14 +++---- .../ray/cluster/test_generate_yaml.py | 38 ++++++++++++++++++- 15 files changed, 122 insertions(+), 34 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 39835929..d75d96ec 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -68,8 +68,12 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. 
Below, we define our cluster object (which generates a corresponding Ray Cluster).\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index a491b97e..09cb9b89 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -35,8 +35,12 @@ "metadata": {}, "source": [ "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index a16ae6e6..31c5793e 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -41,8 +41,12 @@ "metadata": {}, "source": [ "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 58a52727..51fb0026 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -47,8 +47,12 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." 
+ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 05682d82..aaed9930 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -41,8 +41,12 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 1612af3f..8610a055 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -44,8 +44,12 @@ "source": [ "Once again, let's start by running through the same cluster setup as before:\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/guided-demos/3_widget_example.ipynb b/demo-notebooks/guided-demos/3_widget_example.ipynb index 11521ec7..cf0df6dd 100644 --- a/demo-notebooks/guided-demos/3_widget_example.ipynb +++ b/demo-notebooks/guided-demos/3_widget_example.ipynb @@ -47,8 +47,12 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." 
+ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 3d581e6f..3e6dc193 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -47,8 +47,12 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index 1ccad58c..e79d47e7 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -41,8 +41,12 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 80207c37..f6417521 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -44,8 +44,12 @@ "source": [ "Once again, let's start by running through the same cluster setup as before:\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. 
\n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 3d581e6f..3e6dc193 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -47,8 +47,12 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index 0a2b9343..40195d64 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -41,8 +41,12 @@ "source": [ "Here, we want to define our cluster by specifying the resources we require for our batch workload. Below, we define our cluster object (which generates a corresponding RayCluster).\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. \n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index a58c249e..8838a5ba 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -44,8 +44,12 @@ "source": [ "Once again, let's start by running through the same cluster setup as before:\n", "\n", - "NOTE: 'quay.io/modh/ray:2.35.0-py39-cu121' is the default image used by the CodeFlare SDK for creating a RayCluster resource. 
\n", - "If you have your own Ray image which suits your purposes, specify it in image field to override the default image." + "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", + "\n", + "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "\n", + "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] }, { diff --git a/src/codeflare_sdk/ray/cluster/generate_yaml.py b/src/codeflare_sdk/ray/cluster/generate_yaml.py index 83a2f958..01823f1d 100755 --- a/src/codeflare_sdk/ray/cluster/generate_yaml.py +++ b/src/codeflare_sdk/ray/cluster/generate_yaml.py @@ -20,6 +20,7 @@ import json import sys import typing +import warnings import yaml import os import uuid @@ -34,7 +35,7 @@ SUPPORTED_PYTHON_VERSIONS = { "3.9": "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", - "3.11": "quay.io/modh/ray:2.35.0-py311-cu121", + "3.11": "quay.io/modh/ray@sha256:db667df1bc437a7b0965e8031e905d3ab04b86390d764d120e05ea5a5c18d1b4", } @@ -96,12 +97,11 @@ def update_image(spec, image): containers = spec.get("containers") if not image: python_version = f"{sys.version_info.major}.{sys.version_info.minor}" - try: - if python_version in SUPPORTED_PYTHON_VERSIONS: - image = SUPPORTED_PYTHON_VERSIONS[python_version] - except Exception: # pragma: no cover - print( - f"Python version '{python_version}' is not supported. Only {', '.join(SUPPORTED_PYTHON_VERSIONS.keys())} are supported." + if python_version in SUPPORTED_PYTHON_VERSIONS: + image = SUPPORTED_PYTHON_VERSIONS[python_version] + else: + warnings.warn( + f"No default Ray image defined for {python_version}. Please provide your own image or use one of the following python versions: {', '.join(SUPPORTED_PYTHON_VERSIONS.keys())}." ) for container in containers: container["image"] = image diff --git a/src/codeflare_sdk/ray/cluster/test_generate_yaml.py b/src/codeflare_sdk/ray/cluster/test_generate_yaml.py index 68c6aa89..606cc950 100644 --- a/src/codeflare_sdk/ray/cluster/test_generate_yaml.py +++ b/src/codeflare_sdk/ray/cluster/test_generate_yaml.py @@ -11,7 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from codeflare_sdk.ray.cluster.generate_yaml import gen_names +from collections import namedtuple +import sys +from .generate_yaml import gen_names, update_image import uuid @@ -32,3 +34,37 @@ def test_gen_names_without_name(mocker): appwrapper_name, cluster_name = gen_names(None) assert appwrapper_name.startswith("appwrapper-") assert cluster_name.startswith("cluster-") + + +def test_update_image_without_supported_python_version(mocker): + # Mock SUPPORTED_PYTHON_VERSIONS + mocker.patch.dict( + "codeflare_sdk.ray.cluster.generate_yaml.SUPPORTED_PYTHON_VERSIONS", + { + "3.9": "ray-py3.9", + "3.11": "ray-py3.11", + }, + ) + + # Create a namedtuple to mock sys.version_info + VersionInfo = namedtuple( + "version_info", ["major", "minor", "micro", "releaselevel", "serial"] + ) + mocker.patch.object(sys, "version_info", VersionInfo(3, 8, 0, "final", 0)) + + # Mock warnings.warn to check if it gets called + warn_mock = mocker.patch("warnings.warn") + + # Create a sample spec + spec = {"containers": [{"image": None}]} + + # Call the update_image function with no image provided + update_image(spec, None) + + # Assert that the warning was called with the expected message + warn_mock.assert_called_once_with( + "No default Ray image defined for 3.8. Please provide your own image or use one of the following python versions: 3.9, 3.11." + ) + + # Assert that no image was set in the containers since the Python version is not supported + assert spec["containers"][0]["image"] is None From 51d0b2f2b1ba90a0aa314e9b93eed2f20a4efd6b Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Thu, 17 Oct 2024 13:46:19 +0100 Subject: [PATCH 381/496] Remove image from base-template --- docs/sphinx/user-docs/cluster-configuration.rst | 13 +++++++++---- src/codeflare_sdk/ray/templates/base-template.yaml | 2 -- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index 6d27b0f4..238ad51c 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -30,10 +30,15 @@ requirements for creating the Ray Cluster. )) .. note:: - `quay.io/modh/ray:2.35.0-py39-cu121` is the default image used by - the CodeFlare SDK for creating a RayCluster resource. If you have your - own Ray image which suits your purposes, specify it in image field to - override the default image. If you are using ROCm compatible GPUs you + The default images used by the CodeFlare SDK for creating + a RayCluster resource depend on the installed Python version: + + - For Python 3.9: `quay.io/modh/ray:2.35.0-py39-cu121` + - For Python 3.11: `quay.io/modh/ray:2.35.0-py311-cu121` + + If you prefer to use a custom Ray image that better suits your + needs, you can specify it in the image field to override the default. + If you are using ROCm compatible GPUs you can use `quay.io/modh/ray:2.35.0-py39-rocm61`. You can also find documentation on building a custom image `here `__. 
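To pin a specific image instead of relying on the Python-version-based default described in the note above, the ``image`` field of ``ClusterConfiguration`` can be set explicitly. A minimal sketch is shown below; the ``name``, ``namespace`` and ``num_workers`` arguments, as well as the top-level ``Cluster`` and ``ClusterConfiguration`` imports, are assumed here for illustration, with the ROCm image taken from the note.

.. code:: python

   from codeflare_sdk import Cluster, ClusterConfiguration

   # Explicitly override the default Ray image (otherwise chosen from the local Python version)
   cluster = Cluster(ClusterConfiguration(
       name="raytest",
       namespace="default",
       num_workers=2,
       image="quay.io/modh/ray:2.35.0-py39-rocm61",  # ROCm-compatible image mentioned in the note above
   ))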
diff --git a/src/codeflare_sdk/ray/templates/base-template.yaml b/src/codeflare_sdk/ray/templates/base-template.yaml index 19e5b191..b59d2a49 100644 --- a/src/codeflare_sdk/ray/templates/base-template.yaml +++ b/src/codeflare_sdk/ray/templates/base-template.yaml @@ -69,7 +69,6 @@ spec: containers: # The Ray head pod - name: ray-head - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 imagePullPolicy: Always ports: - containerPort: 6379 @@ -150,7 +149,6 @@ spec: spec: containers: - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 # environment variables to set in the container.Optional. # Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ lifecycle: From fd3877d9bdaf276d91f157658a375508633d0374 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 17 Oct 2024 15:28:36 +0100 Subject: [PATCH 382/496] fix: publish documentation workflow --- .github/workflows/release.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 16b5aac4..ccac5260 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -54,6 +54,8 @@ jobs: --repo ${{ github.event.inputs.codeflare-repository-organization }}/codeflare-sdk \ --ref ${{ github.ref }} \ --field codeflare_sdk_release_version=${{ github.event.inputs.release-version }} + env: + GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} - name: Copy demo notebooks into SDK package run: cp -r demo-notebooks src/codeflare_sdk/demo-notebooks - name: Run poetry build From 2e28f8acd33c5f410c5f06958d5b6ba37e183eb9 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Thu, 17 Oct 2024 16:57:59 +0100 Subject: [PATCH 383/496] Pull latest changes before syncing to notebooks --- .github/workflows/odh-notebooks-sync.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index ecb5b0a5..f0853bff 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -47,8 +47,8 @@ jobs: git config --global user.email "138894154+codeflare-machine-account@users.noreply.github.com" git config --global user.name "codeflare-machine-account" git remote -v - git fetch upstream $BRANCH_NAME git checkout $BRANCH_NAME + git pull upstream $BRANCH_NAME && git push origin $BRANCH_NAME - name: Setup Python environment uses: actions/setup-python@v4 From aee7c20cc54d9d2cd566ef8dab4ae6c5b667bf20 Mon Sep 17 00:00:00 2001 From: Varsha Prasad Narsing Date: Tue, 8 Oct 2024 11:35:50 -0700 Subject: [PATCH 384/496] [Add] Design doc for CodeFlare SDK This PR adds design documentation for CodeFlare SDK to the repository for reference in future. 
---
 .gitignore | 1 +
 docs/designs/CodeFlare-SDK-design-doc.md | 141 ++++++++++++++++++
 .../History/CodeFlareSDK_Design_Doc.md | 1 +
 docs/images/codeflare_sdk.png | Bin 0 -> 106171 bytes
 docs/images/codeflare_stack_arch.png | Bin 0 -> 290535 bytes
 5 files changed, 143 insertions(+)
 create mode 100644 docs/designs/CodeFlare-SDK-design-doc.md
 rename CodeFlareSDK_Design_Doc.md => docs/designs/History/CodeFlareSDK_Design_Doc.md (98%)
 create mode 100644 docs/images/codeflare_sdk.png
 create mode 100644 docs/images/codeflare_stack_arch.png

diff --git a/.gitignore b/.gitignore
index 9ac5d687..52304a62 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,3 +14,4 @@ ui-tests/playwright-report
 ui-tests/test-results
 /src/codeflare_sdk.egg-info/
 docs/sphinx/_build
+.idea/
diff --git a/docs/designs/CodeFlare-SDK-design-doc.md b/docs/designs/CodeFlare-SDK-design-doc.md
new file mode 100644
index 00000000..aecdfd56
--- /dev/null
+++ b/docs/designs/CodeFlare-SDK-design-doc.md
@@ -0,0 +1,141 @@
+# CodeFlare SDK Design Document
+
+Author: [@varshaprasad96](https://github.com/varshaprasad96)
+
+## Introduction
+This document outlines the design of the Project CodeFlare SDK, a Python SDK that facilitates interactions between users and the distributed workloads component of Red Hat OpenShift AI (RHOAI) / OpenDataHub (ODH). Users, in this instance, are both data scientists and MLOps Engineers. The SDK provides a high-level abstraction for managing machine learning (ML) workflows, jobs and distributed computing resources.
+
+This document should be followed when adding new functionality, and it should be updated as the design evolves.
+
+## Objectives
+1. Provide a Pythonic, user-friendly interface for interaction with distributed workloads components.
+2. Integrate with Kueue for managing workload queues, quota management, job orchestration and scheduling.
+3. Provide separate functionalities for data scientists/MLOps engineers and platform administrators.
+4. Allow data scientists to manage ML workflows, workloads and jobs.
+5. Leverage Ray and [Kubeflow][Kubeflow] for workloads and jobs.
+6. Ensure extensibility to accommodate future integrations with other tools and frameworks.
+7. Follow best practices in the codebase to make it easier for developers to maintain the project.
+
+## Architecture
+
+### CodeFlare stack components overview:
+1. [CodeFlare SDK][codeflare_sdk_gh]: Primary user-facing component, offering a Python-native interface that bridges users with the underlying services in the stack. This SDK abstracts away much of the complexity, providing a unified and accessible method for submitting and managing the running of distributed AI workloads on Kubernetes clusters.
+
+2. [CodeFlare Operator][codeflare_operator_gh]: Manages the platform requirements for the running of the CodeFlare components:
+ 1. Ensures necessary configurations are present on the cluster to enable access to Ray dashboards (includes ingress and cert management).
+ 2. Validates and mutates Ray cluster/AppWrapper CRUD requests (admission policy).
+ 3. Creates Ray-related Routes/Ingresses based on whether the environment is OpenShift or Kubernetes.
+
+3. [AppWrapper][appwrapper_gh]: AppWrapper groups multiple Kubernetes resources into a single, logical workload for easier management. It is integrated with Kueue and accepts the k8s components to be created and managed through a template in the spec, which is untyped in the form of RawExtension.
+
+4. 
[KubeRay][kuberay_gh]: Operator designed specifically for managing and orchestrating Ray clusters on Kubernetes. It automates the creation and lifecycle management of Ray clusters and jobs, ensuring that they can scale and operate efficiently on K8s. KubeRay also provides a Python client library, enabling developers and data scientists to interact with Ray components directly within their applications.
+
+5. [Kueue][kueue_gh]: Batch workload queuing and scheduling system designed to optimize resource allocation in K8s clusters. It ensures that Ray Clusters are only scheduled when sufficient resources are available, preventing resource contention and maximizing cluster efficiency. This component plays a critical role in balancing resource availability with the demand for distributed workloads, facilitating efficient job execution.
+
+At a high level, the interaction between a data scientist, the CodeFlare SDK, and the rest of the components is explained below:
+
+![Diagram-1](../images/codeflare_stack_arch.png)
+
+The role of the SDK in the model training and tuning stack remains consistent on the client side, regardless of any changes to the components it interacts with on the cluster.
+
+![Diagram-2](../images/codeflare_sdk.png)
+
+#### Considerations while designing SDK:
+1. Ease of use and integration: The SDK’s primary role is to abstract Kubernetes specifics. It should provide simple interfaces for interacting with any of the model training components on the server side.
+2. Lightweight: The SDK runs client-side and should minimize resource consumption. It must prioritize responsiveness and user experience, for example by using a polling mechanism to fetch status instead of constantly watching resources.
+3. Extensibility: The SDK currently integrates well with the CodeFlare stack, which uses Ray and the TorchX (PyTorch) distributed framework. In the future, components used for distributed training/tuning (as seen in figure [2]) should remain interchangeable.
+4. Security: The SDK must ensure users see only the information they are authorized to view. It's the responsibility of the SDK to generate an authenticated request by attaching the right credentials/token during every API call.
+5. Typed Object Creation: The client, to the extent possible, should allow the creation of known, typed K8s resources. This prevents arbitrary payloads from reaching the server, which could be a threat.
+6. Release Versioning: The SDK adheres to the Semantic Versioning format and should follow the specifications outlined in the [SemVer standard][semver_std].
+7. Version Compatibility: The SDK must maintain compatibility between client and server versions. Backward compatibility should be ensured even if one side is upgraded.
+
+#### Codebase Modularization:
+The CodeFlare-SDK should adopt a modular architecture to support the seamless addition and modification of components. The project structure must meet the following key requirements:
+
+1. Component Structure: The SDK should consist of clearly separated components to promote maintainability and scalability. For example:
+ - Distinct Python packages should be created for command modules, helpers, and utilities that interface with Kubernetes (OpenShift) clusters.
+ - Separate packages should be defined for framework-specific clusters, such as Ray.
+2. Self-Contained Packages: Each Python package should function independently as a self-contained module, encapsulating dedicated functionality. 
This approach will simplify management, improve maintainability, and enable the SDK to evolve more efficiently. +3. Testing Framework: + - Unit tests should be integrated within each submodule, ensuring comprehensive coverage that directly correlates with the corresponding code. + - Integration and upgrade tests, which span multiple components, should be managed independently to validate both individual modules and the system’s end-to-end functionality. + +An example of a modular structure would be: +``` +codeflare_sdk/ +├── build/ +│ ├── requirements.txt # Dependencies required for building the project. +│ ├── Dockerfile # Docker configuration for containerizing the SDK. +│ └── image_stream.yaml # YAML file for building and deploying OpenShift image streams. +├── docs/ # Documentation files for guiding users and contributors. Includes design docs. +├── examples/ # Demonstrative examples showcasing SDK usage. +│ ├── guided_demos/ +│ └── additional_demos/ +├── src/ +│ └── codeflare_sdk/ +│ ├── common/ # Self-contained modules shared across SDK components. +│ │ ├── kubernetes_cluster/ # APIs for interacting with K8s clusters (e.g., authentication). +│ │ │ ├── __init__.py +│ │ │ ├── file.py +│ │ │ └── test_test.py # Unit tests contained in the same module. +│ │ ├── kueue/ # API interactions related to Kueue job queuing. +│ │ ├── widgets/ # Reusable UI components for visualization. +│ │ └── utils/ # General utility functions and helpers used across project. +│ ├── ray/ # Framework-specific APIs for Ray clusters. +│ │ ├── cluster/ # Cluster management utilities for Ray. +│ │ ├── client/ # Ray client interaction modules. +│ │ └── appwrapper/ +├── tests/ # End-to-end and upgrade testing modules. +│ ├── e2e/ +│ └── upgrade/ +├── poetry.lock # Project configuration and dependency management (Poetry). +└── pyproject.toml + +``` + +### Client side best practices: +1. The SDK should perform client-side validation of user inputs before sending them to the API server, to the extent possible. These validations should be limited to client-side checks, and the SDK is not responsible for performing validations that require server-side data. The SDK does not need to maintain the state of server-side objects. Examples of client-side validations include verifying user inputs, validating YAML schemas, and checking resource requests/limits based on available data. +2. Any call made to the server should have a client instance passed through it. By default, the SDK should use the same client for all operations unless otherwise specified. For example, the client used to list clusters should also be used to fetch cluster details. The codebase should be designed so that these operations are either methods of the same client (e.g., a k8s client, RayJob client, TrainingClient) or the client instance is passed as an argument to the methods. +3. Errors returned from the API server, to the extent possible, should be wrapped and handled explicitly. + +## Test Coverage: +The project should aim to maintain reasonable test coverage to ensure code quality. Wherever possible, new lines of code should be accompanied by corresponding test cases. Reviewers are encouraged to exercise discretion when merging pull requests (PRs) that result in a drop in test coverage percentage compared to the current state. To aid in tracking coverage, external tools like Codecov can be integrated with the repository, allowing for easier monitoring and ensuring that the codebase maintains a robust level of test coverage over time. 
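+
+As a minimal illustration of the client-instance practice described under *Client side best practices* above, the sketch below shows one way an authenticated Kubernetes client could be created once and passed explicitly through helper calls instead of being rebuilt per operation. The helper names (`get_api_client`, `list_ray_clusters`) and the use of KubeRay's `ray.io/v1` `RayCluster` custom resource here are illustrative assumptions, not part of the SDK's public API:
+
+```python
+from kubernetes import client, config
+
+
+def get_api_client() -> client.ApiClient:
+    # Load kubeconfig credentials once; the resulting client is reused for every call.
+    config.load_kube_config()
+    return client.ApiClient()
+
+
+def list_ray_clusters(api_client: client.ApiClient, namespace: str):
+    # The same authenticated client is passed in explicitly rather than re-created here.
+    custom_api = client.CustomObjectsApi(api_client)
+    return custom_api.list_namespaced_custom_object(
+        group="ray.io", version="v1", namespace=namespace, plural="rayclusters"
+    )
+
+
+api = get_api_client()
+ray_clusters = list_ray_clusters(api, namespace="default")
+```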
+
+## Release:
+A new version of the CodeFlare SDK will be released once every three weeks.
+For details on the release support matrix with other CodeFlare components, refer [here][codeflare_compatibility_matrix].
+RHOAI and ODH support matrix: CodeFlare SDK APIs fall under [Tier 2][RH_customer_API_support] support on the `RHOAI` platform. This implies that an API would be supported for `n-1` major versions for a minimum of 9 months.
+
+## Security:
+Currently, users must manually manage mTLS certificates and re-authenticate Ray clients when connecting to Ray clusters, though the CodeFlare SDK offers helpers to streamline this process (as seen [here][cert_generation]). In the future, integration with a service mesh like Istio will automate mTLS setup and certificate management via a sidecar proxy, eliminating the need for manual certificate handling and client re-authentication.
+
+### CVE Management:
+We currently use two approaches for scanning CVEs:
+1. Dependabot is enabled to automatically bump dependencies with CVE fixes in the upstream repository.
+2. A downstream Snyk scanner is used to identify vulnerabilities.
+
+Depending on the severity of the issue, fixes are prioritized, and a new release with the necessary patches is tagged within the following three-week release window.
+To streamline the CVE management process, it is recommended to automate security scans across multiple branches in the upstream repositories. This ensures that the SDK version used with each Notebook release remains free from vulnerabilities throughout its support cycle.
+
+## Documentation and Examples:
+The following are the documentation sources for users that will need to remain updated:
+1. Upstream documentation - includes the `README` and examples in the `docs/` folder.
+2. Demo notebooks - these need to be tested and updated to ensure that they are up to date with the most recent changes.
+3. For RHOAI/ODH customers - [Official documentation][DW_RHOAI_docs] will be updated after every release.
+
+The API documentation for users should be clear, up to date with any changes, and, if possible, generated automatically using appropriate tools to ensure accuracy and consistency. 
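+
+For reference, the manual mTLS flow described in the Security section above currently looks roughly like the following on the client side. This is a sketch based on the SDK's demo notebooks and the linked `generate_cert` helpers; the exact import path, function names, and the cluster name/namespace values should be treated as assumptions rather than a definitive API reference:
+
+```python
+from codeflare_sdk import generate_cert
+
+cluster_name = "raytest"  # assumed RayCluster name
+namespace = "default"     # assumed namespace
+
+# Create client TLS certificates for the cluster and export the RAY_TLS_*
+# environment variables so that a subsequent ray.init() call over the Ray
+# client route can authenticate. Today this step (and re-authentication when
+# reconnecting) is manual; a service mesh would remove it.
+generate_cert.generate_tls_cert(cluster_name, namespace)
+generate_cert.export_env(cluster_name, namespace)
+```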
+ +Implementation History: +- 2024-10-07: Initial revision + +[Kubeflow]: https://www.kubeflow.org +[codeflare_sdk_gh]: https://github.com/project-codeflare/codeflare-sdk +[codeflare_operator_gh]: https://github.com/project-codeflare/codeflare-operator +[appwrapper_gh]: https://github.com/project-codeflare/appwrapper +[kuberay_gh]: https://github.com/ray-project/kuberay +[kueue_gh]: https://github.com/kubernetes-sigs/kueue +[codeflare_compatibility_matrix]: https://github.com/project-codeflare/codeflare-operator?tab=readme-ov-file#codeflare-operator +[RH_customer_API_support]: https://access.redhat.com/articles/7047935 +[DW_RHOAI_docs]: https://docs.redhat.com/en/documentation/red_hat_openshift_ai_self-managed/2-latest/html-single/working_with_distributed_workloads/index +[cert_generation]: https://github.com/project-codeflare/codeflare-sdk/blob/main/src/codeflare_sdk/common/utils/generate_cert.py +[semver_std]: https://semver.org/#semantic-versioning-specification-semver diff --git a/CodeFlareSDK_Design_Doc.md b/docs/designs/History/CodeFlareSDK_Design_Doc.md similarity index 98% rename from CodeFlareSDK_Design_Doc.md rename to docs/designs/History/CodeFlareSDK_Design_Doc.md index 0274f65d..4992406b 100644 --- a/CodeFlareSDK_Design_Doc.md +++ b/docs/designs/History/CodeFlareSDK_Design_Doc.md @@ -1,3 +1,4 @@ +> 📄 **Note**: This is an older version of the document. The latest version is available [here](../CodeFlare-SDK-design-doc). # CodeFlare SDK Design Doc ## Context and Scope diff --git a/docs/images/codeflare_sdk.png b/docs/images/codeflare_sdk.png new file mode 100644 index 0000000000000000000000000000000000000000..e33638d853c440046c702be8eae7d9ea3bdd6883 GIT binary patch literal 106171 zcmeFZWmr|s`Z$b)k|H7@4HD8)(kT+sz3J}GO{ajg)TTkYySoIWrMslrbT|LSbB^cu zJMrQD^nN+}x;AT=wPx1b9dpkPl9d)kL&igffq_927Za9;fkB{yfq~sYdJ%F zH|7)E4q(37`cg%%eM^K{3qHv%Cw(%&)JrWXk3#~7Ckyw%=Sf5wm9D6$C=BT7Y-=2t z9fr)hl&i1$e&#_t1R@aM^9&{^fJ5qQ&#?a`42(0DHre1a@3))uIcpMYxXRz&_&*V9 zk_~5xsrwW@7ULYzLCFV!vqgb1z>A<7gb}!7UQ0l@Ma@fJL@BQXk-UWa*8X{#ogEwn z^GorIWqKIAUOgY}-m{d@R?{3bvt#GiejrYqkEG9nkI6ZkOdpK7?!mo-k!Y7?0kIHj zw6NIW*Bg7v=UFzKIfCk?R|@r7!70;O$G|1|m;4v%*k4}0Ayp2Xre@@3&@f~vco&Vysq-ebt8caT zGl%lq=SZ?U`Uu;j%b-{fajV5P_wuj=GOH})gZ|-Abft}F<|`+!jymgg;{5fTI=sW* zSxbIz(}3@2BUzs>XGrHDmCim9j3RzfND!hx!8tNH6EMIo#U^JK+W zac6Zq>UPW7Fxgobn~_fKHF^uGFwApAcu5jJHZ)0CRe|xzv7c}UF}&^G&$3A15PdR( zU*kq*ZDDkp6TF4@5+a9p@mqy`)hr|nb5-hBbi9K&iq(t|j4`uJ{k@)qV6Dw9K`+Mn zr#hK4?CTN!rW2j@PG@_a*oy}@Pw%^-GE&TRJp(wR1Uxv%cM?4krWjnws!lXp?RoqN zZxt_)rVaK!=evl5^Ut}TojTrhI0`*mITx0TJk5P8>i0a4xTx>rY(#xqae59uk@t;& zV_qn9C7=xh?C&{UM|`^bR5hH&%0)Sz-}J%qDL)TdEMZrIuY7>)Ep-ilgkr4<7+*1R z^SbBut-cig0K0M7rtw2EaRsBd<>fGBTF8Rm37b18r6*hm%{z#&2d`XtH;(|lq=YJX zf^C^z8AIm%IL2M)Qd@MaG4pW!Wc}P#NCl%dhthrc(`4}|Q+YWNBfe;~Rzx96l>snb zw}Y*dErP(khg?g`;`g^L_+PZ3V5>!9m`j3#g9HBC6cp~~ZCdDccjz9Q7OID1=}+70 zKe#7U3G}WUqdaM$U|p7GfuU@Jo$k1*g8?t2-3q=ygp(s7BKJdjcP&{rl&ymt|762g zPZk;CGw}`90w$^5sSITo4s98c18K-RWSP0{`F_X57?PH^*5Pw^kr$so|HOEz=N(U` z`~~ipZ`liNa>O73y+{LctX_fdUr-Z#20na>#1ccOK*|?9Cky-<{lg`Tus)1OxFK@t z2i2<&rg_iJLe(-m<*@riI!BxxcoKc2MNmerwq8~d6pMf}&bGu)$y^Ed1hl_-;I@sw z;~hY?{|>@^7SbVK<{%k7*zu{1xd^YYGjZ9zCD>7icUm<03yTfqKB9`v+f?t9quxt8 zcP1?#>mKM4_ASl*i-WFKj4k*#zHsd!Z4{xT1}}161S1)KF8=KIdCZ2<@#P{$kT7os z+6df!mw_I(Etf5mEuk&@5qx>dW&E@#StP31b8=$&S3#nER9MlIQBG0e(IsEG#EE~H z87NesaKf`U13x zKs=ejjG=6Z0yHl-|02(Ne0+Rhf@eZ+T-eNhyl>(>FGayE?>sU`sxsHsWYvUu_T}vR 
zS)^IZS$wV(E2)dz+@B}g5?gGi3a8Ib(RWZCsT@lh1Uzb7vt667vCiV~UZK&V)pWy$ z3579--QmTty0TIyP9_parEbs-ej5Cc$j)|Cg{$_aDx=D-8ryvDCs*l2w$y0Nx4mg~iZ5;dYmPeSa-<_wsJxdg_e-^*c0Q z)Cx2RJ}d4#`**ZGG=0=u+!A&k%V`!xd7q6x69G_A&Xy`kw+qTbPh#nl!g|Mi{Hyeobyf6Lsp&gT?lf12 zIYynqLZ3nz@ICNLq|&8|2YJ;SiqrNC=FH~IorJj|nl%etjs53&%}E|P9@%#k&&Jyq z1lP)j(mil><2TIc_G<8 zxt3#zFrAmoLU`m$4o0cwuI42}EW?z@sOZy(WZnwfHCmNtD(O(yDRnE<+7T<1`NV~m z2Ddti1?YSuFCDKuZvl^0g#&1$sH@066@D0IcqUbphu^i{ZQX6}0;%|;&}Ve@H*$(>U8gr4x{X_7LaN`pm1lM&Gy`T%rAxx$6#l&5}j|n^uEkK0%KZW8Xye@;RUU}G=I57g+N&>wu8&! zJ}|(96O9)Au{Vr5E9QGlF|~)jUv~-79Osaw#!(&qs%5tfp_ixc%I9PxQ7SXZHL3Zu zPhwJ^79y|xak>gO&a!RC3PxS82=4LjQG8l;V!x?xz0n|bhwMkUkTuhGQt2CtfOE8s zRE;9OyeW9m+=o0k`}K^Eo5oGu$xB*(XkWw5%50ysyoN)4p1IdxbXJggG7*y9#g605I1Omvw`$WhCTkhhAuX|ppBQ4e;vlh}ssq~YD^nP) zkcZh;au4x2`HO6597bGF+yZF7*8K*#7|WlQsFa|&!V}A0skflEkg9}Fdqb^87tJxu zLQ2#{*kx6%zq)_YGyg|Xoz`pF`o_LIGr5TyM_)&k$(z#BIi03-htAxvjIi4G{uTwL zH6;vllJgp6SKE;9^u}KLcgmF$Hs;%trE}lsxN3T=tn-iZ@@I6b^iAp2XbbliIjuI= zHGLZKpajtOdy{jT4bOHX_gq@o9S_1hR&Ek6D$l0~%eZ76zuJ~%5#^^TFU@_O`8g6l zKkfN;SJplnJl5VR9au}q#4Yclc(FCAKdyfgaYoF@vj`Tl%h)vN?(G@GOgc!#v)@sZ zkd&C)J{;Q|f=}aggm|1^a;Djf4!Nc9JNP)JuVmbGT`Sj4`n5LRd6o?}7vD$qWOw(c z#wE>)Rueb=0rT#*5~1Ktg-M%+h^aN-?#RhwzVcD!R*e!Mrg}w>l46?IKcek zeZpn8rbEb4NIevLEcAKPJ8VRI%xzxrWeU;<5H;Q#)NG;n?Vd)9*m&E2XS%Ws-SOYXlP|`V(k#yrxXNq zAlZni+QYzLQ#^j3h|7~70`pIqDuNt9Qj%Qy)|L#q2G)9p49=D|kMqFrICB9_OG5`; z5@$;bD|;?yUb4TQ-~!r@-Hc=;e?8)0&PxW8k|p_IZD&aGp5YzCJ2F0G5)u*~I|Cyw zd0~;i4+s9^B{Okwu;F541cSj0U={{zJ7Y#BPEJn7cg&2;%=Ew$^!6@R4!X|tR`zfH zHp%aKgbnTW?M!VPOs%a*9_Q87vvzdgB_n%$(LX+aTc@G3=|A6OW&d}u000>u?=Uhk zykq>w+`v$t$6hX3Q)fd9Rbf+00B68E_}E$B^ZfPvzuo!g8~+#y`sYX{7G~BzhyHQv zUxzB$8`^!awglek!1qsj{eAGCH~&76hw%~gKTz>EJpa`TAes-Ehw&do<3pYAIV_6ibE#j}5T>+iuZPljIzl${R9zkBgt)+Yed9>x3L zS04c5n{5p7_4((7|1u9EWcRk^ zB>w@ee;edW;r1Vq{&@Eg6+r7S9Ri%sztQCHZ+t;F1NYyi_BKW&0l6Ac<0Aeyx_ImK z68%?_@_c}4A7znf7x=GqftNV{Z|frYAIbkm@_%EF|4I3OlEnW>`TrFAznOslDfa(l zZ=L@?+wv@C*j$-3$ZC(wmj$CAU$sVaWf)CIvyjBwb46uRd8nzXJQJ%-$@K$JqZl+B zzB=xGo2afC9v{qe9P3uQxD3K9X#r6^VB<@4hrYf=*KbC1w6ch`41et~uPmL+k?)cE zlhfG7K4LFfwnrvK-ij(b#!Zla+u+ULOBEUy|j9)O@AUu}Hb6#Z4P z+?&1ct)9Kq&cFzcoSN`^55ldl+%LQZz5~uuXneK-*`F@U z7h)emZg6jD>mbB(u0|rV!#~@$vyj~F+=5hwFY8yEFG7exZv{Vvbt;ctZb(43N5Ovb)~}8O`}?mH5^>y5jwJnz;YX4= z?Zvi7DYEmz2ZzupQU_AGWrpv@f=K0_V*V0r=ECXySZ!Wsj7_IOhJwQwsm4|St)uCo zx%4*?sj--$WHGv@xj8>TKXVAx$5AdO!$Dp`M@*|y9Jw*|ex#aqB~`1|)9?0hm9_qxhN|iK z@+a`_kI2jlofSRRDuetOIyoVR=B$$a==uHntp~J+Wqqt{>E1!YIf-zSo5AVZYTeBJ z^j3qZ5?;IIWkIYl<_2DsIvYW1xuU!|%#jP(Q$9U6p8b$g!`%Iw-IFV~)_0bg!EF*@S?y9^B6u zi&CiUr1aiues>y+4kS=wI^WiOFEw2?b#Y+7(Qv2oP$7|(XNRWL9eO8pzPmt6I9nT5 z?q#9*{h<~-HVLIU!3|2!b5>J_2@i5=iHiwvnTWBwT=d=F_0jxAgt z1PA1E9OWV_yR3!9rA5nUfBO8YZ?Z(4v9y&<5t~kp%upf@qQ%?TpN%txbgD1ZQ5&7x zXWcsNs3#Tl>m!9qiN=tSHFUCAjWG<94NuF%A5%^;^fWI0z~k*ImHVDBcdLyN#HSH_ zAV*sL^`_ER)3Iw^{@%AjuKS|(fM{iiBAS}hp9Y~O3 z)!Z5mr#3Sy_1v5=RvL`u3^q_Ekc0O%DrEfIJ#Ns&_^|pype<%gCDr4F3Ugj+P1YBA zZ4+nbjfyYkasf@z)+Q?eOvjiP4O{jq==CM*3B1G_ z!~048zeF>8wi-Mb&=c0)?V*ZZ&xezx69zhGZ+oBtfdY;gIEW_-gA3bqe)8nT;Ad3Cx}yx`W$ zg=H!lhD(zz5sS)m5`jF!;(4!>bI)nr3a?xC)x*%103yaeArSzO`>Yj#?V z%=(=W;QU>H^8*&5m~{T+eDZK_?J|vO*J4az$*Q{ZL=mRX@R(x*FHdH0w4(KQQq`JF zM&9~n2Ps*jyU2JZ-d9!=OJuj2;q0jj^Yf8GVfV4~mJqAd8yg;J}IDYm!Kwjsk% zIacKk^ONyUdhimM$M^Sc(UmzT7lz6O!_B7+$TkSs51kh`=#A=_q{Rn8OVYN!*+}%Y z^A3J$VH-%}C)BIOG~S5_P?<5KPiJOIYb(zV!pOF+Xp;_L zX%3Uiy-%kq=PHpP8wy(A1fCp)*tKt_7V4NtVLSRN z;dJ3bn!HKqBYWi*E0hyIFeub(i|^e?A07A2Y0C#QMHEXVyk`D~RqK{JEiLDDG^)d) zq*}3(mSa1YrUyT!?{zxuqeIPgR)j7ZA0;ql{Q76$``r%qsSW#MoO0t)?v1Jk7Mcu0 
zZqMg5JO~Lf=MJW7tzLIVy{kj7{H6IYwZvfIN64;X;zsZB&aNsrk1b1LGrWCbqOUA| z)mXWRa*ay4Y#=M+$Xk@?=M)r6?wV7jeH1U>Mv!xRBD1k2)%>IB7}4jKbj79A{I_fH zt|+8G6_+wY&4&nP&OD%5yR*&e8-=lXs=Ra1Qk4iRSElvQI-*Rt=Rg)hR}`k5c^4A; zOA87&ap>HZTpBCML+IaYeH-yQAQveMU4-zy z$JtA2JpDRKQxSG;1InX0&K_E#qmy{WxL-6mz{^n=k6W$mKSTO(Bk;b!Ust7C-eP7q zahYbnrQWyo==_~&-u^U2FitdS=VILZsp=P)KQiI~9x+lf7D`t&O*Ao8cDuJ@@LhMn zdUdkizdFBBYQ4~*TAJwQW)s8!N<2#ssyYa}{87GL0yUou_Jj&^Po4X!mU&(tyfKc2e_~$Jo zc9{g?&kAoJSop3tS~tC#&Yf&2KJcu5#YVgS(9A!IRBowM)>OZWR^E+)MijTeezCJ; zG#o2ROiUhzuR$ThcW-k_vwu`}AwsEE#)`63I)Oa(y`5d#W5}QS5bOlNlA9J0S79j8 zkB1hfqi1_jQY&`v_2Jd!To?dQ_Sr7y$#{d?7d1I!2>01tw$YiHoW`l8b-d$oysdVf zhr#N5ltYJyZu7I+L3((S7dh~!AEp0qE=wN6XPs90%Jpm;>b5sX%2%CwT5aZ5X+4uh zJ3cK#DDyz?WZhN3)`HLNc(HJylVT~&-IE>HH*wIlb{wN=2c3GcwF!t=2=5)_DG?3C&Q#mZwcpO!_=?G5%@(2rA&Tt zXw2kgTVM4}@L~CSoZ+)x6Rsaz&jxZ&2rzND6Gw4|GR zjh4}i>FD9h63dCjTs@s-m&VMaHy(azZe4@slh*dI!%}HSg$D(c#qNHoZk#rf#e5#t z&D`@-nHP60O!}>-XY~)d+JVxwSFFMUp^oL%H7B)}ODjW5)#kMa{&7n-H!>gT(}L}M zoHCb~tU|CE&n#z~C`1}ZpilXgBZv2*t$435S<6EX?$AZA5^I{;bN7eaC+0m?FAu$Q z-N8#!!^XEItw+k*P<*Q+%S$(fmr5{81uQSvK8|Ln)A%*}YiEMe_c z__aaNT;M9D``%@h>C%j`en`t?O5?)zY}ExaEgrs7&!x{nBKDs90jkGRGfo|q&~2_Q zy@#1c0kn!q(Cv+|0)a0KJdbc&2}th$WIufrh1(cG1>Cj4B&}DoPIVLPdTU#6$$W!? zqbVPjQ&~v&c(I&)5hSE?R-oh6xuu#t41A6u;nArL*4K;pN9CC0i$u)xxxH)}cYxwh z7S|l{IEcVqADXE1X(8lxYOsGJRmS?{GFxTIPOso7T#Gd!_#hyu;ksdQaW1zUHRat2 z;0iQcD|jUmH8)_+|9VNscAtivR+y|a)4a*c*LrA#mJ*^fQ#kC`(j@`8l8k4LJP687 zJuxMNyj=}u0GYicUQhrRe7*T1gSju9pQMORtgI>jgpe=HHsAc6!i5bxyBFqwpM*pE=y@H|trMwEtQJQ) zU_tUDV0;aC)a{K>|CxpV+J^)z0MB?-b!EZX9-Kxwhxk=aMWUM3Z5rI8dBF*#sFXy( zrjvqUell3Z$ObW7ZkpbAXE!V;mAX+M&)An*I`^XA{q+W<=BDj+l7ei{Zl(4b&k3XQ(IB2*?f9so=V)q_Jq7tMSXHgw|D@4Doc9lwS>pNV;QJf6; z3OAotBd${kPJK&t$0l2>9S7M0GR?>CfmILsszmc~hR9c%g$@%Tv%VVUHY%Q~`G7;U zLnPlXK-=A*{g9iyemJ1*6*4McreoUiOR(B?FCUU=w7`9JLk#DL`37X>OPbbj#@&7} z!M=o6*z|O!3_O+g;iDMQINNB#bQU%ANeT` z>$Da^STUV&s1>dyH&$XobzN?!k@8h56Q_BKOc42`C{IEzPnX08$t`WkxocXrR{6E4 zfP@6IH>zPnwC3YQb*a&m466L%P&c{!!qyko0+sSn(BJ>BYk^bmHZ-7eJ|jU3bd zs@r5E@M48yqXX2A(kUlcACKE6-u-N__Gsv33AX*O;dsj+*#OiV0Y&oh(D}P8y(<(A zr_E(t2xP_sutUNh5X9bu^Hpbr zd-K9VH(xc;0~@Yn{fs?d;&|@)*RE6+31NGz+RGRK+!Hj{^Yypb5TkbE{bh8XTjw9R z!6V}4l-f&DwTmly*w^~F-BTTQ1>x=271JI@S4Lgdx~8`K1I?T@{2^y1lcx7kQP>b{ z`_cJmdg<(kVLk6eiWzUJr{)@cy!W~y^>-G#@Hm?zzj$X|Rd4xkDmlE;!V{K@UZ;6& zpS|7u-qSHEU3U2y+w13i*3FKZTBsPU(Vej^@+yAkXwjjCwrcr%dOkC_>WV5yE}P|V z+(Z_-E<595-WQWGLbXMDvX!W+S8rN|dNm1*vgnhe==V z;7@HRm=P9}HE|OybjV{8)$FkeGEG_LlVNv;>mm^>ajS*M5|1YuxTWMEMG4}ZqK zrucK`Au3eB0i_>E%80k#!>wP;c>Ty??BSrT(z@W;V)jd#$W~R$m#C|Bt)Od))6x)S z)k<%=>`SuKpr2eEDf(fn+O$1P8AsuE`6WWLdtKTrKbBo4)marZTW^HLx!a7s9#AtS zzL~@=*3#qxiw#*xrkLM&@6@4U7RDh|pbhFWG>fpxH&acR*wgg9vvf*^YD*1!S^Zqq zGdHDZ!G^kb)ipo)8DlHC4bM1XQY*4wg}sU-EsvK+zn-T;pwp;$rDM{W;ae1m!a`g8 z?EXD)P`8qU#~cUSVxD=owNgPw5l^XdbRw>3hB4B`g>fy!<+hqVkc(ET-_>i6EzCD9 z{Hz7J_^9utmFkb4@P|)o)pe7qO#x;$g=%9h7b`s(zfyUe9~MR%lvq@xFfr=w@30|G zhg;?P#}iXaTqwJ=JK8kp7tJayqeT~+db+9-#UaIcH?5WN_f~xo$KGnj_jM@~8JS?&dau_U@vsI)$naVyHL0oH4@#u91ES|Y4vl52X zgdrMG;W}v-lu5LN2v)eV?Xdy1$j^u^0r_wASZc3OnoWxPOW_wSC=q0;FOl>FM5U1w!1W$d7$@y;>uJDkvuX7FY<}jP^#Q6Ie>=m9 zXZz^vI62Qg*=Nd&DAAjRgYTBr8fb_RZ0WZ1Otl9tjAr#XX*r>%ApG|A>e>JeGoCGz`^Mu37To4zJv*~xW=(X|vDYmlNf7r7z*rhg^mm;N)h0NZ*67%Cmmtcr750!)UtETk z?%zOE7tWjc8?W#0p1DWx@%>;@kZ8r#B;t0O-_wSUP4+Q&+)UAktiO2n3;MD}XXPck zwL16ei{#TE9BLU!k(qql;2l zOl=^gAr~eF-HI$ht2>8Z~wWUGBRZ7Uw^6?&d0 zu-8Gi{E|qZlMwMU?-M$@PE|{z-9IVJ@eI}zMZ(^)`6S#*zD&S#Dp(cd!D<#N@l7G! 
zozvy4t9|GfeT;v0icYU+>ckoat#Z*z2`l@`hRH;8XQS#=#Sy9dk*s%#NNO#YW>F^Z z?Y3Q^QlkxqEXJCqo9IgH2S=DKPRi;=n#Ns2EO8@)>NP(AGii8yp+KQtQ)CCFC(O=P zb5AtP!D{u-u8(7HtqGD4T1`bf(iY*RN}_Bi|4e_M#l6AomU3@65Kr3u`ZcqG4Qhru zOLt!0GuEdWd0Q0OY;r&H&;$Jo$g~SkitAb8-fC$S_wI*8z7l467lNG64D3{_kGeXp zX;quCh1W=LOjCKKCchnrZbb+7}+%~aVImhyxi)n)gCi~*AqxUa0I~Jm^(C^wL4h@#zmYvL%msqTIylC40se zhIVoA?dCS4m!s$s4LaX(UEVUyYsUzXUZN~EWSIBPndHf+IAMh9+u3>r&r5NL30eA1 zLwGG1x;xpsbU$OYYOd?D-1fxfSN^8qeMXmjiKJ=0Tr@{4otEUW87=~TZE-ZkOvThx z7`p}|;n^^800|Jp;3pwhWMtI0Kw53xe$x33a#d<3DO+tBF_|79#L}vK6@2sC?AvyaEG=$Xpdf7Fz2Y3_)_m|nVG6h&!? zSAEJ;u!4HNee<2{%3;8{T40D0j%c5@+FYr2j2hS#jK>rxLp?z-xcrlc#oj&vl3Z4q z=={zR$fjKyn29E3f-Dy`?Tp{-+b5@2J8^fxO zb)%63yNCA*aK47(Sje;w`0PxF236dYbQ3C5+T2TwphPisV0(m0Po$7x(x~4^ioCxy zL-lmikC@6|$5byqD2lP^Hb;Ll-9x=zOLK`T?fcXb*eLRWIr0wm;xe%Jx)yAxIFF#N;_Mt?F1I%sAHU%XkP>+;VHTu%8LAnlm_dA z*buXcujYxYF;7({A|@Q&qk~HrO)Zo{A~>CwVe;+e2EUqH*lT`so1701GcP4tU<)v6 z2O&t{7aCrs7G&NvN1Vr4lPLI78To@Ojea+`iz;) z4_rbnks*_dLSgHd>Bz%o#ZbTnHy=8`&gklo*>vhncoPV1^=O>*nMz5)iPJG{f)sk+ z2Ox)@&sIf&0LbF6#~fZ~WE`Cna_{QCY9ECkXk}IG2qg9@V6u-Z__iGlY|YsCwraYU z;X-O;=(WsZ`>>S6TGbLY=QyONY^RVO-CSKkz=ffM1PKcLnS|a(G|91)Q!&Hx82u5p zH!J4jMNcP1^3!6BVI&|3gVITLL~@RK{U*&ITFtQ9Y>`!mcY`<*DaDAl=EKLpyF~Z3 z!<9GEvVDyWp{Vzx*%a^djkW|Za$*=s_kiC-Aq7GV#c-nooO zGSM55JaxTnce@MBoFBUO2(L6_iLA;Dj%@EJ1h_0yl|T*?leVDEH4M3`b@;0DuG(@z zo}mORhx+v1-g05UPI{r_G5I=FVKb3feY~)iKeRI-Jdp7bxp61WJhXErQDeJY^d^9y zhd=Onef7QNxrXtP5yYGFscN~fgb|Wjp^;;~YV-t%sjxRKXIN39H;kbrPy5zM=KdzF z`YK#4*lMY~ze}--Bj3d?P5ilmk|Zh%bm(_skVd^-0h_i& zZz&|#77KHmQtG}vC`NOfX3CnEV21#+!Gbs;Z&9e&!7|NM#3{~Tvm5`^5hJ5iv+;sM zVmPc}lRnkZJgi4e2tg22pP%?9JGj(KWV`R&awwTIxhpR$+XwmLzWR&&5goeZ^n~nl zt1$X~!%b;IJ>Jf9zW{q19L&(dhDE6Pb>5n6xRJlfYKuJP7O*wW6Wp~A?)$Z=?Kw7` zSSxNT<%HQBuTxmE5?&hCbDqtAHPWYKp1h@zA)>3caNL??s^fk3s#lQe5QCH_u)1KL>%bQx~BpisxlqjN&T_1VSw{VZ42S) zoZI=mAdu`P^8?ab21Rcw|E#nWXhMqTs4<@+V&XT$l=JNps5BXy4jQT$S^7r7G*byK zjj#L4uT9j8C*@q_N*NXZPH)CT>f5KnUOq_+X5BtIiVv|1iKpK+qz3K!vsXFW##fwE!kCsPuxx#ZySu<(3cup#DBlVOsY zztJRHACwLn8c8uiElZMSa2=OjzmddkOY;rNL@w3*MR44 zGPQdoo@h*R_UdF4t!`J8xRAoMLULA_Zq+`oWia@#%qXk&p1${Mv6ohLFk3luN%M>O zxHDb1ca?TO;@C^iqZux@-(Fbm;?>GS55PB_pG$#~kp^C@JI59Nk@KWIV&D{+j-@Vg zXlyx~NP8E5V9`zqaoP(wpv+~7U2cj2-UL;#mK_<32KUzC$IW>|_0+VWp&jT#nW4If z6+-zf(O>WMmzNY!vcNIvSaoNTFg5sc7GBeQI5=$Z`;BA*Ggte_d6_@BtePk9rdZ(q zHeBVDZ?C+3-nvxWP+cBjSk~Oq+|aFaL$Bml_wm5ukM(QeJk*a6{TGPVKJvu7yL zf2wB!qA!e(fa=PCME3&iKMmk-ZXszx0=8u@u9;l_%*p-nu<)xV@cm!Zq$%P4nLhn9 zHRE$g>8jupK2><}B{~wSbAcS=3&d2Gn_0?jw>0^!c5-F8Ts-y|#_#ZGJvcPVS=0^Ko zp@^k-AW#^G6GV}I2zZwyXn>B>&Rj7SHv^cKzd(BN>zjPAA$BKj zM2tsqp4#WlwwM`Z-sNQHrA9gqk)hDIVskbMSFLt;)G`g^)h|yzz zpMpZ-l}tkWPjhd8w_p&!zmXy)1^<-y#5eTHJ=vjU-N@xK{YGJIJc0I~tl8EIKQL=| zbsT#jC^8S9B`olND)r|`CDXZ5Af0uEeSL8M#WjquLEqC(4CIL3czgGi0(Ealm2FVowFeSge>*s$6rWOTo;Gj5Gu5Fv|d=pw&WhX6Am$&!>m9e#Jfz;^%y%C5ymY9CdHwwwHeOq76H>xECW z`rY9zfctn#W0H&%rODD>%X3>%OE zD3|=U{k!7{;^!y8aSChtmjFDIej~pH?5iYziD)F&h2I@8KJQ2#k@!*Qe@OhVNR6tx1Scr>LH{BTVEUkX%!{LwySO&BozlV-L6 z(lL7&^M}LVwjYn@E7)D1n)ZGr{UC3j(Omf=p3NK*!=R}aj7{$fN#sHMn`JwE`bfVb zxXjkuUj~olT=)!^3e#|2v6(&Un%IQ`z9(tU(BdP7`B^^EFJo1g`+ShL9^OSRcc5)Ho z8hD_3l5(cP5N+;=#pZ9)C}0N!z>-$KY%cQnsyY88JBr8JVu?9ZF6 zKx48+m|85;5+Rjuj8tX2+e8HS)@GC%)Ewd1n}_!kND-&gn1 z+nKHSc<^Ol^FNlcN+bZ)y!~nYDnFL+*?Vp81O*(a`DS3XCeT~uW zoagOI8S`m8h6P3HwG1}9-RFDcUia=kJ?-H4+Xyw<#&?vB;60~6OF9)zm--U9foms0 ztJR!AAGW?U{#Fe;ab9ZG3}>6@`9J~TzmzvK*`xCAaAM6DEaL%Or-92WoN`7upwfAQ zI9GlBvQ>Mw!)XQAxdP)y40Uu2ovKhU4s8L0>&Jz-C09qrPc`@3hyZ7aljOtqV^T@) zL~N$5w+BAkLa%!{S}6+7*M6t~d0F%jAc>NEJ%tDA=U3qSFGStRFcBJ$jN 
zLY>v|3ISF`rvpd3KZLP$<9pN21VapX(Uo3V?r0Aj&?}nB7wL6>)zfb7p()|ApO7o$ z%LDA#HC>JtGOzTP!VPV_)5^%7jRJxobx9f(S8zJ`BZZL%nNJ?~*GCOE+GB8>3D zpXch7#pinJy(L*`%dPve?J2iZQY|D5pS`}*=#1~EznlgeJ6LOw_dVsoPFF7dHSbcN zl8P27WAi%@!qD!%?V*=w1W~L^PiSnMmM5>xw3a@hl?9bkI+yEA_OInwv=StO5~ac% zo@*Ozz!8jXD_`d3zh~7X+MUvtgsG8lP^7*(PeOVmVC#X8=kXmn|I=_O+>3lgd0kFL z)dq)zrsG+4X15`Y3y0!FwzZ%h;@DuUzMeeKopEX^fhCtI-u?OpM}{cTYhDt734`V0 z;9ekt|1zzDkET_0I-GJ8p?|^c{0IAzJ5^dl((?T6Zl5*Uou07@J>W#**Fd}5q9eTb z(v=FQesuVUtP^9kLbv0Zjh~`99CbxmzdD6~f(VZ(M$Otqus-v7`9iijV@EUM1x|O@1Y_j-!&pwfL@(dBD`e#4Ju-hM!i~h*n?2;cGoy8 z!f|1qN3|NL`_-%k4a(_Ff125hiBKI@rCE~@k^sKo$0A;6sp zdUR(H=SGApbWF8h?)G<#`K75$MPQDWSmJLw>0u+QPFOyjDkxj1>E$|nBO2b9+U%Ax zx7A#4danILjI?9l@amlG7qA}Q4(i?1PHKNq&8bW zb#c_s&e)f*?J)?yxy8Ufd86Yyr{(x;IPy)SXPQ(YjvDj#>*mRwR36iggyD9HI0j8I zjN#O(3!%aToY~#@dc_{6$R+r4ol*+(Ir!G;7JcoZH#jdDzN zCdwm2B9E|_%#bQm%RPPVwCV01Tg1in!PSV#>$6^C8s!?u zoHdl@>ce+1(p;DRa_^_M77_+I_T*;j%|qY zUY2Wh!4BjyxnVxQiA~b~OG3pn#Q1N=V^y#9_A}@wCeF8WiB!N7(;v~%Q4>f3Yosfz zQ%HF^TV)#3kQNs)LGwN|r&Cse^KosKFu#Q>3PGasGyd&vNQL=@RsBKg;^^`YZKqQT zZXG+~G}htNlDjD1okg<7QcgAx-!g_ZYN(t5Ni2pKbGy9ePKo7+2t$FU#G>;ag=2fj zwtQUQPi9SYj4~8M`;B|j1*^s}moGeQHZYJoBDJ(Ng1(poFU6^)TltUr=-r0-amo4_ zrH7O{NYRU~ZeJR@OCfPu?v#`92`ju3B58?#`7_lRkN&VOccK_J9X-RKC3v19PD@SZ zn*aD~%;j?Muh}$0B zdrk%7yAt;51vJg{pwAPkRDn1v-tWKo!*6xsK@e+X_?HqoK)L?$Wt$+gW@Syp{iR2~ zYNo4R(waMB)?2`zdDTUnX%F9zE5>2N08ge`mJIwh!?vrsOgZXjI5syT?c-s?KpSZX zSZtm9M00g#0G%R>iOiWl0iX4Qy||?) zH@I07La)w{$$u?Cqg3!kCt>z|n@fDD3)y*PurKRgaHpQbZb5AV$I2%=NvGd4Ix>OtHi$@mzM(| z7FmxRjd($);EIqr$m0C_w*bhpyx{v0RmE~5EgoYSG&Vv(5@mpLFy5c&TsDTecyk+wOkATxbV!KnKQG`K4&R`>qQ@=yS3@g2&{!T_rZkAU4~3 zDxxk|hHoaW3pk^X|AVb>A0eWJmD<;wda~WG_e~&H&&EKq`NRF41E@|-gOYkfx~RKT z{7*XP{kdpfYb_@A?9jFTMD^89T9LM@vkK<}qv=x9?_^>gQ=J%sc$oKyMPHkow%mb zp*Ir^ewVCpmCn>N^ArZ!;TSZS9CEaU+H>$wjfydav3BrK(MEx1ta6CN zoJon~`+o=-?Z2elU#e83`;4Q^0sMjE-I5l7gY{SDgLU;LiVgKict6sDv_N>*6(Ggf zo*0I{45^Tmb4Ph}F*fx}I1Qk5~pP zUZTD$TCfaa+arAcvkVyj9jLTm)d~YUBY8r4DZFo0o%W`pmI$~k2O;ssUUhgcR~_d0 zv>kWbJ6?xqCjM2cwKKJvC`{BSHW2)kLtiWAYwjCmFUbtnH?f!h(dY#-_?ya*C*NhC zf~WU9N@Syq#6X~O4X*?t#Ro?C%@s)VuR^lTi!b}TDURE`ZV1rwO`vr+7>-Y$fD3y zW4q|(x)+qQo?|s?e%eg~2(TSfE{6bgpZ|^KJfJ{8r?u6s-yBS3S?!KcUP@Fg&|gUf zWU(yZu{$7anepHn{!?n&j=N?<-t_!obud*JTJ>@K7JI0V)py;sRX!6(0gT0$Fh0*8 zu@y>nnV%-fg1HGoCZfu5uFVU9=+fDxtUyfL?Da6QD>-@+OslYQ59>!lebh(JFVy8-sRO?rPO1bnHiHS}} zPqL{Q`z*d_wz>xpl6d&noll5}C2?{8WcLs|uKl3`iB=3M#THE*v!+7y1P&wy`P~%1 zuLp9o+F%{RrL zW-6I{EV~MgN=-nN;>4eae#!s-#z^^bGF;Lx5-wltzDe!?b_RJg3P33a7qwFwWP1#dM|XZB}VK>LM)% z^h*K*{h@4$QBxKuouG@bn2v+r>a3UTaTBK} zV6Nzj7Wz8|`Fww#p=j1A@;mB7l#j9q^XFpg#9VuDfM8HC{KvJvW=6QG6!`-JHSnzT z*}`g9m}8Lhtu`~>NT}g3L@nz_rR|~-xFZO5^SrHAQk8jHhnhWa%!=?P@hqZ^D=ZW( zJBhWMvRtrQxtJJ7q(35gimV)HB_bhl!t zyJyMLC=v?Pya&(S8B{YROBG&vNuQ!2j8*oy;M(RCo2e`|kIwem`SVMTXqi{Hu?b1B!0*#{I;(x}iJaSH9*M zFyEN}7zdi+a1At zvu7ap0+VWnAE1r|qgWk;kkw$|*y%lvSP=F#|3bZaIfX2R2Lb0o!Q&C^-u?Lo-UKhc zc$6^+Ije;^45B+&cE+8PsJRJN_wXQ)9R{27=Y6I{bp^LkS)u!BM*_ z2Y#j*z)D|fJanhU0%X&3G9*^CpaXArB7Zg~jC}rYmOVoBsPdNQbA9N&i{Dtj>p9u; z1!7SepVtcDcR3TY)^GsQ%A%0)q|uT7YsMA#9t33~Bw-sF$sQM=Sv+iIH$>?Gm4M1O z;O}xaTlxEKHDZ9&0j$vFA!I~B3POY(qese~1o(psdN2Psv3lCd^(j8wC+7v&Suc^4 zYhIv%&k-YkZ%6~kX1m86eO7`>C*a?RtJ4xpfBB>g@BMUoAN;~l6#gYII_(w9lkb3M zq}1sOR68D*C{+-fz?sMMHGj*_9wUqm;MvdO%#n}KcfzVhQAw1g*v3fFevu|#Pc2;TE*DA zfHM#QtOnL(Al+gaWDR!M%@s3izkA|A2pC(%gmaoOFutrJ#uIczMz&o0G0(am>|*Pt zp^Hz>2MaXdj{ZxV>Pq(};Mrz_@C>3h_reK$%ciRLXs{y#Y&;C-e>3ZTZLFY24M3s+; ze-O~faiibfEU#+wVZ^lN+|m=6YAnpNYZWF;rK4xl|C~9>Vgo0FV(d9JVn(w{I8HE8 
z5hE^@&vOq2BX++_{GX~EEjt3Ot$S84ynw7ye%YJi)NS!R-ZRr3onx_U22H>1csZSPgd_06k7{LMpDI$iNE&nWGHV+frvKqCK#7CRKdp$KSd%>St=$jXCJ%E@sbeWucp ze)qv_NvSH!fWy1__G&YI@ZU^_T+7+}${o4uzsL%J%rM_CP|pdk2(DcH6|8GNm?o-E zubh@b`FI=xQEco_6|TW!vmVP`hOiG=%oBfncK;3y=7l>aN$CZaWVJg;?}b1*Vhb`D z&M09Zo+}Z>_rLXoBSd{cT&YJBvz6<|DL_i^0ZGhdyC7mI2XqK|KP_V}YycsVs_zVD z1CgWwVNh%26OND}aE$9_E2rECE_X3SoZo1<&keFqMlYQyYyo1?H{c*|`H@QZy(UKjn%;6_1a+vG_dQ?CDMj z<>u2qRryDy6PzOUF9rl}-|)L`-i>W7-3q6s_v4=q@tI>s753}sMqA3>4h3#qj8-Oc z+5J5k$?vQhDX~pgfqt2I7l`!=<{Z$(j!>tqjjk!wIPEO?^${Ams@v@URjPMt?x711 zvVOy)u{@SH+WYWhZ>mmxqG*v475jcwS}n_tib<>TN8gV`I_L(Yan{5`l>`Bs9*KEJ zo#SS-gEfrYv!OO-vX)bD?2_mUKATwpH0k z%O32m0pN&1&spz&Z)@E?aDp*fPZS z3D{=@cpsMa-`i7#uki`MP-V-q|M(BJn&9Xh36JsFg zm{zggT=kA{Mk!5byCc4mxtVOD+rl(mRG;s&xaYC*zR2kh6Ib$|*(>zv-bb2OQ1*#) zAlp!E)}_XeG1-xmGFfcCJD$Q*?o%Gk?%Rrxku6FZkIA>1@p&DLn0=hRGN+_`z)=?8 zu11Kjfy^QlKQcazBdP$ARbR|IciT-b)YztboQ-RjgO+jV)Tk1%{71t(}syJRVk^JTn|C-gp{MK;v4Z(~xxbfitRP_Id;Ihjkd z=vBPjFtz{smF3fV{p6qMFxy5^4TPCZrOou*z%+zA{=QBUWvintCBN`!3S`6M*N zI$3~4vBm!rP`(@8t?)UFS%Owh3iyn_=Gvfey?o*bJVVDxPX-N!#s45dDTSV8(K;g5 zu(#6h^Bpx+>YAxZ=bh1G!E#GyLc6^TeKjwoE5extb<}5M5dZP(ChdrQ5*ylvhdh@> zJ?f6C-TP>YoPkKghI85Wf|6LX&fq`qJr}DEocOo=X)6jN{6NBrUa*xa_yr{UUKD!F zt6ibE(8D}&R>l{yi2kg%%d79P0W3P5$jJKGIx;82+B2vuez#Vn8T5|F^?1#GB0_MS znW|RjIF-HmvaPNLl|p>-2Eoo$Y*bP}j+pXvOAYEihKZcqvY2UowTN?3?OH<)K{2F(<}Ez;Rp zSXmq08A7P}%ic#saLb>FZ5f2&moumRx%Ce93V|}m;9R?SQGG3j4Bc0bVP5Wk>~w#v zeku_^-Rm}9Xslb0H}=M6(jgc3l2oAOXGv^wOI9q>#UO-G3XX>fh3LgxhYVP57SCTl z5lDWo<-BL;=G{^+t6lpurQ&0y+lG9({RzRQ&}Rbf>9cMJpW7>1@8(8MKUeRD)*7e5 zXmfz(ny}^KqH=-#Tjc-C*$e?XFFT;5SC(N)Sd_$U1w(`eN$L@ zf&&RdLP27JQdVKWx_Q0?xjrczy|@Q{1a%jG-gQUh{n6H7hOslOpWYQz>-B&>Qi#z( zJG!nEX3TJ-A}W)9c4g!J2=}#a34Hi`zypZ_B7w$YkttM?oc)dt`ES@OroF*dd@>8Qwhe|EGOGJ(7Kk8J( z*W3>cR$bH;LBU>OIiwW*%0k=FR*j{vKas1mplJ-E(|mYLdVDu#y?#=dy|DnhK*YA1zqn=|HyQF?6crLd!&Ww|& zC`H9|bcLQx!+OAK)KVy_A~M)7-cWDo?5Z^|6VPK8o#l2%xQX{CB_!7Ni5MAy9cBU> z+MI{w&>cC@IG@7!5ora8SL|1BWV#)F?*A01+#}VkltVQ>&EdZ(t*pBRu0e?{c4are ztbH@%W~x5sn&?Q7>?90GcK52`<92%s4(lfCt)z)>uzqoLS@)MvYt&J#GMQsT2pqf; z&l)~0G>&ujz%z2tBf?0H;Mxx?pySXmbNF63PfN08Fo{(e%S&SZU*8`Z?0FW#t*v_k{1zFk+;ZdnI=|*Da|)t`Ok>?Bpo*T_ZJQ-%m*?KthY~n zM;>1^H(HHa)ZLbc%uA7+5FF~m7&@PHf%>!qxnb9lAf)?2z>eLi?O~ zvABKBw=~P`m<;RZsT51BAj)FH#ty*Wr=D7dCh7NY2GoMD$8N-@r+9AmH5zY+;(F+? 
zA7l1a+OJAaw>*5Kw_znjD~lCr8pTA8P6Jo;2)Cj_2pS9xMs=Y~3z{ehKDABBRnbBW zvLf+*o8d)CwkBk1F&f(LG9jwnPbg~GuO4)3^k$v&%+qyqpLt)LeOoumb;_U+rb+PJ zGK6sy&K!{ohsgr$^kXn_46h6InE$6}L(W(2-2m=TESN!t` z?-J)g&!v@6b70n^JF+&EK3Vs8V}b&@(e-(M1|$H(PRw};2{M~`L3SD+cuj{5Mt2P~ zxP|H&BQ(`Dv}V`q;*H|P$7eQruj6}I`D>hElCXaw)wmSEeTTQEM~2*85BROg-8dD& z1MdZ@&(P3dlg14E3xB1XlP-(!SQOvxXINVlZ`*gOyeSR+Y%YN1bBV{%2mBA5p+UG( zhAGpP=%=;{pMBwXrMJhsa@TbyQFtss-XpoQ+nM+2qrGi9sgtR7XZ|>sP21$YWV|5{ zcgZ7n;}gOY41wG;tesA_5K^ln>VaOdHvxY+Y5s|RnLU5C@2g6#_|bxJ)U2YkvYItE zIBYdGoL754rTA{?kjJ9#VRlc@Eow)K@Lg?-5o2Y@djdy6w$>50;%OA2JIdHwlw&#R zFidiTJ?`a%`k2`wx}FOC-M(u+F)dK!9iHv%dgo?8%;uG7=~ERKn_+WUTUz=oyY;RE zS0`ujq5peUq0v-T$P_R~L$nPcI3?zYST8@5AQNH-Z0Tqdt742rr8MRf0>~Gb7u}eD zeYxJDE^ch$aJh7#FAk7=XgS|eP^=Xmba@F$9<+eWnmJDCn#(^rb z@V(e8ZV9QX9)~bJwWPlmHjei$$*>Pya}|qjcPLCZwjNR;rgbhyYQ%@Ki{F-rm;>Ot zZX*c}+|%cL2X^6Yk17wOAaSd&6=6gOhI zEwL`%UET*i6}ULP$6jP7)XLlQdbYpNB-7=|soJ%hewQE?lPUt04Y>bq!TBDR1*Aar z6(c-8)s+_O)kIbsUPk3_?JvG_>*Q8bq}kW`-ON_&;~6SO7Usdqo>1&=t~4f^SRJvK zG?zDb!B`8af7k4=rM;=Q4jo}(YoS#t8<}}q%7Xzaw&A}E7dLw?(<7WYt%1c*&H*%T zu9Lu>9m}h!djk3q5Atti{I8qkO&iSDSY)LtcgDfwr$h( zV{&e{B!BMBwk-OK_xw6=CkR+QhO%RgRY?DnOrpC;$RwQBDlyz-JJrdJkbYhq%~rFB z-%jTd-+b{wSDTS3OkI=;|P>{D|~B9S|+YG~Y;A++Qc(78BwDf&Q6(l{_6& zVbBP$J+Xg=2@NT+NKbe28QPz{-4ktid$i&Z<@WR?Bw>-W6!|j&-YA5JMKhN&G&z*^Rqwb0wCdgq@(d)4;q58 z11BN%WBQ}*|4zah4a7<4b>5S3#I-XzG;c6aUScpkqQqFCV@Tha_|se${wAR}TY z*C$p{NOZgbW$w7M^d!zNT6$+eeAQSVL!n}C-*M<{&@cUP;(orb$^Y-yAiUmsGkHCU z{`vckKg#ccX%&3P&{?1lTJh*89`#yuV@2m0AZ{MwEV*xcfr*`;czz8S?@d*(H|C4? zTjbxIdS0&{VKA_DX_r<~6v}974CIK5u(GXFRr!?_7J6Hg4I&iye%nD`n{Dm<>xHy$WZ$194A40C@B!nZgO z#mg#+rStYVGuNcutj@Ymc4zp}RGuHKX&$CFxepH|GlF3icNYA+;^{@AIT9Q_fxg@bxR6{Ek92!#O_O=4 za(=CMdm1}(aTajxWo@d^FS!Vj`EJ-Y@bSymA-3jN8W4&a=Mp|J3viB!HT;v*?mnMv z;GDhv)Xd&^98fXFu3cT|#spFC8}W6xNUgE{0n;N1H_x+PoU^jlDA_1Z%f~nKC3GMZ4b}VN`n*$pu=+! z1==O~a!)_jQjoXGIRYb}0o=81OJfHZDMHIXx3f>MXr@JN@!YI<@(G2zfPD6t_t@6~ zFB8vv?;?FZN9w5?ig2~Q_LA}SYw~y$;iwi<-Tz?)LIPo@GRsKmrd{1M>}cIBew!}8p~0QFd~u5QvgT=#o3hs)13 z)jlGizJ#$mFANW$ci#KV&DON&9YLaZ2(>h!e|+!T_a`8xT17^yxRX<^)k+gNce&5m zEUJhN`sauJ-c)#074B`Y)$6|dJ~%#=2osz@p4S)tYBvEx$AK_-oQ2_qvMO!TU@z>> z<@UFYc+FX8s#Z4ZUFxi$^{MFBYsl3#&9d%y+kc`C3L|dia#^p%_Hm?n0nvF{W75;I zCmCz_v-Dyrgy+0aS3lX>_HdW03LRF`coeP|;=Rd6IelX)8LS)Cax}<6$Y1^)!;?V# zxbmP9qEq`eJKz7VYap37w3~IL(2C}Q{L@kDKO{Bb=gF$ZtEL;{L%NvGTo#4&Xd0MG zU;@(dTdebJp>)Tb7^PBwGU}V`zO6X zTS{r|-i=!n$DV`!;}pr#VqW>~T0GBe17nlvZ@sxAL*&e6m?7hhQjyYtUKMmRIqRpM z0lhgOx8;*6QT~U={DIKu%y~rLOlz>}B>gT2Nj_~~VJcY&C|+ZMl4IEYRxtBx-9>H< z55r~;*XU=Npr|oYYyWO@pEUhxQ_2^HdVljzC!DHu6saf1<0FR;b0~mc2B)MYM zL@;5E7rXpROuCu-Hpbzv@H^$Xs=aQz2X;L@8FSuS^^wPQ5o^RJh@f6#~+${c(Q8X}V^Nt0y(?l%`Ag?G> z^0ji+qJ)%vIQX|=WFTc2$UZ(3$%%KTxgS-11G&Z+ zLUoVXWWV~nD0R!y+M<&Ul1TZXEVi$pYOlezdC#C?b8GYXO$63hLT9`X)#y&KZiCSR z@wL%QOemDB#%BKMCW`4%gDp>Q#Jub*XG(Ujc<8&Ep7BsGV(Vw^ z9X!$F)NTbNN^R)m(JNpNN^A+`G09j5trc(R=Q(TY8unfP)KQ9GP0=J#-C)QS6r2AF zrpwYTbBL>GF6|E;wYXg=l<03e+4hN$#=&P@{tXZQh8A3Z^J_Yp-m3gGf5YUC zP3&JD{|DA|5!+Pr4e}Ug8V&8w0x5Cu4_`(!M&G@ri-o_EnDcAGn`h8ZPZB;uC4ol`yTu+ZpiTRFtuDXz?rl>EXD;<_-Qf6{B zUCJ)YCImN8%)I{Euc#dKE$`Cz;yDZKV)`8yi0rBpHvfc0eug4LnFy%TL^cHq?RT%{}%|I!o0cw#sBWgNr zk{xAZX=%N?jr5xrulknPsDtmSJwxR2%;YFGX68UK^yB#FjK!4LsR`X=DjS}X_JZ~( zRvg9Ts6RQMdHGo7Z3})ui@I;d8dW%vr`_50^PQ=x2)WYOg9uGBy3&7>i&qUr0nAA6 z_w>na7n`7%f`!7dEw&f;&Euf$bt=NoCPdctd*_UHiE&=qIBlL`=11CA z2aL^&+tNypiEeE}Sqwrs*J!rSdL&&&=Qu6v#wQ1CS8ReFr$iw+nHtOdtF_fGekN9! 
z!CURbXtG)Ti4}wb_)@jZwfJ8Z)zH+v0_Aq7ull1>&VmHtD10v-J$_LNs{T;x51pR+ z@dS`n@{-+9j_~#jOndX~jFd?^>tv0mahShA7lV!nFHT~XOQ;`9i^rAk^*P0Rt-qWN` zQ4YrT7$B#^otY1xl`l(LXPF#v!-DV57LfAVsPL-|4qzF&#b`|Ox#{%!CmM)cmtXvF zxgK8$DNEpkf`0szcBqI>hp=luVY{Zk9S*uw?fhlCvivt*LR_1{b~O=;4z^B#mW{&g z7_%g!%I@?s%N&>8KV%(G)Y_^uZ5$+63)eIB@-r{?7o;>R!@wf_ZMEHG%UGOty3)As zHI?sx-Ak3e{MBp~^K>kR{jYh9eX&7Pu}GLw*|*sYxi=ptDt)HS6r@|ZBqW{l)!E?mhD0F5n52y4afuYF;dl^ICav%*UFnS^!g| zfr~_?be$zt@}a|y?`l29oyjy08|2HV(r$5T4i7n$P>ec9`*40+9egX@ z_Be_PiTP_eJV#=NfgX=Z_{9AXF1OnGYhpZ?+4$3sK)4?!y=-J;_^S{xS_1->jEP5s z-7ZHX_;D8>+q7vk$rS|Y3Axzs?gahZC_XRQE!z#2F-pD9lKo~NA{V#kn1W|LWtpxv zKtd7Y-sZPsrVuuTB$@nWf+_sRx=$V=hyZ7_au+w z&Rb1?{Rt-5E_%h(Umf*xf{wZJtF9+&eCwq=3_9{(Pkg?^x85LOoh0|}_OKe>?Mj_& zdYu#9scH%kMkLdw^g-%bf3Uxvbd)+>-<#H{lr<%})@`Y2B#Cogu6341Oqmk8?V#4GY8)BMg#8O@FxXE_5KPWi)g}Fd?$#30-%Yi zp>Nux3g;@X7|zSR;WqcYn<}=`9garFVtoUm6~60*c?OkvKdn4)-phCirm;lj%sI#P#P-?H6=Xz=q3RetVKLe-?1pW^53!b`@BY&=8y}s*0zu{e|LJ1ElgD}x z>G;GwPZACOJJeuJ?w2pJN0d&3G%v}imF?FfnmD;lR)gZU!8Ko5zs5`hg^lZhWadKs zMy*g^ct7;mO7Z>B8>Y8Q>+3y)KKOCJ8O0d~jcfY}4=5a|QU>XV2%#bnQJs!gA|90_ zH3oHD8hIXq*FK_F9S!Otbaaik9GH!9EV0^Yq3pFt*CNHmpOs2uDH2BFU+zSCt*@vw z>Xj@u!X4hrQ$0O0;MwG~TC?$p4H;53R{9CmE_PTL?C2#ehiI4LQ(ZrDpZM{Ud4c`v zWc^nUo^%0+6R|OJpRL7!I#Le5c0`<_gAs=H5hL!AYVeLM{OeIt(AE`RstJ?lAC$A~ z0e#JIHCHjG(@l-a4^+ijyYzwM7B3-~SC@Yq1-YqRr$}a(aFAaw%_C=Ez^GN~>&j$% zyldc40&RX@+DV(}j-Xg=Lk+5)anNo;$flaE9LND2HRpGvm^-G)Os{TZNcE3(8TqSe zi)1S%QU%72(OFfl*dG+TsW#W(JwYiFCKfk+wFHhU<{{- zIi|UJ3r*4L)Z0&0q2)zCg+sfm<#l+xWTaD*r-n#c%RlF&?SL2kF7_w!dS$qJ#SA9KqS0d2v|P4W*7A^x3~fsh{HvuLKLP$v$7( z@DiH$54Er_j_c`n_k%NDe}842<@Br)9G0g#QXD;D=sg2AA&M!OMs``6B<$>?n}Mv? zabg|Zsww2S496<`!=J`Wnr@t zCOHH}mb@g|AWy5&joKyY9+-EiLQ1iX`QcBeg&w1_%n_|&lMT%)arQCg^I<~Ab3-RueR>=jN+pumVoUo<#xx!2lumm}X<#wP0Z+M^a?NoU ze^Z5^Ox31WqgW%n{;N%i2s2#%y!DNYTyO`kjgGRbQSW9UMGkq?ZLjsEW8?EcZz%FN zzn#7TW}N3)FBHQcyaRfj1AfFnwF)`WVQL97Hsk{-qQ;(qJogqf>n8?3{`Hl`XmVyN-Og*qV@^vI z;<9DDe~aB0&;BG=qJpp2-Qmw~FOBo|Xl48F5}RTC@8nu5)x#LvY&-<{kb zTmP)t+Vex9{`^)WD5;NNP;2pd*SqL=Pr!Lpu3i}CPieQiRgM3Oj8th(^oj*9EKyp) zVT*iUNWCX0uyZn`!REHiCdf-SACqMlEopdH_vl^6=jq-m=9XK2gS}y4Z=*9|HL(`9 zBij>{koeJ_7|Su+4|R2Hs?uB+YIGsnY&lmUgzO(q6OMMI$9LjFG%MBLV!2=;xzRjo zRnsTx@xjGs_4LI6$(^GqKux!)8Le2%EEyrJ#54 ziKCV}Ci2!0b3NAZ&*`}pxkBQ~P_Y8JznJge#EJ~L6?ZVqVtqNa~X4$@nrS9Ize$J8XBk# zlF4_mm!4k>Z1Mi)oi)FoqOR7q`OYokPo3J9ykt|@@_Cfd3gJh#+wTIQj-_ zjC8QaN80{pkAG;nuMQ#M%guDyNxBjC)-EY#)CrDPyI6QKZM_A@h_Ic^rdwZ5mOoB) zOcOn!vn+Yp#q^tS>h9*0!|+202gipk#^bY-%zRDee9SLbp|h9m1T!@cKXEVXowr~6 z>!8JHFgoC{-;9=BUNX7s%HONt@G7T>3|L9|GNLa2P89s=i>pvFUz;ac8IdlaXnq-> zd*@cAG4_4U1@>`aWm?dtA5GI|R;^WSB)dPQ>09St<2p(sy0{@-k*$`8BL>Y*5ftx9 zQJ2feCc6T)lKs@U1VPUYGEEM~d=nOr2>ri+Kzc>`u@0KXEfsSN>{<@SWq|;QGN=N8fljHM3^H#3v ztRj5KJ7Mxc=jY(2Fl-;xfmfr~W|%M$9IcanJRx9%kLCxlmt%a<;B}+>P z!W^oyq*4#YlR90xUY}h2Fz>urK&mPs(c1e?_U(#7iV<8c(~bFSKv~mTqCu0>cX=o^ ze`Z$dTrKW_>rrkjsc1}>*Ds8To-nZ>z86tTp#I3rP|>eG7-i>mwDoB)?cSkX1{vk( zkYa#5n!-~@&(%&|Urw>yj|SsCtEZcjx$+$u8J3q!kZ=UKs42EQ!3V*%*CSyXC3?E} z<2X+p6qtRX^7uW@u1o3dJYSEs*VLCE7!(poaX7=TjuqM9O)K zB6|bTB>2Ct- zmh^Dq`UJQ!%S{cCl|>-wBCgV)C3LOIeVEh6ITVeI$Ia@@L zt~1m{&wMk|`N)CW?XQd|J6{Rtj}1sWNwrog{-M1##viRNqF)VRmMOY@wcDdV zR7>C(0n+uz5SfjQA)Fox_H+tG(n;+)7PwA!-2e$V8iZK&%V52p8q)KXa>)~YWAGoa z=Sh{S+ff8V$(`w>Oog}*isON(jn6{)q$1o?4F+P4p~-KFSz}45J=)(Mr$Nb`K2bhG z3a0`~!8w;niVwQ#-(a{b_=*~@coSm#f6>9e6!LK8RpQCY)mp{yL*j?w8$m9`TIr-HeUlKK-T{oB2)2db za(Z-BP}&J)f~$7$>M-l@nXvD6_G#H)2R=VF&5Q1B7s0{Iz~Mh4Av|!v!N|?OGPDu>^=~1>daU|S zP`)4pU#Sa4^VAB8R80Vx7sXWt-R*G~39Oqz&3kVs)w@C~o$lyEkfCy4BLm;$(4j~! 
zG*aZY=^Tq>XZW7$2AyRRq4fRyZF0REVXT3ww&d{d`oLxH8gBVR zsnp|qPTSuU${d9CPcOzrhfS|OSgU0Ab=5}=_zZ7;9<{!zB{OyQ%gN|iqG5Dcbi6a} z=G0u(rg9SZyI@>K<#a(A&!^8cJL$SZNwS%nWd7+JL4`1wCWr}{==;M0J;9!f0%v(> zfBSF=rH;@Hk`k8~GL(Un7G}y1zU#jLk|#c||8MztBoRw>e|z;|>6@3vF9Y4M!WW~Z z0k0LPmgS~zN*-xr4RuQ8&{BLBBVY!7t%VMvJBE+A8FVljP*cn-*LTiNyZ2ji-yTtq z!Qbf*#wCx^UOp!g3J!lIE{9k5Cw?uP$A(`3bt7`9WzF~9vV7?$of5vH#mYojiJlBy zF#h@qf2u%)&06rJkAy^Q0!w)x0SSqG%fSpZCVTLwMHs2dgV?;H>D-W~r>0M8NPO|% zj5_XE6DFfFR5SM`xt` z>hm+axwa+6@-I9(J8L^`?05Jp>I76tVoY2ELi`4dYh(8v?91*w8`Q5N#5AbpMuwVh zur=sxnX2goxy^4j|KgB{TM2pUWM}QnvV8rB@$6Yvs=>=oD46M7n?0*7SrN5A2CWws zZGUq_ub6>%iDg`WwPq%j6IV<1xL9bp`f8TnyL^L#dHlzo)mH8I-a?c6=`mcK@J(;f z7d$;af(;8C5U~WN<@z*eFsC>F=AKBDezCbR`&tb`eB@kVm7;`Dhl6;w6ps5mT9cCs3WYU_SDJtxUUK|FuwY4jNOVVAX zc!LZketTKb`z{d(mgB^hw?{4a;I6?+sbcME@u_*PqXEGuaRrmxvvrudOO497!D)hy z^wD1HRWDKNJvZ4rXKHeA8O+n%?{AZ{2NE(Rn>{gHSfZxGnwYUEM3o5HjIyOgEc4vh zsFvNPk&uu+v}TwCnTGxKPM<3C-8Z9+a97`Nf*?!=RPE=)&|u;34m4=qlzw4$G~;xl z1oMLx$%%~C@fy@4pMA5NS_bnYG>4p&^jq}&R@nXC7q^l&!ii}UR9LF5#r++Zh6fa72F^o>Y zt6omTNS=$|=iTAEB6=Pz@(C9LPY0eXqiHSo*`H^fM*!jYXOtUXj?smU%}689OfYU1 zSowH|Xf>Is9(VMN8r)hk1aXkGnXV9z=H$2;{NCL7CwzJ{IKLM*9Yw-hnAz6Ud7~D5 zP`G$XHoX{ zfoQSwDv*`-O^o zo`<%lJozL3+xQipJ!e%;PA}@mPYh$mVT@PWogE^_2ekn`BsNN?^E9J=FnvOa7z5P zKm_A6xT27+i4|Vx7P2qh!V^l9S0I=tjwG#L{J=y!Ro%0laHvEeXEr8Bd0fg3mu)7h zs}JhxC}k6*eNGM5%qYw7U5c$mf3`kZKBP@eYtS?3IkFk_wV(D8N!-U>a&q!)`R%fQ zgqz>LWda!T<)!P%Drj*ZLUAvm1-~wD@?rlN45a>6|Is5J5C`im(vhWq7M=h70%2Le z@h3O?3M3~aT)F8Oj$^u&8CFc#I zQ&Zt%lBYjvx`Krmqr?&?1nn*5(vC0OrAron7Ey|!=~hw+Uh82RLaob_UG|lqbH6;~ zymLH66J7JLHciR-+)*l_Q6Q}u(#}Nl(`jo|We|uXwBCJGY2oucx3Uc!?3`((`j1|d zc!Uff^6RBLa7%i4JK5XQBlp$*-0}64Ed3&p`L3r741r7Q(vAyl%I{kNCXBsqt37yp zad?AOTpe^5`q*H1mK z;%Vi~FtD(+I$5I`pxGM?X4V4f6l7!>;d1-6Z^^+>*}G(-Tf=A4h-B8f{DHAHl#{>Ii%!k8;C&c*FZktcm3yz| zSE<{+IBwK9X5AFPrvb|30J6~r&jaoikj8zlz5OsXo=y)J@_HSBBbIfy_;x&VE z>M!tyi5jaGEVIQj!EQdRm6Y ztQi-(P~Pl6V1LR1?dP-(O{RRes7}!YdWLrU5E- zy|J9%lN=3hfw2KwZiX^qLxX=D%HZO3WecCdY#mWuOZ{X3@-7P%2*V%(kU4e`Jwi64 z1t8yCRu=8^bbaauyPk9$OI%JA+5W5Hc24HTS?7J#&3#X%P*DngTZ4h+)t;?Kfguq@ zqTyT?vL*N4`@1rEYT2~^kGHps%Cha+Md^}GNfGHzX=&;129fUWE(t-UyBldK>6Gr2 zkPhjT-skP}zVEm88tYqYjj_kr&+rF6cwhH*);#At<}r_HXlUqlvdgy9A}3}T{akeA zpmncrB{DWkprB-R*sS`Da%IfCL{#*a(uku)3;+N`7}1k(CVEZpFavxp|A!Y8&Cszs zJ>1I644n^*Ib(I!c+CvToTRr~@Gfs|q`|I=tZg)cKoVNpqzF%aHL`4L#2_mK@|VDxm>iNgmUQ!?%iTJjUD+wz_fFsp8a zxt6^BMo+JIf05!`=dnP4RZmI8bisXbk>>_6EUF-5UebpbFe*Wc|xW@16)MFO3K zJki`v(@r7KK$)oIcbl9&Sv!K8^{uBJr!UJAB`FCRg1XHe8~uj4mb+Og&cMJ1QY|TzZ{8 zho{G2t&a}BXKgR9m+E~9xqFy^;E`wS2bZtsW@FkaYNEY<`E!V5-) z9nRQS<-D6|4iof$v^5%&>dv}De?F*QV!J!tnibee{pzyXWXDcyN`n;Dmj8-X!1FBr zXE>qNv~B5QyVs#te*NPkbQteB0deWHh0!mcRD@AOOCGO-{2b3Tg;IaQSh0Eeoff}` z#SY^w1ZJf79uIuuby;N@#BZa?Do1&JHX8$-^-4|npXP_oE6kH$J6Dt(?s4p(+48w? 
zyX;M@I&AJDPx*ObV`65<3@qknM2}FE?>9&fUE!ZcPVafQeOi5?UOMwy!0Y0Vp!c8a zx^BsslIxWK5wV+wXv6Z`u7O*s4sIw^Zu=E>2Xu`GW*Ck++`x_w;qFR)Kki|}svh!o z+v>o86=X)88a-b_X1$hZf0LHkJ0}+*e-AAyIpNvgF29URG>42=Vkn?6WF+LNl@;^^8~*Qyh3+nDu}I$frnV71G@&E_vO^-HMmz$>fh^Di zV$Y!EeM#dh7+hh{E|NCOT-2k-Q)IBAvDpdlrt{jqtJi8t-c~zOZEfL;Y>oHM)pUBR z;8<3BzGxSq`3x0u#rH9MD`^!#MnzG%p`mKntleoVjcmeQS!i{Cp})aK(TcYIob;@jjjHJZcjE2^GK3eK>*5bZSh{zKB^UIaK(ieco-|BaVDxbdo&Ba z4hi`gT=P4c&3RU@d1Mb20sWvzAW`3ZOUqIZ>rd}+dhBwcq_k%>1{R{v@iwn2tc^{W#{xf2bRt>61VUTW!5Hv3H8KHR1%04XByJ# zk1Y7emg962M|Sa-_-aGG+D-?1r-R= zejk0#r0br|Vg5Id@u{JKyFFIl{7}pC-=?f=1L9MWcsqljf>RRyuACaUJ67*`I(xyG zT~4{75-X7*x2w!V#@F2xtl1E2V;7z^oe$^K{%d9e7s9%)%gulE{C4(ID7GtO; z9ro<}B`k#}=}NP2k|0mJ23~V-_atCLotI!#O_xcu2Y1%QUH|C89OHj>KO zl?HdsGa1r_VCU)Nh>PpT;7D0<4B0H0l{%wQ7iZPjIA7o}z?BWTp>$4~%D*sw1_WQt zEhegiAtT28vl}+!tK=O5PabXx_uS1dk|fa$A!f}*5sH`5)n1gb0~x8&30-#J*kgT~ zQ0$UV9`%s_T7~!3s_CS*GyZdW^)mBa(6%t}8Kd~1@xsp3%P^pOEzb9Vy5sjgaB942 zfrkDmcQYwI{nMBJFVE)1t*nni*}}IzVHpxNIyIC;Y|82as5ysX%=Fi%&tbp0?NZJ_ zC#0PR4?p(NmG;MxS~{B4A>)DE!W@O}#L`#>&C0afB$}GGnB`ldLCq4AU*o4zyo}x# zgt#TPdhNpKLygo2IuEgA94q8aZk4Nzdpghhtu~^eR`1yzN-?`f^}nO(4Kt4RiS&~j zvB3N=wq%)1{0NINv>UH?&=3hF@DB!MZ0GFgb>_ntoxYubwilmuLwJ;#E5wNX|3tjDS(IyQ^Q&`Nvr(7lk8UOY zNR|i*_+k!!D%p;L-hhMET&uRZ@;(Lw*b=3LTHtZJ3zcDl1VqB;w4rSKMkP#K%|A^;>ysJ!`wp zdTKtnp(?;am9OWd6_dfE?y2g>_O6QPeD*8<;CHn%eYc|%XOB4&x$v1ybdTY zd6`yn2-)*FwyHwH3%bJnU1WZ(wBF(3lE`VZn^Rz1x%DBy0Qia=GX+f0t8<47z)yW$ z7a+9bn#$)&?|agTIGro;49FpT-uDH_1x<2FLlct8#u$Ek2ZxDW;Y(~B zHmcY$120?Q_cf$(uXfuJIxQ_eFtq>v=B&zMPt0;EKZlsxexih9rx9vY;%uYkGsqki z;HUpq3F-R~Wcu#0HIB{%f&R|el3t~loRrTg`s(ppR;Dvsx}(Wkhtssl(mx{$&IHAp zPo44frZV&fc#Ni_Gzyt7uPX9=Zr)_0 zXVs~QZrJm?JGTuFS&?zuFH>g)txOl1stzV`V=Om$YlhYq$)~;j`qH=Ag&uJSAO8)Y zd%VreEGhPB!YEM*jh$}1Ya$;5o@t5Bj0|VI*cJ}898%}xjSza3=)YW1au8g30b#hh z9KrkhdkawYzu8TuD(HQ+=_4zxb}aPx=M|9oiVV7mDXazAjf|NZEloWTyc24^pI|}K zQh?0f%if$dnRJ{}ySo^DTq2^}7Hps)Rw*t$;0i%!_UFooJ$J^2xJ{le2P0{<5{*vT zxEmHgG_*AGeyya0ASnr^)>YeGgRx1+)>9ox*3Ies2 zA+8|;B_}e&Mf=B7bD31m5^Rnlh1*Djwu_>#Km7^-3^(<=gM74Xud{tdls0*8Dc@Mo zs^1`c@Gj$So7HW9)*Gs`TO|ei=^qy~AV3uot25P#LMDt6i*c*}7HD8u4ka?}hkF45 zw*<*T4~7=6%Sa#|BMBr!@_+)9%4Z6RX^ba+_oMMv@9TK?E+8hM2=oo}frmE^{@ot4 zHyWqKcmNRgDi?sVX(TCY2BAA1GbaLcNXUjD6bunSl+FxaeVC_dSES_BZJ{_`@y_mP zvC-ym=5g_=7|)3JkM*E+oUo_|d-5B7f(3|i>2dFFFLmE%bH~5(cH5%cJIWmQi1fn6 z7=U_gb&l=-bswkKLPHpy>@S>IqEfob+sAs^kxlN^hu6{joz0erh3Xv<|Cbw?;#-c) z-}2~(3WK?(^)}jcgi)VQH@=rBx%)ps;j^tO%`&DWAQoO+?@pM^gobpPZMqHsX!h;d z-QoUan$;n2_m$Ra2q%l8aM>9s%0tQ}1vFhT`=7FR6-Z#;UPBHs2u`K5cF(PE}`K%ZN?(>r( zAe~6-s0=qScQ8=p`ded5nlLtQ+W+pbVHyZzlmanop#A+3pHWMA)%Th-;yEoZEAS8H zB%<&OfRvsRNIF{lIayN!0*;H{tdp>rt>+T;l%B%$B{H>_+CeG(bu?R$GX#d#yXvvJ zkOm0K-A^Dqc~@JzEeD{SaZz1xf+aCA$2?Dc2rxXG3VRV?ynvgaqB{#XZDQUn9;ZPF z=p@B0R6tMS>lvl=HPCy^2NJX;pN{)uU#J4@9d#+?kyNfdAdOH6s`)2?#+oJ2xKt}v zdO4WR%OMS&Wzrur0hF?Y37GYiy0%lftfy@xp6l1VNB*kY5JRt4ER3yTP^v~JGa!9H z`VGLtxSr8vngCw9wBjB9nF4Xt{H;YH;K^({fCq?lEQoOetEwFA-YHpPlQZ^;KkH|D zNp8tiq2I31mY&<)Uz9yaXfxnx0Slu8a6=W{#6ZYMq<&ljcD!0!L1!?c^GXrwzTMQ1 z&#(4kg&wnm5iuqo?yh+yC!H($JcGppP?36LjAE+E7J@4Kfh6Z%gyns%^d-Z@K&{}|=r643)_LLMEBk7IA zFWlI$?r@BZD-rV2YmVK{0#6WpV4}7Y%lEI<`z}&dS1s6c?g;b> zOUV_0I8Oq1f!Y(kvC5fjWEHpdEaR4T3av5)z+GAgB~yg_{Sqow&XhrBSAF>*zDq#= zyBpPMcKibnw^YhZ0;fT3%znAq6lHIBvLNbg)J4q$5l)u4{quOn6b^X53+g@?R{$Lr zphs`6tAZ97>-JE@EH8ozJ_hFU2N^|h2iAgNlrTk!Rz<^lOIiHc8cI@622!d`Go-_m z(qD={Q|9Z6vh~QS2++#L(~1BQH5KW|t>h1%u*ovH?F|Mq1;1lcN@TY<@5BshTBcXv zp71(vN28z-R0DO#^wRal)5g{pFF|I5r*8^^LV7ijZy$pOL0Wvlcp2Vk;5Pi(c4a^c zC+m&l{&>2X3hI7;28^Sb$*s zMp$hw1onVOG6)hzg*y`R5T3D`&J%s}hSPo&^$a-R-Gn@-Fyuzo;IX#yxDN|oI?}l9 
ze-ug4F|Q|Q&Q|J6?@bo?9v{V$2r7{Bx#p#El};p2yrMM)D$C0R3UL3xe9-3qoWsz6 z(Uve7U9I`#mZ0G_DZn+1<8wNnQAUTKLlw2HyxtQ*!is|m!V-BfHtU~;zsuVFv+QkV z$|X~HXl`ejhzF7lc73hYP#DR#-3ia@hEsdtiupC1ollZ!Ke=1zwcUH(tfOAzis_#7s0{3aat0a2u^6A-B z|76}P@>Lw?9j07U1`Xw|5ahk^kJfacRp|s*gM|XMAEPnigo-UWp(+a&=u!nz+43{1o&s4bvF@o&T#=v+$OLtFv?Mt{x|duBrqfvsnYvgk1lAPbQVk8of={#$ILl zn)Vpurs7jSZ*AdE#$2Fdapd!G@rsYh#O8&o* zh=yj%+~4rVXXrG)cCLfoek2tn!nEd!VlOT5>;?v^mmf zFLm~t&kVQr@S%_e4<6$$`M5q`UZ#9^e8EKKh03NQulQ`_Boyz6gfC_GyO) z5ODrD#4DSCs{@ue6}s==tTQ&CnQtl=;;^Ix^Z5A4X?-QY=dp_ywKTG&D3lx7A>L39WG#+~_yq^1iLVm-@gIVNznkd!1EzY+dFuJ__!hw;jhuk#GZZL2lASe~7njN@ChgLR`QK3ZzRA+;`gefQFHTbL!2uW0)X>^brfb=auv`kR{G3c!jCtjql$N2=@yt-Yh83Z_K`p0uheRSF@}Qbdj*Qoz6SlhM}p;QbHfNj3rleq;V=Q2E0k^M#U)N-BVUt-{1} zM~@D&D`raIWJQQEF@-@Ey#Umwf9W(D$F~LZk(Ne0Q>+Y*JI>Gf$pYXB#p5mlWSl1z zY!pddsEwad;0N!|vyVIgXo(l_WJi)+|wI)l^JBer3ChwBr zky>cL3?3sh$0#_k~Zy$zd!wY4>u zTEA(v9VlLs)_KsZ-Cp8BI^ooIYz)(iMQh@aAF zkXffZJgbY$-qrd>6<;3QIHUNGTJS;mDO@p@k|8u8v0n(}eN(O|WfS97^5vwFJq(n& z30H;41MWRmYI+qhvs4PDq@7}#P;0ItE)gFf4Slc~a45)e?{gy@c zYy+MehxLo8UtqiP0Rn)E?%Tgqcca(j+1_AnAlNMwoF1fLG>Pro5DSR(huYzXVIXeZ z7Q^g#w07;xY3!h%lxmS2xg!4fu&rv*NWSZDZWRZID(vz?#f+Y{E%jk8bni_70th-6r2s^Stn5iRPFcD6u)bf5JhwezJR@PGxs(UW{WC zLzt=8o_edCRyqfw@7yqX9Xc!(xBMMm8?A;fppA}o{llUwvBkuft&7b899O+r9+O+# zT9_iFNZF*Xxgc4bcALs=P7UN0J1_Xw&5#RSkl(dt^-dVjT2vxoX~ru)%i6gnB7&0>Rd{}`ocb(8J!O1s5v^n%Obt*dJz z5bou?Dr2G5on^G=B2ZSf;RTnMt6VZF*##Yt!oU1lKM(z10Y0cA_&-jqX{Xc3;<3e8 zX8W{bRuFvr35|iz_G>I;_lIgp&&`OWcUdcf#qT5DZKtXTcQEMGzr!OH^jdQ9+2%G> z-Ji}ZoWshxcRs-!0r{&sS-AEvwBK~GLQ|tpLRdNK+tq-;{N$x`+nPU`u~?lvM{lKi zd3)n*2=PoK8fnvtifBF(3R378YlJjq`7czoU=;X9toIh%XJrD>Hc}W;TwFPd`p3FU z0?X2R&{|y?I7lB^UJ0&z_s>GWAk1k|TV7rs=r4l$X0@%9c}K)s4&g_MM(=UrMPlBTVM3rB-=I|XJWcfO-+ z0=;RSZ+J)u?dPfa+3xP(?oyl-Tb8wf^)0${Ue8<9k7WEQM6+e`VhEQUnPhv)+Y~Q< z(#L1S{{P?Y%*H~kguHNTzKVki)L|U$QRx$!m2_Jv?cRGmv&(ra%4$5x1O{{jCF+Vyz z4m1Z=k&W^n<|S|6x&if~d5Ss@1Vj}DyziLFe+@$1ouS)*pmzi~kjw#2v-}76fPf)j z%lFs|-EKkwsph63Koxc#q!~&1{Z!6DX5A|jFhUT}38hVi7$pp2NQk~(=cy8-M!`a( z{dF@o{<`?A#F*m{oT8cgB|a1kGsNo|N?zO|0PgD6DvSb}FiViBg9DQ7;yn(NR!e#$ z{#6dkn-^X10mB53j#k0Oj^x?Kz(Rd9C4u-Dh-R=b&4AmYa!uU>Bivz%#G?avoQOM( zJVBL}l|!ukqO0=c-d8rII#rt)aXzl~i{QeDq`aM=fNn)fWHG#*D$4Rz(5%qclyLKM zJr>6sOqiii8AgWFO9ZyhHiZmAG!0w`2snZ`eXYpVP?!#cFfo9)=XI1nFty998RdJn zDV_fgjT!dEGcH+j^}+bfgubZBvnT!UF8NDMPN}x$hYG-B!WQ!u#2ODaqLZ*5O&3&r zKtwC5I&t`tE2=_U?4NbyCHk=`C;$SalrZ4{-#St7f1aogte~xrY223gA(eZ()f;*# zmuPrQI>|CgjBD(})#JO!e{OFk22_jo=)DblH=f@7AtwOk327}YExAGiT|kS52`<#E znKB!(lRaHR1{wazZo>@6td`U1mb-9eOq>pjC<7q_emxw~S{qL6AulNFmjTdReDH&d3-IfUgmQ21U!= zs?RP@ov`$p^^7Aag8H5&y=IS?j3#0jJ60J1WNX_*wsLqcGZ$&iUp7N=d)DeW+~l+sjf;w_(~eeD&&AuTf1AVZI#O&Fs5!y52IDDy_Xr6*4y~e;fMu z^|d$PF37bMQXZKeT5KgLzkIm`b(wtW{WXYGa>?YAY%Rcrc0dJ3Bxa3lxfqQ8Rqz8? 
zoMRBkZFw7aOM{=$$&KtgYqW>zT^{e@fltXkQ-Q}H%ZvD{>ysO`!`2{|)qFm@>1cY2 z+tEBx*~EW_{PkI4r6HoqM=$x++~+I?0;PJ@B%pLW6!4IqW*rZ}7gq^^FTOH=tiXDy z)fR;gVUuV<5TcnM4luwu4@TmVsak5WB;6Xz8WSn&tMYn@`TV&!l}wVVrvkVAEwe@_ zFS*u!?|$;m9p-qmQPa*?yX9~r-RG{(kH3dMGcmaSo(!a~rs0ApQuKq}zUM(Ko(@E( zK5(9pfS79**q%P3mwb>n!2xgL2J_K)B~3U`l!_MC+u*C=cgW3uxAFk6YI_$RWkfij zk<{jCl1=7|I2jt1H1Y+^AoMr;pOJhgPd_Ye@Uh&20`wsvD#OupM9@0s6$K~c(f=r6 zB|y#l{=q4bTm1q~T|o=SryxpX7mNz4z`P;Cu=7TkZ?f&gBO12Sd=vTg^k&7eZM!BWqHO>arcQU`wP zKmvXnV-owB3E*&(>hoaFyVxs_H@(_jlKWRX|5+tougu;D;m77Unr4}(KIu?c@}42kQvWtslw-Q zf~4Q%^|4f=M7+s~PZOO7!0D~9RWINk!Gxa);d_8YHYPvB1oV&1ngg9!F69eE7-j`< zKBC&8t9&rWv7j}v8M*CmLLZkJvfiSSO6|SclzxVeo+AbV%2*-WJfJ_QI@Nbcr@zRF zuY0=xjhNGRn`AJZ?G@VYTpZ|sV|Lm!!F7Rwlr15vIRRSd=2>@bP!Ll7XEjS!O(X1)zb+l^!-SBpw#9q>D97|4W}hPr54oyAhr3 z${7cfp+Gnw2ISh15D}I34Bk9Tq8s~Rr@t7dRwkd!n$vq6*oevK+eF@5%+fyvQtHK; z4GIUXH}(BR9ueDv31WC9z#7*!#r#i*G!?;32Lj9}_jV_6 zE=C&8Y1zUZulHsWBJr7f*4CdQ6F^NyE@<7Le6++%$Mr0%Zz*0Rz4B~$;ryT=MFTkt z3w~%tkZ%ga{(A$p=J8z4)aWeN>r07tpUdfZX(T?iIA3QD`lfJNVuR*oP>?HZnT z+M{W;DzxhZ*iA>Dmtc6lOkrPMT32l) zITuD-W%fbmr&gweegTN_6cSFZnZw$m`^T&P#iFCCg>uF=WIM4Ze>tIm`2y?;%&T|E z?HS)c zJ!v@m9fU~!R*V2 za3BGbU&65q68+O<-ZY~T+RC13GX7<}2R^Hh{ty9`bi`>tPj=9)!rb!!5wr;suDXwf z&x8;a0d6-me_DIF@K4$!@GqPV$s94hfO8S=P=}X2B`4u~hw9Bmox>uDUJwL-25L*1 zny*6WRvp+f^e<)zx=Y@L#>&0hUIx4g-=vl4Mq~Z`x$kMxRV^NSCd4a>##`wzf7sZ4>!WhJCpX&<_q5YcwU0Es29Ow)e68Eut;W$Z|wPbp=R z>K&8T(Sx-R#23APfZdIo9g&8cGpv7lOWgh>9g!s_^n2{(2^szMHe!==iL2Bp8QsOJ z-)}~8cE4F`GFSzE$LSWl)D5`!896 zYQ9>rm@I*1r3x9_Za{7&5)~Dbw*|>K^J0~}oVA}#QPXXJE=BBhLHgcex+TN-H%mJa z&%Mqi-OdFm>$FnlU8BQZ>!)cNz+F`4^d>=sczBXV z06*Bjb6Q(B4V$|shD|d;?&wxAfnhI%h4G>|S>KYq^?njTZ@9CcZ8%;k$Roj~HFob~ z(i|nBcTZ;HPIVN5c*(*YpW*Z-x>dR|L1hVDdUty7cZYSr!47ME0gAl6wAzsU#RR{* z=CAA3^*4@ehzjZYEhrd%B&ragVbCZm62RAFOp&E)9BgppcVDAm(yrIpsyUdfW*Qso zR&$XQ5TY_Pon``23}>;w{a_r8X8xxWftJDv6PoIzO;4RxpSF^YPw6ZxIbRpu3%cIG z@1|AKjDwczmQ&kV#kxCJY6U8uschcR%irA%JtLU4+rp@0h$SXht2byYH+c}z6LL1>(D&#{O8Iy2z~2+^3vDF2kd0RQGU5}dP#UPc&G)2@hA z9ZhquvuZC%Dp$pm#t-8v+1?!)3YOOXiP z%0jkq)35vLbe1*wea!N)1_GF!OIG99pT`j{3! 
[GIT binary patch — base85-encoded file data omitted]
zh$evswm9xcakju0MuVr(bs4qJoUJes9J9KPr?VKkjCm**fD>_OI$HaE-d*5ohjM6Q zyfI{Wh z$INV3=H0bC9{x&inMRekYp2kkhQ3AP1lK?jT|f|X!G=u8g$L*uZKgBp$ziVlW%iG) z6n3w#E1r4%*g=)>JsqlJHu*~mR0TAozvrA}tBFG)bkTSAcJpfUYvrY~GULUz>RK^r zwDo2dx|gdeY;bv6d3seQJ8InA9<*w{eY~Ciebo)!d$HklrCvCEW3qj5x=tz!bq(ct zd#KBet;QU?f0a+oV85`+|KX?(LYPrIYQgYD5OlWq3-nwVr+5vBx2On&D+2chkdMFZ zD05xp_cO@&dczBfdu+4@MI^yvOMzgkIv+?#+ANp=El4>cSuM|45T?(?mLZFP0QRKd z3y+{d%)AwMZsIW~MKDSqZH`yKiiJYan3W4=R>Zs`21YN&!8!_a+q35L?b0BN^$=O@ zgMFU0^Qcvy14xfCoAfG-XpaMlf-{|~&>#T40P574|xlAZ=B!q6!($s0o zQG}rj3$iu#&+GJ)X5zqp`;di#~Ey0SLyH><+ zx0#Q5U1lvBTlH?*2lEeU7_$MI^ePP5m3%|XJjoRvDLLVToU8;d&25ydIMIOq@$X6Krx*y_t@7F3rM4AgD2yA8}-FwQphu&M| zdUuz=Grn{PU`wLP4y^A`4ni8iUZmNlwtt|J`~p5|7uwU@=0a}DeEQS_;BK47-}nk6 zBf943u)1G3h}$e{zFcXP>vpC|Yzyt29ZM|y+ycg0?&!YI>*?!Nd(IQ6TRZFkDp+a# z5Hz6&sG5CM7LO4g_0WsqLDJx=DsDuX=NHp%+*Adz+pgD_^cI#JJj`?m2oVTb2~o90 zO_I@W@sO`Xn+w=c99j@FahI^n3=AO*+9UNLC=qggwtv-R5$EULiNplOD?#5ue_ICu zA<@OKD&B_V|MZiSuCuKcr^v56Mx>E8zi~-n%M}~KM zN!+5X+>-PoO$wCviQ)KMr_?>a9qYsCsSQ59V8fOmet<9LfGxQNdxQ~~FxK?XrLr?8}R5!76OK_VB|yxbs0U$fpZBwjl5o6H73XTEwvtMeCHURT*FA}LgJi<<7N#^(GneSN0xM^PzrXlw0H z6kpM-S@KP`Hkbv2m+QZYRz0t;>_CzpB2*s@33&yBUC$`pUw##_Xpo2d?)|iRzVFQO z78h*&p%jL}TVf=p!X{&c>wY+{cnF5n|GH`jf8hCNrsku3ra%@6kzhP9qay5m)N${T ze_x!ZxBQ2ZlVfW-I@U1raebKzora%z0dT8iaFZ09Ta%p={j$V zl(b;SN}R&;AK1t9_QqmPOmLdX=Fv!Y`_sXABRG$=;|(?Z07AZ18D{?6y*QfN zdP&reOd~B-UO4Gp%n{ezFGuO+CI`Zy6^eVfI|>azlT<^vV@FW2)RaXNNdP6rDN^EY zv$5pq$ct>an_}>Q3)>U$;6vI5O)(gUDbnYCzvQyBWAyo~?KPglwIe|Bs@Wf^_}7al z`u_B5fOPR!Ut#R=-mf3$rs+x!IbaZ=*kdK^klGSoHqP{*>Hml=mRujr27q1nKeyP~ zqY-eMH>rp)YrnQaJ$8^oe+BzfpPNjGdVX=O3Z1~sEHV&mRV88+izedFOB_#$U{aOX zf&8=D4E^I|Zgb%G#7i997r0t>o`)Lk~ZWT#^#(+u3!6=Dy}j|b~!1xF~oxteiv~;upT^T0om)F z6fkW+Hnh%nb#1Nh-R{|W>MBDy>Mk)xHg$Y?x?V13ESZu5%-@KFqk05PbgO&EE04~? 
z1ZBNFz-6APq#f=JP?ai3K`M2?$AgDTryzr=FSP$;<}VV25eidtKi{}MYeD*^i6Q*H zvO1n#CdA^hH%3s_uH%*d$H}tx)rVA~73d$|KP}V!`i(3m)`7F=ZEIp8-|BLviGaxO zVq1000kU2xiEg&Pz~o(CQHEwl+bz&jnNQ2`NXB8(lj|n$p6k#j{KYsCVt4g!(T-<% zMMd+2XtVdN4;WhAfmKFD*pT#lm&c&ufKrTYR+v{drI8y334w%=3|}4ePbHO41kK$8 zz6n^h{e$7}35Pms8YvRA-MlF1rAk_s>K>dlxNKHi0xG*n0^YOhNo&cV?*_oHHuFOz zQ>Kt9R!B7f>#u>Oj7lfMbJ68mf(UQ>O@XFVq{oOijVCB$j(L&bb_-HB?r$O)AnT%U z=XsS#>OV+^+@kB=tF2|quyS!Jyfj&?UmUkxaTF_Ze?z8dWotu9^98rs3450J4q;^H zs6<9UOco)#hFo<{O&N_nbu#@1CKi6J;DE2z$+B^l_^L@zzk}x}VYCvRMNbG7%XN_NZ(PF?eROmf7|3zJ@9MCB_*h6mL$lcUd-<^nVT$n` zL>B)DJxIH5Zwl`0jqo?p1YcvJ;Z2dE1w&bQO+iR+SySU4%7wrVjJNfJUx+D+ann29 zq~e85w5SCh`~~$}{Q*z)Aa<%P6dE-w`|xEFS-p5MwZN|jJes*B z6Hju1iY1R{KjpiGHBtn}RHt&}%e1H7i`wN1YV4^Wv7t)g7Z@KH8P$F>|H$=a{;F;A zZ27almApDqcqNZ~=jh0>QGC}+w2KjAF)B;F0}Pg#VL-I3BN9PEcE8Phm_wmJUP{Y9 z+dzV>VD3<9{nz|~WTf6)N#h|rb5~;Y?5HCQJP_2~Kh^h z%6&o)ed;VWqRtI#s;d3MjzI8RDich7w78O4*+fr%*ND0m9ZdK)5Qrr(MEpo@{N5xtEx)Oq zjY7PolJR;A&UV|#x_r^h?C|)wd`WxMY-#;>cBo94E0t+#DnF-E4(jLei=l0PO8e(~ z1hW;jM%y)h(hp~dQ0Czx8X7pfWONnnSy^efB0V4u7==t8Srh^eC0^D)gxV z3EgE(z^FxYp#Vpu4|&eGqoyHXVVli$vqF55>_D*b6 zEO+`>hM33^RGHd0b9n6c@27!bSh2n}gyoicV~p-sXeqeVDyp-I>u!Wp;z{~Ug~EmW zarFsFc?JgAg{0*YAK(+A{T`5T$}(XyJw(IFj8NgnGCD(BoDO9GeC^Eg|97dwz@-Yg zoDxY&4;sU+M-XsX+lzC4Jvis3VjCSA>jQa7m!B^Lx2Po2y|W&<(u6;_%q?8w71(DT zPRgmz(*PB3EG!Z(E6rKC_K*zMo*Rouwsvx3PYn?*nLXIyucf7LaeMKz2 zpBb<%8SEdVEnbuP5u2+PPNwu8gXagmKDDExeQaO7x`St#aSpHhE=KFag);h3m`sd~$ru-p8%j-7TNwvvd;5l`Z{5aa*<{ZA%tu>yjDN zt6h4E1gTG->ckr=+gLEDP)z0R2O0IgSx-$(jfnPc{n ziyruP0pp`fWU+iMFR_z4QO`dm@sU*nS_ed+>_1hmk(z-76RIOZdO;(){f$NbzF#SU z=lC^7(M&2OJ5E-!%E0f>Z1|twKd7y&j#h#T=Dt5Xs3F&$Zgls~fo!b4J6AfcZ0|;` z0RQUxq=t2X9|@Pa2#dDhuzx>WVv7@QOENVq!H0(5HCyApqhb>Pf=67>3V8pQ5&4Aa2O#pyJK zGn0-sj>EA$sVNLb`(y%uw48erQ#7~!Jz_En3c189%?ZU&3dD8JYhK0+ouHKXc-6+k z%IR2tpR(oXvEkutTz&VTz!koCAbXod1OoWmv}Ih^q`(iy#W9;F;oM-%{J)UI4A;0- zk1PACsx}EBgc7Mx1=jGk^%&menIZ#UjL# zLVrRnN`tj&y+8z6j-v{V<-fJPV0f9{uc1`S&#u6B*>DSKS=j|nA7rY_f5#T>kZoWU)3Jzn?r)vwmI+>@ZeFp7 zj0|$==|nEO#1qSqL1ZK2i6t-h3=XQ|YwX9o-0n)}Oc#bYWCk!AC zLk$1S?#O!+m;`BQFTDO;NIy#la!@5yJ%|O`1QluE6PCqvYwarNLK*hqyhB&Cwwq+( zs&_Lk%icy zp%-}gD|>w)Niq&lsQ}fpo0#DpGvD+B6N5=U?Umq|h8O}Y`K0Sj6v80YgENqfNQa#2 z@SLc~ z>wz3r8$Hmn2Df4KjaIvgoLNnvlbxYG5Mvg)yD^)gFAt*OBx7NwNA$h#C^&qZe)f;* zULziJ9)oebUvKWFnO%7!5aqR1^+FF}@J7lv=R-cn@%Tn?&Cgd_$Q~Cry zr7qT4O9L_8*Z0aS<2^Ql0lj@tAz$S35~J^!Wd)G!-z{y<0GF2#<;8D@CaxAmT&0T!3lB z=t&qX=#VwqksxJeCjRywwW?nr4eRCKcqL?+`vZ7x&_L|sKy-xs27B`mb7#KmP7#L` z_q=@Jx0|XTw1Lhi=>ks!{rS<0A$=z*#cCtR=WG%kZfN6#Tr>_(7y0_8JAG-ul42zD z!?Wx@hy*f&`C1(|2l9t;2f4q0e15c`5c12}<1%JG1)+$DbmFKW+uy=_B)7xzJg@IR z6Q#xpLQ9PJ{u8;ASg<@-(8)^?^N=MSDF-Y(Iy$x%GEgunMWP>Rovdg?>uO%u-^o|$J9D(2epo=^> zKSvV>e_rUBB?9(}Q#w0yp)|_G4=aMt`Z%(A7=2yT1F^P%JmJ`Y09@f)?xQvXS$qr~2U_ck zL$3AzzNi@RMUgw%tBd9{8gV=X7E=CrQY;W?jAikQW5ycvM~iQqPdlQ;&oXVyhWhN5 zUd!#JB{uEDSYa=| zLdoFV*cEB4m1T(yt(YrKrzRz1O;wTaR%+ZCCkZA2$rlE78^{DjQ1oX;Mf2H`!r!ex zMjg-$hF3irRHbArb$7?de#A%27*ivyHD8Eg7y#?F82Z)0`*XQp9lk7Z^cena54&mm z{ep#q55QlnzHiw~ZF>D+-%%ve)cu%~A2~^!(1Ubz_k!gV2^@I>au~X>X!9VX2p^xm zc|(;Orx%rXI6O1{=-fX1eBUi|?ho`m5nE%zK>Je5aKQSi( zr+3r}7fQuAoMmUU4{UKDGJuE$2XrYGuC%#aV3f__3Y*T(E+-c%@u`Uu2-%W{EH58z zG|$wA+$uI1AHe@)TU(q*r>K)l5XAhO|Hdk{eKgad9E|su#{4Q&1>W?E;B&ABaIF}z zfJJmnHtSz<^OfD)r~`rmXrbrl)jrBGmg+S}B#XkHgpdr?n$Jv5KXxuls<1#eeav~Q z-1@2*FG23BON~r89t1pWlao8eh6m#?MLJe9KtkOc4`zG@Gk?Puaspm$+lJL`{Xry= zuM@UCTbRT+B_8Q-{8M%$ti;8$CCX|v%%h_TNMcksse6{ZZ48qN#)_ZkAs?NfRFt?U zE$PtaTZ~g=VuoI?{s(}5qukr^w-#JoRjqWN%%}g|aol)Ss6EHLO@~9+6j6pY-qghq zj|7Y?q!1!#plc7xEu8D=v2nV^qXe};O1KW2o7ER*Ux10jShqvEugT$5!^}2mGwDDa 
zTy64+;U|&7SyW=;myHTe)3q`V&x;Yt!_{1hT8kyEsepH6kT>4ud#&NOdFJfxEL3mK zMip^}@b)alN0TsVtYZ=nkJk7Mr-QLLidwJ_f@vLO@YhaQVtzhjm?>1zs3Txkh~sbN zU;nJHzL${WCJeDd`yCvf?RIl$-e#c3(;BVD1O?T`{B-XU$`LdAiwRUS`cC%de-jH} zY8cldGh4jTY4!>)(`%FggbZD3<_wl_SL6(os>Of1wk2fDlNY~v!+xI{DHnsM%Pg(YjW&hRF-9m;90ZMn_^I*%8N#qfL zwEu&0tQtAbR+IFvJw~LYrL%i-C>d31(Fw3Vf9!&RL;3nEP;XnUJRMJrfeaDaSkho2 zMAiH*ofa}2k(TN|KI*8d?f(A&b&?&#fxIb);~wd#NCE7@0fcG#=1-lS zSCK};gChaZ^&@L>nejwM#x>g(>Dk3%f-$v9A>%8^;Yf|iY%_;$I?oa5aG|w0 zPY?)GRqs142_B z8*|H|V1xd``2v|>xs*HG9ZDc5doU?_BjaJ?Ol;2)FKPnXI4=z2%>RxN_P=8kkaI8_ zo0f)r33)%TdUUSi#K{wrSiiWpn;yW_!*l5)iLWiDVK)JMZ+@PO^_W}+vz=t*o^9_^ zEl!8+@BEG@<9>QxuV&`!JB4(|z`uu>->d3$FiDksvBZeac28^gZU?uGMH4dfstjPmvPnGzGMiN!kYq#0^!U=R37`CfRv4F0_JC($17cirk;-N<;Dp-AA! z@ANB-V*MMJItl<7#Y&|ieD-6cg`G?YxwTGe+6<19J?Z`JnGkrUv`DlUTZDt@`iDFV zbn;%$Lba;{KYapFPQb99;U%T!on91iEl~|J7A-cC;Hxh9r>Atj#4kY#V#<07%(Z4S zVL$~AyM2fXlq2!|_XGvz{1I?>=pIF`#QJpm8-RqGmFx^7w@+52aG=5j-tCxv!5ZIr zC$GTD0GhR#7n6s9DKPAQ{{y=oWYGx!H;FaH?RsAZIm%t~{|5Gx#f;@nStka1d;#A* z&`#3U7Khg&OZ5%id9`6m!NTsTmZUPW5)SsubrwG=TpV7c>w(x~r2!*tR`}9wRBJ0y zoE>LevP1GSLGx5LW}i|l^=tBC^+))9R@CJ$p6dc$ug|c&K|Qg@8!p!_SVZ0Jxkq(T z`P#aV&q6j`FfFyjKV{N>guG)>p`^)Qf83!90Txz^QbUwC2r`o584czJj>G73b8{Za@m1mS;XjRZ>7Xnw)4N*;q~y5t;>oCT~IKD*2Pzy8!zc6w$` zz0$g7vI|)Ew=$jD2(3E!d_ZDCjlPxU*8p$v8z5#19FNpO&}utpZAwqbPN%($+weX4%7CNzRh6 zmx@vu5o0(v-(*7E<#|zKuES}Om)-07W&5n({>sg`o8=@09>@u^JDx|-fzw)V+WAsf zS1Sj=hw#73@lu6IkutVAe5fgL2=jzam+DSPg@i~v?m~H|uO(&48Bw7vW~~+PShA>@ zP~Gelj7ck82JH^V+n8p`SsEFyd-UN=1t#K~{&)G%fXl}LngSJt1P6E59q=Q5u!QdF zQ;UAz^4MlNC}Tc3ANl_F4AA$G9rP{aW- zuBY1#B_AIjCU}mUY+oFl4<2a%mb@QMR|>%A_HjwQ=zFDzBJ&?9N1p1&<5sJ+eusY4 zCxtv+AELbRnJhM#g(?Y}qzl3rH)GHA2&eM#1NH33ZYx{)`5&KG0fdb+;&Dz+K!$%vkreD79ih0eu%zuouCGi&6I}ib z-_zGi;ViDZ%r(vIO=XuyY9I!}z>HP#BH-iGrl>P_l?POiu13rPI6%et{BK_POw@p3 zc*Ttae)!wgln7z8n%w^4cX3tTo*%Ui9s;2+|Km1m9BF189s7iEhdL}gC^F(wWrXEZ z6sdo2>C8JXPqni!X|#OMb8p972ol!1IuxKO;j`QRI0M4_>XNBQh4h`xZ2$sQ11Q`N z5xX7+Dz~;_#>Q)E94-s=GZM=6WSP1|u_u-QWO zFYeFMBj)`K>SaZOx(z>tt>|`3dlM9|Ng;Uk%he_a^_oc2&-;^&M8_H>bZ^T%$?Yz% z$OLSPOq|@j^F|gHy7DDoKHv~$OCl=^w(YS-TwDZeRB+2kMiA#0B!Ohcc#*yJ)gRZ~ zZ4{e1OBoGaawBuaAnVVrizMjMBgsLSPxDu`A|bZu>p8i=U@?e?5LdHIWVo!Spn_nlU$effSo}tt*P;!+M7!QMnaf6^g+lfJzWHie#>Wr* z7Po|MhZ$d%73=Ii9oIiSTi8qi(HB^QI_+Kpv|lWgrt+XvNVr4^>MgU~9FH)T^eQ;I zg9|=V7RdHnl#voMXEOUe&Z|A#1qQzVO8WZ*FvL&7MvbYL>3kJqjh)MuM9Bu4mVvy4 zQYEf>>lweS%s#_%fn z*K|`q#&=KLRl0;n2hw5_zaaA;DiP%`_Q>3g5-m**Je^TV_J_F7tjK}yj+UO@`}5l* zeb|Eyd68lA)?zZKF(r1|`Un8)=lncQ&;C_wKRdd9|M&KX-QR;W_Ii}>PIafPv9}}u z00$dNZIQ>QG7rUxMEXAj=V;c(UxQpR22HKEvoy=|o2)RvB{^D^W$Cg9TLZ@*qnlLt zA7W4X(-{CG+V~{mZF^K{lZ$`Bx2D{&Mv^S41M45CqyK#3!1A)WU51@5FcdXksKUb< z*Q{|pa^j7(Pw1*FUFn?AD_klmwdf+@7GE%{U=ndG#%F)X$GiTk97*dpo~uA)+}ug| z$;LCa{wE!s9}n;Qel_)?S+ZYnlbNDH@m+2b6hn*ZNRX1Fz?eB7MUgRkj)Xzms8sV8QPwc%^q#BJ3+ii z(olu#`P-9HxRo%o%P%Dq8$&<^)C{+ZN;nEbU~)pK89FW&GqB>>oBi4g*l-Pu$hY`QST)5gBmn<7k#;N+X2?}~E`(sgbE*s7p*L2`G zQA>xOrwU{_pM^ts3+8%z|7g`(DojjNR`g5xTSSs+`I|4aUSOGvCXk? zjq}Y>RwM21UBL9E-{yfQ{SGt!1$HT41qQ+EWzjLLk!1R8;~aTr)})yoR0BowzyKd7 z1}P{=0`jF{Z>-eq9Bt_y>iFmIp!A-NWT|g`QMztc`uZgHaB?3*$Sd&plA)8?`|hGm z#EWCDV4-5g3{IZ|rg0F8JOR@in;?P=8f|cg&F4MENMc=8w$qriLHm<`pc5ZsF7JB>TBWx)sE{{sq?|*%uz+ld3GL@d;u%QYvtqp5Ax(vwyJ2ks6ms}ENeyN zHGab+u?9^hj{_xiA|YEPs_RQoBVsg1@3#~VhHt-w***@9>|GcMq6HP%UAg}ur2#?| z3=1bx=iC1To7G*IZl2|Pq`Z}UQN8GY@D}83(O+yoCi%PF$i<&m%UCj+&g)o2p0ePU z4fNOS=F_x`HD;J6tF79FnW*Gsn+)YGB67#=7kvcN#p*2gXtTNN>jvU-)of2rJBPK! 
zHPqJF^2^ugGle^H)}!GCwQw;|vq+RATtY=AZ~0uyfTY_XpDj_)<&9{^)4iE@_|a|l zV5;Wbp`!xtL6BG^07bwdMaCjocP(|WGH&K#bwlKn<`wf<@xW_qv zvqHM?WWrfzptr!*E7bW9P(48_7y?~y_R5;wrMF)1w(HI5tQ4-m zKetj-{~>XGrsE|hLZzx~B_I2!iMWs;_XusbK4YBmvOLNBZ zvRbNB6{``|h5Hs7PQd6HEcI~>8R5w6F=6b0{`{Mzx#m)|E)$%cQ;{SSzEc)++sa^l z84FQ3zk>ZP<9M9U?(G(!8Dp~cZ+1TUps%Z6`>FGm`U;oHw`=;(HozFD8?h~Q^_Vyg#EEMoA>`qg}4?;l_HQE!>NjKbUP)64Up*jp)FF`_$1s2{LxzHC8+s3{|vV%KXeExu~R-frs?D$sECP5@fR+(PK&`l<4@ z8u5`o=M`x8%yST=09-&{);lDU>-ki7p5G=4XT4Ud-9`brW-^S%l?{HUy+uhy)4kX) zIxwd6E$_zgIC6}STIRHEqKT;R3jruAD=VID8ru~s2Oe4#-BE;V1mrcr z^cOsLvzLni2cn+jOF^iMzmdgcbcPgbKiJ8CVsYHSXZgnt)|g-Ly1lj5b5KDR{EUXC z@vvZ2jY7{AYZ*vcc>=y|VHC>d*U6+!pb5^Uewv)UJzJeN7LAvJ->)L4?q3Ug2jr7# zwS)n-283dQxs@gdJYR7}^cJ z?(k%#IdawYNxr&jecjd_a5V3mszA^DilO7C^^y2MP-2OB97Q?~Q4)v*`R8wR9`Ekv zh2s04;N6}seF;q{^D&=|L|lg((t{9+=0{i^Ec4v|@(VLIzOFmr8|+(@u$sBOnV8M^ zb78>d|3-&>7AVi|wQP!3Ur*!ykXjXQt^&`$=61xibN75)Jw_jjd>gpC?JviU`tcL^ zL8O}8WB2c$_SmtJodX}rKN+js|g~lyi@z< zmD}ob86_6;29~8#sHmoW%mNg#d9Q4tC`1B<$6V(aINU{@h&j}j%4LdPhO9S7D*?pB zd?J{`Fk}!LE`sfwNz#stKfubQod+)(@0-bKs{S}4sJ0*5+uKUaeI0XiLEk?NMPycv zaRYEt4E*pn-^%F3TOa}!Kwpg``vxm>;h}eZ-h~QrJIlLTqKssE=V4@{jpwmpUTYIsm0B^GTcSwc6y0mWo51s%wiRm3egWN$_&y z6~Tp~n(N$GRMK?5^_SU7-^g^nI@jjy=c+0?a&jfA03Q9H#PmFTZrhoaRHCKl_P2!! z@_f0UcoLlzt|x+)?r$^Dvo=j&hcDA+qGm;zN6ct|#>?&WL)DF`4 zqWynH+)BgqFu%6gXAO`S910&XP&t_&(ur%`zil9S7T*tF0a3O)!t$f6z-M|uZkVlj zoYf4nNXaC6aJK;6KPIHOt3{8uwXKSP*YK%h-5dpQOkkk#W1T5ZaDC;AvN6JX|7}1D zC;NSVd0nINA!lnh;N98w=F>-k6O0igbk8hoXe|<*T($qPH#I#>dg*7uyejiw&L0QNvZJ62l zdoJ0W49^f)QEFY5)fLGzxX~M$NXslpO#f5J&hBvvF{dk@pb&b${y<~`mcB(tDuOfO zyK**;M}|%5IzS5j*x{rhU#@Y4qoU6L%6cpm0&y$T9}7Mu;(w<`ZkS6xC5!Y2Z9G?8 zCZZV|`WP@6#}s+h*nU%_>l61u*gyVkLW|UF2}DH<;PX-aBWf)FZ0h85mNc5DI6*Md z{t0Hk8Iq}$>*?mwGMQl8C6gMcfVltIygZ&ZEmGmvic&8op^a1Xe!@uj@BB~!j4RNJ z?B1nfm&!3q91DSuL;E@LwBkL`mfUYo)y4ezN(7r64&PB-8_LMYWb)o;4hw-o)(2Ce zVm7-1L=I;gDIQEsJ3A?*LXD(BT_&~A1V~^E`QhSF>~RM5@?2|OzN4zzE_HWpeH&PvqdA%KeB7RELT^!8arnYpo(8p1yEJEr#HC?%}+^ zNTE9#8uJph=htz9_B$e&%6Wcrrcim$DQMux&qtapDlAQh-^7?Ev8|gNRMU1ZbMoKs zH4Ox2retBw1MCY8YD&t<2_qU)@SETd@9OTMQ%@tw!B}>PK1uD&01UU!;PWx^kfs?7oQ+NEOS_xpodPD%q8Tds8lmF29$!_H`AgE{VVvD8_ z{}{`|Q+Tz5vs*{XRBb73cr2g)9;S#l^`Q-f-9M|7;7hOch+E4QX|D%&Z~CU6#&=F~Ja3K)~-Uk8SUil+AledB+1 zybeg2&qI6*L>b_`%w~WX27%CgNl&t?FrQlXOk`+u#S^YR&kx#IG83o(7JJjJX8sQy z6b8I#z%|8Eh%F40-v8j@{KLeA!cewAGYXC$_exl>zpwABlvG78{pmG1q<*)657iP`iHiz)MWSpUcC8Qdh_45hRaW6;a~znp-gFa&<;)NI3Az!(l|{(c4?(=Ja!+Rl`c+mW|jIPVbpLU1*@eRHagV(P+XRY z^3Qv;N{#Aw%%tJj!pFOl)n8sa4V2a(DwCY8PLUc?r_JkA<}vFzU>;y+qfT zU2#%|SV|6~n`8icjZN;XRWJ|FVge$$dipXfB0{&e=VZJ_c_g}z0`D7zo|E1#T?yYt z=H7dyjWH~kL~ODB+_Fc$|2VqEc~8hpb)?Lau;HyUCLAWOI0ddHRSi_$Q3T9}mD?+{ zGzt>R`joV_)wMGawL+0EAeR;mR!tKU9nf(ARnbS%uUJ08~t0nHAVArPog=J(W4ZqVS z&OAwHlj)^@J%17Fdqkm?iAnA;5cp95=yuL;ieGRs{?fktqd}pCIbK33FV6?-S4aI^ zJLb#Z@?rAA`7$Q>D>W(sr%x9A;Rl%L0$56HRq$RHoX1!?{CyHvmJfBfFg?vgGl`en zS?i1U5biwMR-^qxjpLI-K|o5Zq^GlZ5|c<)7M#R#e#GbN!t+gWEqGWL>r+kqr;eXD zMn2MY0FG})A*HVtF!v5GBRSzc#!XSx)jq-|9~oJ6MA~nqK>@)!eF6?eV28uNb*|}NYD0n@efZ=5475;G5*B*p>tOA zB{-p{2WyF5_h!5g5A0b`E|$90d6~_JHu!y?IhoU4o+mT+og8hUz7l|gA_#NI(Jslz zgzq~s;bR(-$!#Ar>TXSaP98?0Itmp|78P;Nde+s0`fW0mET0}86+dF_wmkXlA<>K) zj{RI6CBerp@!-N^p$;$tJFo_iWvG`ikV06cjxB0PJikawE7vjW=)1U>HU`VKS?A{D z6p8z5;XlPPpzU}&i?5Rles1YdZmx^f`q^-VU!y zr{qACVl9PCIcJWZFjy9}`SVSc3 z)=)#ncGG~}WN-MKjJ;Bv13lm0aopa)`tHAT$FT@#CAQDZ0$p=NrOkmw*bZEAD6ib2 zs-MWo4@&3s^HS>BE}!1RvIwmb*!tiwYYLCOTzxg4ujmXH-XSRiP@7zERIV-_LbF`O9c5H_6rtIz21A3%|Ke3y(j(a1xmboZzE6D!JqYe6R-r#5@p=w6?^4<+q#=eiOOm<5 zW%P5DRR5;RIXC>ex_le6Q#`YG`e5=ZOyYeRAfs^`37DmaZZJlYs}<3ofQ>8Jf2Z=L 
z2a7kbs!a4Li8@b%`@yE4dAv|Scf~A1<&OgHry{i#td{!*kDJB8(`8l+QBOqE&&l-$ z2vWmZ*8~2xLy1eDP=J-_N$1Y6#Urkra{u^D*S=77*9NC^kI$*z9upCfG>`0=k|rf2L@g#DC_?@!@V3np>3ox+qS+O1xn7yPn4Z{icD_r}3?s~!rjB}#hVQmx+!0P1d@Un=T}~PVk_U)e#mR)6=iZy}hMgE6~jrTJUrwQu7sz zea5e{Rt1_F{1TozZ$|ZN#1@Dex?s{g3rW0wmQuc_3?7QduwKMCPeiJ z&0Vmi$bH;f@X-$%|A`zSSxC2kK9>~5c|P(3@pQ(JQD&#TBfuRDHMenoN&tFpm-LG= zC~6NEIoZXI|Bn~ca;*|6L0nKnF^-$>KreEW%#~LuYS<0IsH5*{!$$T zbe|8^qal6!`x@Uke#%+$>o&Rtl*aCjyZZu4n>QrWCxhlnvtc#!Ut%Fv+O^NJF(Tc*fXXP|JSn&J7#nHc)P0>(WCuNrrSIdWI0tzO;r89-3$X5M!wN2+u1=$)r` zv|4Qo(sRF{6K|+a`;9H=%)J+Ei&|N%!!v76d6$_vIh`qU#2W6oWj16i6G=l6Zf zrp*xHduS+&&ahU+DaFw~!GX9R90iv92S%|xC1v4FJul)H|0fFi@bA_6I>B0F*^7_w z@x?p-?-1BQ_19SuQJvY*#tQ@Jx-=gF6Y2QRaB$B&F9cAxlIFi!oBceh9)6=QR^c(! zVT-AAb>~vbNi`&nYo9H0&YK0X7h9te^NUz)Wd8l0sV#ted3miwzuDxdTfiKnb`gFt z;)JaQ(E?7%;QB%PDD;MzJP;(~v&k*&%HEEszs;-2a~A$vrJ0UfdNqi|TN=4_Ild@q zP~o+|htbBJJt~8LPxI#Gs07xWMq&@eD$cWK*NSgL)*Ch$K~nKUOfR93EynBM)*wzO zr6mFTN#eqEt2W%c6R5lTnG%BsXWUctG}D-2VRVz+BSw3|uplq7TnEP3kpTY}C`brQ zXK7*~8OeYDZZpiM$rTo7jaY3dP;=!k4yoKJewn-6^+@VxeJiHZ#axOV(yyDCW9`B( zP0l%hiB}QOz0og_L?U^6Id{M$pM27DJC!!pXFuqtlYtWWHs2bToGf9#%cP?AF@N%N zrkmi0nfqT3SYAgdmGGuEXOML7s}oA|l_n~IPnTZe;4%NS_cWv*kga7hye>+Wl9#|7 zBlNC%A3pM#C*g+zbuljewIAdH=JSQ<`}Bm$?~_FG{}zkTHk)Z;syO^u>aja#vxw4u z;Pg+SS}2S{X11~!cF{li|xVY8p==SdFKqJ4?%G33^0-+8~bHEv;gpor$p@%a=}8d zRPqP@3W}~oeZ#eH2>Uh?83{@Cd!X;f>0){NQgsXHrgyh!m^3e9R|%!LzO7A)8G0)! z&>3!|={JPGfF4Hf<5lX0n77G>=&)V~lObN`EKbS%3Zg|Ab@+=4Tg%#y5jZ>#y_=v| z+$y&}nAo*}U3Wh=k0+@L-mjUICdA~@r91ew@SJS2X&1G#?Q1AOc5dRy6#=75R_`yDLN7S ziwgu%+8Ze?;UaW#H1ZxRrHFd%$BOyu?iB8 ziaTt6$2y7Rmx6h^gBm=Q8&)sr5)sjjH;7`w1Y$1o;-*V#R;wKO_|vKfQBNM%NsNYa zF07Ab%r?*9>{>$)_af5V2NwI1wb)VftvWbS{Dq!+f&xLpJQ?|qgR@jiwmRnKMOxr* zDah&tos!E!DpZNO?Y7m68yyJ*$OJBAP=?xez5Jh#UFe2QV#@K3&4!UExgrfU52K8R z{}JUa;K7VoMaHDx-o_7=?6ZGL)b@JCF8VoeHYX!0(btJ|tnZ0;pP=ae*P^Qa+9w>SksK-VIZx|f1@^U;$)M@p}wSShRcRoQMfZ#`#Xmu%fZR3Da<4(A@ zyGzca8=}-*rq>R;+QN*5LkLfDPC74G17&EK&4Z^46dK9UJq6nfV;36zyyS{dLlUF( zjAa^y?}J00UZc|3ULFy(}-$Hl%GYn%n`sD$Cpu!|@KUp|RwxO+iR@k-)F6 zlqa>>F7p?&Dev5;KoqizCq@RhMM9e-6Mnc48E{tS30l|S^`$dvvIldn_%-3k$6N(r zNENz9yJ4&rc(146KYVJvdaOOmJ?KN1LS5f3TG>J$EvRMzB@G1F#-S4e-^C*KNr?=c zoX|?gP3;ty}40{8T}-7vJp=Nm?o${4CN3N;~h3&K5;(I-TQ|j|A7A(>LUn~5(Fd>eziNKYeKvsk3~Xps;#JPp}N_heF%j9 zDtiy$^a9B(BN_es^f-tv7TF9qaP%@FS071!ot5MV1!;Z%>EKZCGcETl`hmFcKm9Px z5?_$5LJ-T(Z+7%?MYxru4#|itI1>X;aLQ6Yqw=;WKs67eoqXb>=0^?pAdU=vfy3Cu z#5rWu`?OJpBlnUvfuHMWu)fQjlwph(M0Os%Xh2%5{oZKd_sWul#BYo6$C3PCWCPFa zpxi%ZcRE?sSfF{gkl4#h1>XN|W(5qVov=L2%o)A??>QYKEa*iYNrlaSD0HZ3hX%-j z^0)sVdz{I8R*Lhj0c5c~)sS#`NJiB9QS4hwsfxAMFBXu)0-Et94-UhP(|Oc3)0rH1 zT%x}q>Y8EQ%D^CxEb+}se4^z-R^@VbEj2{5wh0yC>GqfpoNpMg^)5i?gm-lNY_F6* zPIrS$r`gHMxEDfo2zU^J^A2Wu*NA3{$r=q@Kcve}5K%*T$SAehgCFa#e9S!HS+sH~ z9<~dW52i~rldSpT*-@6Vie;tbR0h=_+?d7D3PRw2dO_E*tv|{9uLL3evb3e0GycEY zom6;>r!vp$q0ld}CnQUSJkj&}2wx`mNM2H)9nSkOpqq}T`~`l7hA>Y638w!ZOH!}n zVI2g_1v4oHC+epU99H=6M3#ufocQIp5kVb? zLF9VK{q?F@6d-=YCn8kvv zK|!<7+KC`)9+;bhYsTQ`C{?G&1j)hrW&_JSgG@aCA@KBM(u95;MCkJsw$)9M8)E)S z*P;Lc`%)2~{J*9S>)-R+Vs*NQXK&io@RhU!e*$~{Ga;{szgHShq~OC@^EBF$L@l!j zI1%+LwO3ZA|1QC^`Al(ibBcB$!#D2@Oq#5?x#|>cfgSO}-147=FsxfJz@KvgG54LA z5;EmQGdza_|1qYA3uT_3>>2d81g6AiH$bdT+iWSXwqSh3;2B6K96+80 z0n4?2pq2Uqu^dFcBii-C@{39M1LU#`ow1+kVSQJc@07q`%*yp~JWn!+<9m&w|5@1? 
zKw;wG4Dqb$^0pMYVouA2-PIA6o=)joNfKH>RS8EuW!VTzcmm0ISMF*a{wftP2+_ z9Rd$wD%C={bXXuL6At=$Nq8(x+pUMuQHv8=6HW2lGb}tj+!!X){tVEk;TaU_{ijbW zMH`Rx3G_i6d|$qNzcG{}4tNoAMO1tBdo~Qo6z4=*;mbD;d0p6nc$G@cF8g2cdHIk8 zM~4c@$wasM8~-0!+@v3jli2~G|D|0<2ahmB`?;RN&kZta~ag8=D&6qiav2xgwf@Fr&P6E zo>zZJ(5Gb-v+HFn3Ob1>IUf~pPf*HCV16IHi)c(rGg@6)nYSIs&QX3e>j*pOB(9ri zzR{#Ya!Xm#q#!M;jxZ0ns@H>06>8H6ZFGiRLN+fYdQJI`uFWR zC6zGF`=@(1Z|_8>Fx5}_v4NZ^)4RtT^CF?s({^u8R3gDVtc^XERP_cIS2xJmXpuk! z6|z^GcyFSx5)U0L-crT6#a{1*k+9z#NPVcP4hrIb=CQJ}Sh;oknjCo#_&M3#Px9xjKq?fAIp^rn z{j|&|LR%8mua8>cQbrR+Xu~BSs2iI1LjlFzVo97r^si$lLchcN&!RoGrO|eymFaV@5rw8>~wq&wVSeJ6d zk%?r0hPa;t-{lpEvAS1?13U|CX8GSfXIS?JpUb$Nt&4Qri^d~j@R*sI(g=3~;&!9i zo7rlUJD4MAjt_It#un|bJpWbcGri*mko=`pdOEtI2AF*W`vWB}ND}4`Ue)YYgUsLH zP9r11?=ln6tE4;q{=X;@XvQ`B( z>aIRMHl-T!5?ssQ?zB%kx(+?atGF&W$yT=Brp$Y?(ZHKHFkLc4x1@YgO>o2~aXC3T ze>-T5DQM(5U(B#(Ia1BPxBpkjQX|3&t|AqPjTg&myU*G;2%?}KiTJ+U4(>ilQJf;j zD9^LjD$EA0LYh-9PiXr+B+84-X}OF2OKTzI0Rk=A1N~4;VeA&$MZ+zx9Ag>7g2XH= zk}LKL)eO1E!@b(8%mR>-l4Dj0D%X%-h`cT9eBd0MG~Bt|>*{EUe2N65;f|CEO7bj1L6C&LH;w-)z0P7>q_<7dPfK~n6C_WOSmc;ZFZH| z>kW6I0Ulloc}l>nGKL*%6J<=V|LhOICohK+9Pqww@qs%M5_Be)DV)yjP@o{QS)ot1 z(poC)Y&f(a5cs7lDE=%MIZ|aIPAireM78^8g#a+b^K(NeS4REsw8@fMq{^ zd~>%E>GE<3XXVM;e7V%Nf`h*43ig^6k?#?%;lKIP1gV%4A1@)TUR{Jv+@&1|GT+Jl zQ$VAjFq6)&4}{8ATZ+g=V&}(>G_01|DP*%)(tw!qZ=e(^+3WG#{X<$F{6sBrh}-A@ zJd$U7n4tUC9$$$3h50;h`<*`u&(83*S`0Cf37k+ViVA0YHUVSu$3@n{PfhNKZQILE zxnHm&0)oo^@5p&*Y_x7N)+w6P%hiZ5(*jT(g2k{1*QFYK= zP7XyjgEN>ggE@?^z-}`nG=l$y^w{|EVkWyG-cPqRs@UhfnzXX#`5$cj4uN2COR;29 zi6O-A@b=#45Y^S1t{ZgU=l4^ERNlF?Eg4bYU6uX@c+?)j4<9G|14Y;(+`~{hFgOn( zA+@u%$#+C8nr!hb=F};~hrwJj<_#LO~Ki2C3ZUv~(^he9B8gSc9eu3bJ zw|Ix}h9`HK!sVdhC)1@l4a$Vs6pUUo3u^aXl9TW#@twm5oP&b8e_<;_cCVs<^cvVs z({Rif4+p1a;j}W|IO~%nw}xS{^8qfw`}_0-ilO5N#JdASxH>DQE0s3uthtc8%|AT_+w2#5Rz?Xp-EvoHG{p}0 zl9JQ2OVFf$-24Vw#)NHb>$r@-5(h%GS0|#&8ae;~kr@2~tPaGyCV zvU{H1sl2YQ;FA&RUmI$ueg_C_q-3W5`JEkcJ;kM0WJ~1RE&&hkrb@9{u{fZcy#^)%yrdyeC-A6X>@r}u)t~9`^Z#Xzf#5Jru zD`nzO!eC=(=$f>gT;Q9eZ^~q2=iR91{`UY|?dAC`bp|QTv>sBPd1|=9p^=KczIp>= ztk$9gDZzlM&2>Z7efsFclz?t_O+wHkVWYr5<7Hvl|6e>Tp9*3s@OtrLFWDrD&mlfB zQT}&8H-FhT2{JYG+$mmZeLt=gftL&3Wr0nJc|S)}U|d0@VWJit8KmVPeVJhm970gM z@7~9&NL<7Rs4zTDK!W7P{EDxVkP%y`3B^0xTA{fCVf6sDi7ib1M-ddCPm_kkY*c^% zGGykT@hUBLsJhj#R7Pzvyq153OZt9ckOiZ0!&7gA7gY1S_U}Y>c{jgzsVrg6NQMO* zzzkwP`zMYrj$p-U?^qrZx*~am4seJ~cwH@!`&(c;<7R$)6Zs<=HKDs&5s0IL-~8F( zMeW|6?+V~l0q z!vRBa$@>-a%D_yHJI6w8xg0VsXTHAKWWm0*`+R-d)-Qeag5-Bq2DoP1`KWHF>WvOp z#kbuV@I%76#B}ZiEChc7;VyZ+VZk^QFMw{q9DcbcPyqG8#qXaH=x_G(TBOqJ%5{0U zjkH?zh!l4^YL0vIZJvR16B2|V6iyq!?O5~T25-+lup;WLLN@6CXA5CJ`?N#ABa8c@ zKJ*W;#}h;Lygp}PAhFv36D5B*K!N}NrfR$3EjFY00sMX@zdiitUi}3E5IT;WpP99_ z2oMM?>hAKg`q@ek58QJU=w*-2xlA(bH;W&_Hp{)ZVmFBi zj{yBQg6O{KC`3FcDK6G%+c$OltTF$3?9m)yZkIMa()4`Z^N4+`sEz!|H+VzfEl{8B z=1lr!+2D?-{HXHHr&Mk0GrcA;ndTR21VTu=2&#s$r?DCzwjWv8k$qd50TSS4`sXtu zziW;rB-B9R7eBAJD%2y#_Bryruf#K@gtIKN$ z5=B=vwC9Hzc=M6Aq-Xf=sM|;!EjznbdbO1W=T3tri7R^egJXcV!nh z5)Y56#u^q1Dj@1fg8x&?&-=t@24E?R0!Ef!{z zmUGD%Ni->*@HkCA*>1IE`DJwpY9?Ot=;;yCK5b4RGM-6xIo@z;HQ4=<5>8J{1+b}j zkn+dXjS!2%r(8B9-#s`QLL+KwQ2FwSetFm=nDhP!ybP$0vmKP7)bH?1A@trO`360K zbZ_%-wT*0LZRI!SOqbLKZ_CI+D8}b|8mRby_{e{Sdn#xXNn@LfD9z+)e@Od9P-BZ> za`kRE{lTgy7L-5a4a588!83*MXLWI*U_CfKm&elUX_+935x_eF@yl*}#Op0Kn0@!< z1D)#q#r8m_T(^X?zWD)o2D;OPa#ZB5=!N)ljSPf8BQR&Y&NHb*EdBdBtL_=y;FOy* zrq`+gw{^lCDk3H!t+$uSW-fp%if?D34}fS}1H{6Ef_$19w#_d5cfXS|q{0LOq&WpZ z^p9}n_#X!C)Ul^Ox)k|J_kVl3;#YRvb8_ps!aAN1>t+@N2-{{6s=zEr$e~BW5y?ai z2vs7YUO*?Vp=fvix9u8U=5+5t&AYw}j_SfMM;Y zZ!X*DmB6Dp+4z5GNtfKOO@L$vFYReq{lbJ}14ex067D6X8O9oxG)wa+o6tn)AI})F 
zmzer?u?Zt2$jn{SN#{C*>>swPVq!z(h(jE50Hs3S`DKik z8u(6=qteC{=_+uE&5}}}VLq6I)G zg9*i)aS8(+tmgitEpy<+NBX4Q(SA|4vb0g}Qd9DR^5gV$sDEQxeaW`*;|uS(3QWW` zeM)T&i%)h{0g8bCJ0$ie#&|@m;dN}xV;}m80?!t>7Z+Y0o`PwWl>E!0!`xz@(twqu zwGPE!cnFzn3dPqfY8sQqMmarro-sU|tFdw?o(bhl-_Ml{vhXQgzJ~jdhunqaM8IO~ zIOKU|GwO>Y5w9qQUgsSwX+lrEO-%C1epTpm(pkr*D&1?o4xpkl;Wtq3r}#{J#U|l) zw0kOY$3p4e?yg9+zVE;8Mv5Wh#c{cyljEOOyl+d~Ydo1s3v6jMYL5#Aq#Q8-{xe>SA!P%Om_pwCyxcqBkUP;`XM8?Z0Hi+W1&3-k-**zXW4X4gBH8doCE#2PDdj<*;s_ecW$|q>*oG8H3zD z^qJ4MXTpHdY|fN72Y?JsqLmFS!yO3={=&@onlUKQ@7FDW6_f+lneGY^auoIFbXyf) z!nPp0AqZo+_EPn?AMk;xv!gikeSj)O1U|e-ec!KOFLi2cV|7a%COa={|7UZS@oQE6 zGIg!m8;wNoWC6O6dY?}HPdi^}kbeSu8wpAOpD^Nb0~h22^XBsBUA6x=jfmNy^lDGP zsNTnzDVhdQM!w@)D=oid0I0613hJEH$~brtN9%1oHx- z?oUrgMtbz^fTL^U&f>4(cE*E`jC5h8RV!=l7Y<%%)~5WSS>y)5y9D)-5qQC$=dsD7 zwc3q168W2cdBfhR(;n?o{mTC`0TsCZNjux0YQ~wd+59?BS&W=mep9)Od~ zrJnPaM&=RS^>OZfU8bB|s}K zO2M`J7c?Ygy4hjwzE3l*m!8c{i+2bUkbECJmp=B-#K!eN3_`7%rEi9|DN9xNtS1A) zRRgzEA~*;VtNTfz==;cUMJSZq^L{iQh1{d-*(1D^2vHDZR&VrzI!!y^0U1IV8I9^T zyS&<)oPPD$iJQmT90*5NldRN87(uXL%!d2-1r9-NG(1Sc38HQWjHNs)kBaKf!6~QE z2P){P4Czs3!G`D}l7v&Q*^ua}jjMxbvyqU`!+WCx%)T*4FW&9m_B~TQznOW>@sq# zg-QQ&GdJHnJ3s66+h%`JAfcu)WSUZ<5_ZEw%|Gl`zBOg04|^A>BfjA5PsfKgU#9(UQz zQ>)`{*w6uwOcoNvIkT=I?T!r`Gv7nemNbv^urj4=4vDYVdld8Xc9PGJTxLYcKskw; ztyP+*v3nqB+SdxN%s%*>bfRiz9v1azNUSAKXt`LYKuq(H=;m8NZ}o(r69t_qXZf9aGgCA zg{@X2A+4X#c&E`M?)PzHui0i3R-W_pHBzbhd>;{voRIs?H?_5+MEJblGTq|WvuiRM z*y$|9CV!4A_-m-NAc{V0K^B_f zx_XgH^BZC$kFSCTD+@-BjRZ5#nNJuzfj>Z zpA2&qPlzM`Tw$8Cn{-k>Cv6r+r+P^=(|={x;e|-#v(6*7P*ID)2WbDjY*cm%GAO}640?oDsDZ|`+d+J#DNAEpB2m4N@PV|aRcmPSh zZFm*UiWu55>`%Mby|THML;DNz{Eu8t9IHdwYty9mlP8Jm`-S6%dglkfr}r!-au%$5 z{udN%jOn2$EYZYYrwRqtDM1o@x66l-Kts}F2?|({0bN@>E|2DrNiR`Amv?l}vVmMc zr3vuMs{LGrl1%+69GcCmj&l6x!d;a$sgNRp&*E`wF8`it8JwzOb(>?O6;dcilUQLz zNjU0U9>VK-9_{46_)RWX;CrcBwMuu4PLHQDKTAgKX#H;@=Y?f>V&h*E4^=&loo|a3 zTzo7ga(PvtTQ-@wW6YIGOEOhIh2LFX8!(ngq`m;K;c_L0S~iPaB`8}mS3*lZLjVV1 zKRr|Bgnjfq7b<*WTQ3K_Jze-*KGnN2rMt^7x>Lh`Z>kalo za43hhDZ_fuXp;LgA``{ovP?}LO76x-@qwB?lqHXgaX&Y!W)XJ z=7me832;*En!&dKT?5+m1{W73A$YTDBHX0GVjNZs=R65Z-1{;JtmF5Pa%rMJ)7kih zf`h1h0K^D-bRAM=NUIYEA+B=3w!=% zO@It>Udeyhdv_3A%RxDavOd^SE;5dmhSx!<+y0MCc`6fyNLXxqRxN57OcPtm zlw=}#;;90*`_Y^-Ou(!xDDT83%&_6HXb3U=efz+RBRr2IiXWvSx@!0$j z67AX~gT0ze?lO_*9v=QGaqG)fjgW0O_{{6^w0Sq#EH{1)=`Lx6&25CJvtdfWk1V`q zEcNPn8_zp3?Y_X~IYY$f17Zc!!X4YIcy_~YDtMzS ziJ!(!;kip5%AFTF?X%8#_g*L!i^9{WmZv99F7zti>73pW?Qa)9eM?uo<2$RVQ)*QZ zDs4=XY|#rg?O!ga)rh3P&F>EP{U1N=1^HFn2E?kU_PBvbc#GJ{y`T-qo>=ITM zDvRcEy%#C&ni+wV)oYE=1OeID6CEmXoNh+40dw@OTnMBP`}$waph4W4lsw<%^~{>w zZ>N5;8V8kJN*rNj+7dRV)dWh-1_ear6!DrkDn0{ZXvj-9W8 zXXj^t-h_N3iiOcF|9Om5{Hy%IZc11b z!Z6n%#`sZUKOsscjYfREiMo`4;vk>i*}A!t2Cpyp@jUpp*WUY79Oddz30-==S=vn& z&)AaF-x#&q#ILNNQrwh0j2raG-H+0^o$qxFmSp(LKXCa82j27pdu^j z<}cZZWI@nV0gMF9oLA}V+mb;f!9?DFVZV~b=bvpD8}^@2Zlk;4j_?#Zwpu;4MP8q} zh}D|)R9HbS`O}>K-=HBEDf6Fny`OK({60Esi9H|*UmI|$`-IaaCT8#?hptUsg?_JY zzv4i=1??@)oA;OE(CQc)BrMp#Kp;YPnw9ifMgJD2{F@B@`K9Z++wlh5Sw}}CagJW@ z>eUwPf`20uoyPc`G2tw6cw>Ww&cezR?NJak9GgXMD)o73G#!sx0%ps|RIrRpRUXeK9?p3uA5FV{;F26s@ z*1&*Nty?Gugl}*L=wk7F?W8IjZm%M=)fl6HK;n6+!y8s+7Nc}yXdOCw|Mf|F?)j8y zk3(Wj-}^R+_VYy9VJ#D-i_Gi@$l-e|eo}@rjzqZImL>m3E+1a%mzKgAdxHGjjp9A4 zk^3#{An>Z$EMB8d#?D;Rp;Dhe&VlPTw%+Y&*Y?C@V-*Y;w>tH z&C28Kf7(qL()FEg1=mzPLBnH9+L3I(8^Es3mUetL_C8RU|5 z^wLgB-)!^F5 z^&=G&AC(z^=uiIIa4<&ohqeM5Hf550u&tlR`JT^mWrwXPB_H>triW7cViPQ_^{^oI zvmtIJme3cb?AzOX*d510tNaJJkOmv|WTBi znJXK4I(_*f8QURa5Ue}0F7eTC`-m7K4pI0c&yE zPD7#Y;#4F8{Evi(4X5)7-->fYd>f7QlX5R)+)S4l8(fGOh?|l;G>wr5qTwqj8v-I~ 
zTWMg2t-FnBt+4|M!dBTsx5fdZeW^x&DFn*S&JG3jlPWJ)!F{0-6?v2`t)jdU>-3gvu7bf-Ml$XWgarD=X!8M4vpv&w&Tu5KGSet{)=a34eoMsQ8K68;J!evx-GYxj8zK@-WYyxN@&wM zNFq1|3u0ko{T5oL2rqxm6%CPRJg1_^WRctM^7TnRYAjVQ#l606FIfCCCZv>z#veRL z?(X2@5}zf5CNY-?+TNc~PXWbZ@g#Z$IsN=yD{#hnd<1OC5dk3J@ij~Pa; znS|PgXDp%k+^XibARU>^^kZ1xC zNaYi9S?)(Q&7?Og{RGL}Rbh3yP^Nsemu-snaeF*xfx)tP-WLp8s?K`Xg`)8g(6a`Z zb6pQAC$&3;cRnpS{LN5e$=3UIhT|1>vs!;|2C$yVti~H)|GSwzX9U7uW*9xDwt{U@ zeQ%DxyT9DqJ{{0`4N}!fHb@A5ELOE7Q|qbpe~+PVz^+crQtu=eCI|BqUCbi|Zu3{8 zV?WImIWDoxP^Peie)|mRGyJ!e0D6WJ|KUr{-U0Sn*(w?~?ZhG6ssppt^mWWv?A6a| zYrW==9v-Pth>DTGacKzPw#=su2(w3Zb`9&C2Q0EY^oyS}UPJz$8CW{?j26I5I7`~V zY|@7~Z$CV7%}LQ@;5@RWVqS7fCwBIV(!!6bFT3L{G+281WxjwajeDKwdwMRqwbU1QO0Ok)FLf@;F;C6Q zj1^2ZPB0%iTJK>t<4m2mdTpDtmz21q2|V~vNWeEWQ%cyUmOPB5Z&0asHNx*7FsFNe z|B?VE)C9n1-L1?(p!4M;agjtsHQv%+^uqlG7BB9}r^Z{QRZLN${9tqU-=j>#DZ`6p9 zFk46*k4PNe_E-`b)}`l)gUgI5o2mJ%`s^dqNSxwKlNl%=XXw zryCW)!*IAhkH)n&L}8!ys)%^$EKUv(vbZIV(d8tBTwrf2eA3mV(j<_W5wp*b!U#;M zP%}p7kB&M*B5fAwMVoCNSfr44OFFOEgLta=;Y^E&^s2CCHTiJGLL6Q%))%Z@UN>6l|{LGt#P8cE&etU8@o1x4s+1B^Y;L`aFL20U2 z=aiC6tGP~zEn6LBEwL*5Qv1?)4GAGTH)0d}qj%1f4ox!gv%st+e#g&ZH<&;L(*H`} zG?0r`=>`Bf1@_3apB@%djNQz$!8czO{=0N#iBBj|5 zPuaZzYipzByp#>Ol=s$H9S~J0%~UrR1b(VpOmc(HkL|yBq5IZ8hyTDn)NlSoB)%Q4 z7}_R{oS;TFoxtMQKy(KV$ZrlWQ(G_)q=GL$fT3EdyR+@?>tw!>KZwv^ve@27iad*x zYtr@lqVE*ii3*xj{r_W$BRq<9X zDQm=3TR@wYTeghV*^kGVRk2;~_SJ5--(nnzBf_z_x4+9Z!TTltqkN1f3Wtd?gVj_C zBNK!lc_^F3c}zEEmD^=&G3NYyWWNpc~wJm*%ycku3@Yr*8eVN)Am*NTLL&{ zr}M!vWw;i(aG=n5v##W66dMjG?px~YB)3kT6o;@yjIYu;$Y{t-q&D(qD+>|Y?m^@B+bJZM%@o}kYU=*8=by8dBESV|q`PY-SjAO> z%q-8y;dLJe)Y*L=3_@t~lt;m-Jo-^_EuhDRTrj%gWik(}kAlyg{{pj_r-nZi^*PCD zf4ocmODReIxr1W5vIa7QH~OqQx}n4LO@Ed|kOKzF7MM#iojGuDP*}eABBu!#2Ona} zJD_F8Sl6LF0PP|TfOca-{^t zXDz50JT5sPHT4>Xj~IbLCDiG-P^FF2ZDj`mM!0qt5rl=0_M8_&2hN;xWa(^P_kSc& z#MzhGaYW$0Q)_BhBcV5ZM7n`RsxPnbM_qGWYc=Kh-eJ&X(Kt1ub(mBpGr7J_Xp-nN zdbf!)c#Fx?;HKbyx~Sm$#IjU6l3L?K#tatc*xP@9rpi8JUYtUlOZi%_KEJ~$K={5s zN=l{)R52=beYb@*WS`tY0gfpI4Z5ux2p~koyK#eBtFct2QsXyu z{?;GG1GbX?4ys!}86#fTQ!06%Jjor9i+zeB_ctg0iJqk4a2aSb7RX2=+= zGOzX-)@o82QlxS_k^&cXt3B|}HxAIPdIZi0@`cWA%@)5mFCOpv%1e*#@1&U|A(PqK z%{H228Ehq#xO?X7aplKZUzQ~d=<0&rGF(C>5hv1^YR2HU4k6a5^L5V=T>ss#MhA4e zJeAYywd>TIPQ@@6s$B0pA+B_p*Q~dCN{=Tq!q3@u?2TuLjHfZ&)r98Hl6G2LDQN;> zFralr*X?YR2{tY#@lc)+vFF!oZqgYU(4Z;LsNs-2@17!6YPP&!`}I~D^=ds;ZCH^*sKlG)6s zsXeD6<*~@6rBmrMp2bQe5`@RBCf+Z+I6$`@vn+W$xRCdx`42?g1Ei zKduEB$nq#uckTY0NLv$O2(e4;HwEQ(>XcIZhcITDLwLCeISm?EOMG)39g(RP# zkgQ*dLm2peSLH~bu69St-S!3rU=ni@JW!;@WJss$!6M+Y)K~yJ7QyBZyoG|gJzZh` zd)du@ce-v|nQJv9aCDb1QiKQEz0;abxum1beuP4~6hL|wDfGRnDGO>RqPd+uNBccU z3coycVG*rk{@dtaN@LVdXgFl-g(MGKsMg#7UYP^Xr;x7e$o|)L{_^pGP+rpgplNAx zo#BTH%^N0wnV~1Kr#%SmHO>Az6RtF8HC8tf2c5PK-N}~og;Wswmcs)f59}|5fHi6^ zym^l3Upnn}Icq3;mQCt;tw3Op!V1Uz4Kl`R5?Bn2Q}KG~%Dy2%iW;2W6C?`QYDEwy zeG#;Y_;1GsS51dafD5@QOsRdwMcp^H<0bBA zYRe}vUs#mhU!9rz`<2)B_=;z1qd$dyN@LVjyR+?$XsTT^8nMnmO@!U@zZ6U)mkT=F zj)+wkQvTI~){0cokk0Z8&-F;VG#%WJ0jaYb8(p|hNn@pP@~3tq)j9W&0^&@6SA%6l zaao++rHTz$vOBPShYl75;E=E$3dJeI=W8vY1;OqYo2o%$O6-#6TwGj8dXA1{5o9#V z#VT3I(WIG}xyB>0W6ic}$sv@AIiC>N%k3Yfz!u$!=_q!2K-<7Si?zwXs zeX+;iv`UO!XFL+r&(`gicQk|#za?*N_aP>zeWO!M9~{txmT$VSwM#X&>YBB;R--Mo z+1Zw&;4xB*xud*$TKro`emm0coIJ5njvHuWK*Hmf98abvJzw)NUPvPON8;Q&UovBI zpZ%m%WksRtiYs~+{b|I8@l!U>DGjh47spa`c~R@lW*CI4)apWs_NAc!stQO17V!!8(SCp(71qSh4=9Av ztTPII7GSeU!jm+bh=b<(H}j(q&%dNTgA?yFtx7zwkPzhAt_dQC=Qp3mzl%M1Pu-O; zNa}pwOOhwoZFl)rh9V=52bmd>F@=t{?fL>3f!d(vxP&@eTf2=*6T#aL{iz`Ko@jsp zmc(AW?s2`W?nWvkR~V6S*>(H<@K43kBZ`<0`BlVvd1?Z_|>~W zlQdp}Y$ZK~n`iKQX_?B0y1LqfZ-i-!svG08afleX0V=+SUTj8--G*>?JUa%1V0V1) 
zp?}n&JmpC=q7W303QzO(qxZwb#6b8a8aH`D=_u88!P;M7w4GbfyoSPdjx}~HLBvQQ z)CGTtL?d5&*xM6*Wx2K=i(!8FPbbe0&aJ>Q>AV;gD=lhu@A7z;u~MnQID=ep-7}@u zsmid)<`P${S*6K6oS9(i@iCv@tB51?oH+ADGzHuA`MuXw6-uAJcTd%i`{{(_g@{^( zBJ3p@dECPvH%*;J&!}bhyjs5po+gZ?JZ1bJ;Q>MbkL9~^GTM4vKPeCknhyRXM$t)? z^-^#90?ba$LreQosIdWylKnZnfw+mqI)-~`A^MN6%XK6R5=#Cacz?r?k z?e<``ev2zxly*pJ${8wT$p%G#H0L{_LVac zuZ#`+_(_s`TDe~(4cM;3qwAOw@&XE@_SqIM09f&;_BIujd`@a)Qi=47+g5OZ043NJ zm!u@t(*jfC{`wt_#0P}NmJsX)#~kC+nXo`>x*}Jga-Q z`rjDj16S*fCuDN6*`ioPo?X0fC%Tz_zX!Bx#2(76hu>`v3J}}9$~+7_>E{bnQaMtk z!v1F3rIJlU*q16xOq>z6zaQyFCljB2)6jqK^f`w039t`|h$46}?M}aZoWy7vdf)ek zcu_+f0>8%CZabr%dLB;eehmj`a@j;67n1YcQpH~uJ#~H%YXiHvP^XvZ{n94*69T(C zqrCYW-kTUeS(1`6c%7qRI*#6mOxRB4g+k|E_xWFhB-bg?kpc-}zxb720q0gTqV|42OJ zMWZl(vmH)LmtlzAbr|=BSV=y6U%!}~Ph|o}+R{>I^?<;jI{Yi>YPYMQy%X%I+e5-- zzd7fP_;$k0W9sHb$&chC?`dwx`FhEsc2J|=Q-+9f`v>t{+-djZk98uc*fm!a*vR$a zpRb1xl>%g->C3Z#s=OjbvPuu{jyP+?$q3M6PvKjk&wR$y>I1YN zZdc=#e)9#ii36e6fWxTQ?~yWT_kggOhs~r*Ga{cIWV(R)bQNw-0iC-av}U&>02o7j z{&RxtP2?y`DqzH(a@qB}_W>BzV`7N>dU2-o*jyZGLtkUNASy7F|5v1jL$0TEVr-_)p!Fhg;9UY2R z&&P@j+SAG45K85B}vwv+w3rtTYWZF#bmjCwVFklfDF}fjH}8{nct)&{YJw25?J+g(~XM z#DGR(AhX!)Ssb?LRq51c^F+TSwK>w0^AK=Isqn;k`;YB}-mdsyhOi{Myow>;(44Kc}a+Fp+&LDOh8;qg(=NfYPH`!)sJkuWuQZDAxMdX+xQ$a~JaA!;h z^aAzT6pX|9+_eE5S?6fXpvLoB*RjN4vivem;-^kvWb-w6S8#ajmHO<`<#m@2DO(5{ z1pe|DsMx6MBT&6E*vwPIknzfE8uwpiR_ifQ4^@(9HwU8HStn>FD}=Ve=n5$;lxgNd z&(p&p(OYEYv3VjCn1p{HuXfrc!(1%M<^SPdpB-ar*7>yE!CPcI4^Q$qT`p@zVpCvr zW6n5q>)0ntk&p89F(50gVjDg$lIE91Xg9e9uYcFz_$>dxLQ@?xYa)jMV0~$_*5qh*uvH%ZYbCyo#VuGQIPBzXh{!yoE4I8}`Uc&Weo-?z1id zqmJfZHIk&zxmQ^`0gDwwKgat}ciC*bM*ovF<5n)2n*b zla^d45dBEn05I8AFbP0uX8lFh5r`uiQ1*)|FJvFXjtKhLk@H$_7FQNCw1>WTIhf{;ClFVW|_>JtdAh zW`*1OOI$5^EB@)Y&GdzkMv1z|2k_8*7-R#zBLp{bf#0KYC{?)e~(r7|QvV$Ld? z3=D4&*D#GQRb@(i2#F+^(1*oRYzo8)iGt(h1)C{l(iK);*L9uDZbyl1?Ae!PAQ@x-?Bj9k#(G!0qGZe?eT1AX;at;AH6K# z$tc7lwxxp!Jcs;k+d^V30@kP3#;9C75px8*e=+>UO zqtDW*RVTYzFKPV<00#0dc8l3hK(3>GFaRbd_tLT ztyJl(BUnY|nTYFDe^{lc^o2dOHiyIO5%GJKO0gssT?slc-{w4MVjPpNlqvk$VKRiI z43RA`x#Hg*IgN;hLVBYtvsG-oVq;=rQhlR{z|mJ_s|1!*C`g)wW017;40xX{x?qtM zg-gjP-;qj6>dz0(*Obb9G)vL*M%8VaHL@1w`q=7P67~ z>3xpWm%Zr}WpIg6xIi6JT2Jt-bNS!0osQ6YO_&)mX{vIVU|eTZD4tHP+_bl@7T=}mkR<<7U?>rH;0tUJ2{fNj2r2lypViO(Ef=gliekP#Nr|ZlIv;U}&W{t( zlG&d1hI}cFV7kHj!C2&jRJ^P5hnsEfuW$;i!Q{Cxn_cldrHZ=9pKLq2!zuTivh)$+ zQk?T+We)o|Uc>$$uAr@#@}#ky-q83u9GVS9;-XT}+v9@H^snOy?(Y|me)p|Uj+P_y zRx7s;x;)MSG>_z2JpICWpLEmM%)y>dsIk-H;u^VHc;q>n9!nw`UJ9s))9UMJlOueM zL$17x4nNiolx}+CNlaq-e0Okr6vIb*IzrYT^$r#b8Y3(IgEcU>7KbSoEmfQ*sIQh4Yxx2; z%i>r`IA1~Qt&Uo9MZ$u9v{(;FIxbML5h^4QaN>SwEj*#HZ# z6wLppzSNIDDlp*2e03_T(4u2=4_hke2t5zuTjGWTRQ%*zJR4`97_yU znm=6DlccA-*z97_=~RDOUrCE=#s48o8n!ps+;1L!0`r~1dh=Ik@q0uyy~XkZ7YaH~ zC>^cF`js|(s0?_BHJj;9gZ?xp-|<;5CDnc5joCvM?w~KNYd`+)+!iIuXD=X)9gk8r zqejwD{hb=k@~QK%kRM{5`?L* zVH@+ML3;THlJ?aQh)8A40HkQ-E11p#i5%xtYL7t;42wz_o6YlWGwN_#OL&qLv$W*-S3J>MLg7EvUw!8*&-Fr4X_41@NPCXsNlsK~ zRD8CoD&krpj@@<+nDC$zS2^UP6{r&#b*+)dOwwWBxrE^1%e*CWCOPVbSE?JT52A;? 
zetTV`bQgJ|f6q{A_dANRXdQnQ9d6v6X_zQoGZ^G(0>sT#P(wHB*I&Pj#Z|!Z>C|;Y zF7$mu#-k(=Ad0t(Tjj+`v=a6{_hK{w3KwZQ-(zF}t~S(SIbVnHNu_xNt` zoNq;nT+eVx9ODKQREVy)UgP!2Go9o@npEh@jbHq>gHk%apP2nMcpf zZMoI#_4BVHGy8>c#XST1HK54~>^JnsaE?+Ayh;qWY5e=}w-Liar7WggrL;t6$P?xT zg8An6`-&8lr&E+AX7On6Jq=j}OWl@EXbJ*doAr7NY`E^iZumrcNki`m*Y!@>f=p-- zbhSMWgVgSoae+h+_cSz=L#m=k;rGZR;^IHMjDIyt@uPfS_k27~mMX&;rq3Ytx$#1e zx$A8pQ4hAKT_@M;Y+ufq52DfQ_AexM)LcrP@F&i76J@o%63%l+8x}QwBJn%DBuJ-l z3-dB+$JdDJ;+HB`0g8E{`@GyEI z75%pLD!nt5Z+ZpV7WQkN+G$!}9v;FRs71pkFnVfge(#ulfate@Ek2CNa$s_|&h5#lor`4pO|}qWcec(Pr9UpBW@CfT4)TP>Oaz10gf60Z zC6|32y1`CZ<(r|}bf1sJ3{EG?PP4=Lv?$J#nGG}H4zo(9R>kTs=MI{&mJc>doF?y~ zo@UgFmPDr>INfoA!~i+?&Hpr_GxrMPd^h z*20bb^fSJ4jX?Z$&t!6)mP?uu?(6O2^ZCjfl!jmy-kLg3w@o1vzxs?KQFL>bh8AZy zq8>AzufR}^tdSnW+TrmW%j+IdGLjOz+vh(p?cF({fn2tl&Sqtnr$d)%pp*I8S0=rw zOwPj=-9dI;aK`6LaA!!YVF4+2C<_U~@c$pCoRq%+%8L@dZMR#}E~%?zub3|MwhV{m zIG@HGS#l;U6J_b2c>|G{%x+}KwEjli7XyH#2?#Kbd44|tzlq)Bm5K_1&2pqLN*X3&lAcopx5?z#K;wFjsP2!L~Eh@=&TkLfD0$lp}?w_%{1LLbZ>5Kvkeb5+6 zHy^AZEHTOkO-FS~B*o%*et(T9@E3~uCMu7Z4RMSgw{C8_QEP#Wt%m3$&obmL+)i2>RWlq|gdnvo>ZqSz19!*q+xQ_Ha#nG>>8*#{uyuC2%p4MJ@(;SYf~bTI zji;DF2DLJAc5nZy{bC6`^AB;N2hsj6&?ZA_=;h{q)P)ZP1fOjMUDgd{+@Z|OWxOHP zgv@Anm%&KgVEJh$U*x%1=l-bS7lRuY?ZqD<&tC3-N5qJyNcoZ;D+H&r@581_#7yYE z8}~hjC(|t)#N~+~9J;LLpQEWW+k}Z;eHjjYzXZy#prgeph)@jm{*=p^9Ia?1q$3I4 zm`C!THq}t%U UMIENU-M#j##48RpjqL}M5P_y3o9MY*#wUkN~wXOaqeemcIt;WE&YyR;3Z3!*D;~ANNzs5;VGPf4KV!+y?|<7H81S##Uk7SB1RMeI(vJtf1!yxwzPXUry+@aUdG zEeC~j+%7lv4!^-dxu@G&=M++ug0H`s8yewyO9X+2iC1qjo7qLrhOQ2*>>d+Iyr{wc zjuZ|Wnv8x!m$S#A6XlP71#BCY^5X!q+F?O==L2s!DiK$i6CrrUTt!INM#F20GHt;W zb3?8i6_5~hgFx4Nj0tVx(&;F%O#f~qNRV^whv8Zhif>{jzdnJ2d>TJBGN{*)Hy;!+g@oSx+%HNyvJ7&jG zG#T~*3HJCeu?I+;OcdkFPmNDi{vH|~6bbI?F*OG@8GbaO7CE{e4d{35_|_JD^0~= zPL`eJy?i+K${NxWJFP>|PI5x%HC!{e+_7K1L_AfOqJ9t?rbvfptCU`+(q*yf;EJom zdUsMZ8*&W|Y#cPOjJoX=Q2bV!|JvmaP`FHXAjG5_1yo#PlW;dB&Q54WjjGsA@>mYQ z?MIGvnCt%9Z$)nQ%XsWz{!hZ4ppwbxR3JM=iV_B{VSnpw`fPLywYWrFG5FW9?*(}% zm;0MyL$}3zbNfx;!q391!a}z>$tyUhn+pZ+V?Se<7}`HDNU;{p-gcvEfhCczAsL4Y zBkPc@O4a&xo@0cMp1m%g#L~Sr#BkDDd{pw(Vw+xK&hCA?=0O9Uo;s@k)a(_N~X9d#=m(o8}LeIl0su&w@T4Q z{>@{gc(v!0mB?;X+zqL56hz754j1 z=$q=S6&*h6i($gYKpC}23oP{42SS}$Z}BzW7Dp1-?L8}Ii)q5ccTFDc7o%Q`fxa2j z@lsVzdm$*ToDH_yg_KYXbaXz)N$dGT!u|(EwFK>epXF+seq}W0NWA4Lxs)yxsX%%b z2fvo6>-7X-7*ovK%hm-)$yiNL2bw2r$ki&gQjX^LXZ=cnBgF}0@^8D$kmpISgiL}~ zs}a2VAM1P#p4w_ayb}I9i2B-)*cJslxn7^;y8#iTr~UXOaVOj|pt-PQl{Jgv7-%B` zqKcP5#;DGDD4-!&$E5m|{!hB{&+7@QM^0}VY0_nqhz|T)XfV6P7K!$qL^e+?`B6KcR9*@#@!E`ub$e0d85;+f+ zBEc7&^NjqNL`BeGlI0g3z#BAtlP5Cr20fKLNx*<}N`o}v*-lE}Sqjv;cqJ#9Yq!iF zlGv+srEs9Qg;SGpFpnnESZ6~YSg9Hf$h1=Kh)=zb5I}9|ap=mk$Pf@RsEQ2W3eh@I zimR}XTnH~gGbJ4dRod&WE=5BF8)gvdfj z;TX8Ufcw4l><+is%&%9ICEsAdEFdk7+^59e$JSed0>L+rJrYi3xHSKh$PZX8(gzs? 
zP+#Z+PgKc2;)|!)?92;O9;#aUtJowera)hv$yJD(>}y^kDwL{*+$z z@2j%U%2I4lbd}*BD6l_kb`A)BmG6eS+~LI@2Yi;LN~@-iDIN=D&KaUGrVug5rf{s2%t?|1JTn7){MmG%%~5w_cD3+H z<|7)oNy3o5A%Cb}^@VCaW^pIYasqyzFxHEW(fMvQJy%uic4Y&Z_=7ZJcZ7P8;8o=& z!kOW}O9C*5yeb9bcYZZJ){5fZLsYAk!JM!D`r-9*Rt0ISR$<()^nHl?@pi^imcJ#6 z6wxzS#Mo0|>7uhaBfq?`4x-u;q=ZK)jpFGoaav!e`Umiamd)%lbbrq$9enJzSaP_U zRli+E!FBEH?edmO4gSekp5U9!a^4{gCw^{96PCIY?Mg>=Z&H93x?4K4e_+tJpw?VC z;N2Pd?ZKRmGXl0WV~r1Qc6TE8VLS@TB9)CxU-sMC7dw-#t=*6;E)-_oT!s)kk1B;f zVf^+HxExro*^pGAYGqe+Yc29#Y&F=7y*h9h7HHK8~Puh+GBohj0{d5b#+9?ZT38s zDJF)Br2_KZok5!_XpBjqK1|r=Na>-Z-1^0mQQvlINFZ~3BFU7O z?Z|%e7#)ab@=p8tSuEDd5ABU*LJDvdN-ea@`>^?Xo5+H#T0c~8yILH^g7iD-+fS*K za(tqqhrVDngAuq%0y4f+f~8xS2wR?gaupFUJt)MszNK39%$1;8iKh8Fd}jUuh6_-g z-VC;3D@e{t6q!sK#bYVqZuMHlPqRG)FSA1CZdSHG5XSov65}(7qb}OU_z=4)A?f!^ zn{guLj!oD`1v&Ch-LZf@=UQ*Yw`N6gz$Hdx@XvV4zWnlna#`F;@z8fi^XhL8mb?ji zYvRPWk#S#@Gnw$dFEJMyVR*=>Q_9v!5OP@0$+|gOln>c3T}(#rHjS3#wFR(2S?$1cF8YN z5wV;f>k4CI*yGDS4#;NFZwK8kuvb85Iokf2;qcgq(LX}hEL7xL3cf1xiO?rybkkb* zyZ4NQgmd-P`WGlrgvkdzJcffbZ`7tvB zVdHfqi(o+HX=)$@U3-8cuE!&~1rrpd@}i?~h!217<&{iNFo2A4pd}BEl^`eI(ejbI&T}>Ednr$qW(;cA&4%i%_+>R_7%myj(hNm?`yMvdq%;uQph%5E*V{A6JY3 z=?NE;2qPWrLhm)BRkCZ^8;Upv4#qR?cBh4Rc(e6hc0yxpd__My<>h#d)ot_09bneC z0nbJo*F@fIntG=EO`lhs66~HH@w!g>l=int;dS|ac%ZXYiEoU%FZ37&V@YO2{-xd~ zuaq8Go5jHSWB?J@Yxj{Zr|wLvnat$H9}(S4{AN%@;trKeC&_hn%?7n>pn8IJ_N7c5mp~J3lR3wkpv} z34u3u92d%)+R3uPM$jC!XXEd~LL!KI69B(m4js)hw=z5Dh`vEe0)Oxj@`3&T8TA5) zgt%2}d@D*Q&aZXA&K5qGj>lTq!GMo(Q=hOQQ?xGs!ha#=uvQzOD3i9N%He9shlZU~ zz9(|A%-bw-XkpfdXB&rY+4obAtL`5Z%FRo!W!BOH zw}#;>q_S$KOt-;{u0J2;f)b)}*>t9h9L2<$;OBO@hg2Ib+sG{fWAV#)t$$Efg4OoJ zu!aW#ltMjkoPcvq-qud5gz%HF*zCo+mVBQ6qab`-f_#uCLba0YBp1XPe6+mn$LYfD z`#2;TtHyMl6acOu1n1!17bStZVD*>Os^eAilnsT5FK9;**j~j+OeKCWxl-p4x>ct? z;*?oqI*|q+*2F_>;_-8AcBbzG#)EVSE^QJ)9O+s@39(uGdmY|Oj&6zH#Z#na%BYyw zszIqxECRpV*LcoS#nLtmc!bV(CgNBY?@K$^2zdTij;U`M&!GifJ!J7AF%NH863yay zv(+jxRw`|_D2;q5<3B%J?SMdk>XNI}DvdBOzX6U=A+is+-1y{n;3Wh{jfr(Hm?1PB z921=w4~Wv62YB%tQAfR%t*V9KLTjQY3kjjY;~`!sZ>qFC7k6SoP;72F9&^FMc`M(z z`S>L&h!FEYbs{@M{tQOs$NR4)PcO3ER_h)LE~k@^*noT5V+wZ1hMF!4+rtu~bG~6I z%&2?C`Lu7#z~I3@c~V3aqld^nKQ7G`IgadW>wl@z;sd_W-B~PJ&&ZBCPwRa)xvE}l z%mzk{njYF!4uJVUD>)rz-AVH6194wGcT7~w_j ziIU6aD-P4!k~E%!P8fSC47^HyuIa#Qd>MR>r9p%&YjmkDEM}Qg?GX0WMq5keLPL;k zz{UhC47rbuNCA^5n62i*J0i@_Q#1jNltZ%9gMhk(s!vDislK$KU2-e`vockDhB3jJ z-)xa&27gus|7C$d-)(npSmL5e)=O{+Km|e zZqOH&yZB7|)NM}1qhgMi6GYJ>>qmjGoB%T7e=W}j4Fr@3-0OSJ2>Z&|ZCxeOKB;Sg z*$sIPk;g6fxW`2V6s(oSDf$}p6)0g!mSJBlTJB16kQVoeluNi4iS>k`T&nX#zG?jx zaFt_De?{N$OchYx;|>c;I?`o8u;&d8d{!@;rB>Oslmn&23ZZ`Hn}w=Z&eZ(4ad)2E zG$Af$7xk0y7%<(!g~LsL4~nIR$ZqN};E_r1Mgc`w@A9Rh)BdlW4-7qJMy8f{R1Y$; z0Muf3bg-rs+s}Gz2&+ScMqka1r>ob_bM+|rBesjTuZb(a-_PS9do2JaYJEuO?z8Id z`t76Y5-)ei6ZG1IqFj>rRohr}hxqNVaV%kZUE`RYS6RXD`$z4k+mpNi*63dzcST6= z7Mq?UJa5&$6L=3cXIKw>LByzlco&m_-9}4Q0&V0`97Ly8C;d-_1AuWB;??-C1@gUJ zU-&MblP1=0hg)qCj~0DyJ>Fl{(%u^|E?hkCKkaYr6Se;QY49|T~G2t^CBlMU>>uP>weHx-_pc z6H5~Eb8deBwfbs*T=RqO>8N;G%4vraTQ&hy28M5}_^d{ev;_Ec^d#YyBhiTuN;HBn zKPfO3-PeqLg+vd|Y(@V#jvNrmlmA|2U6%b<1K#ATJzN=I zQ=~0Yfo6ZruK|1k&EH<|I2Q%s=g8aYB%S#NhI?PIqq_a^Z;_UoxUV|lyC=c;n z2xjb4C5<(0lv}2l1H6jsU*Sr+ey1R(J%8Ht{qb$sSuJoQEl+41 z0B4Aa`GB{DzeRkx@=?Gv`Nat8eTRsfv76$A&if&~6X1D?I=0lTM}Yo$OKXt>s9fnt zJ409~dQDi9|D$pG!1t6-8Ws-kGG@X9c&4m)erVSj0fEP;DP430{$IKp98DPiPcd_f zuL4#n`)C1Oi=0HbvQhc>wHUF~;%zL=3Qy~XBXfv|G?BJIl%`9bAWT0RlQYqpsG2k))Gs70CLBi1R|N7MH6;fEdL+jtlS}2wc4LSfiMeK=n0@Bff zwj$YPR>MD25`ltJ)-O^)w!qECQz1C{h7PM0lpzRjX9|6`bWe0* z2$_*W%_eJ!{s_DXk z^de*}B|csYznZ!Zd(gi?-2@w&Ny7O6of}9Wec*hk;r%<)yr`J?nVgOq);+4s2)qF^ 
z#d5Ufih!Jd0;NjRVUC0qJs90Yr_VxcI|p=x)&#<%3W5px1KneGTV z={#Ejk&3GwmzBFrUnY@n@kMSnpL@GxRyi6-^J1&*jLHdZ=FemNJXg zOxdo?fI&(9ml6DPBpbbh1k}9EVnpxEVI*w5)LpwhyW2&KK25!3Iz^;Cd#8kj6`UF6 zA(d&Vi5a{ZhR)xVj4(+QH@#)D+y&q>@9vGIUldD%r}AETFxif85pMPWq!^9<09<;Z zK9%rhT1QvC_n`poUL}6}n+Ud^7ttZTgo;UBfkf;HLz@gSAZV1)V+-Ua0fBSyvNAt~ z%5U{W(gGvi_-EA5#H5?8begrINmW{mjJjW1jG!((?-Yw7UX`wP$4&$2*;G(y;cS1j zdhSOx_cca=xHDYW5T**delE{k`(^NZiVi2yW9?l32ti=wSs{d=h>QSW4qm1|yqN7l z{s4o|M*@z;c$5k-b3MbEaul<|1t39qMzwg}%Yc(zQQ+~sNS0R6CP}`IoIGd41tj_0 z-Q0ZinmP;=#To!Km4FkI``wkF-@$tjnUoWhMiUnt{dt(JnxOm3{ttB)DZhtH{sXP| zCEQ>BfX))Fh7IEN;Sw{I$vrEB_mo;Y#^(z-u485>N8r%dSy0VvkS!}3u^`v2E?yg7@kirg$`!V$jT0> z&?AKm{;}pIh9qbx;mKN5p&zO9zJ=g^y)ZpsCme)+=d`YMykv0*Y~E97Z*T8r;{Dtm z9GbgChy$_q`~0tngL(oL1`7o}r_gKU&%dH+X)UavjC4Sbq6r*y_;Ngiwo$gTX`BFm zj;sY*GA|-NkAev%F5(ZNm;+;7mZxoVfHmOyE>n1(mlcXt!U$vBLyzHm1#X>-ZaiSx z84iHQG`c@qee#;h6A3gX;&nlTx5f|$9Mw0@elcv8-V4jXbPWj9prG}*>z3ngc>m69IvjGCnYzJc zp3LLj@bi$*>q_l3jZ`%32I5q|R~^bI#~E-V!71GM>21+uW!rDJ>+|La!)2{HqZUsk zM=;yllMU>va29?1j=vJGuBU(|-4~GAiT+?0H!$)yr%Wg?)fG1FR})e^(dw&**Bz z!al7$TzgYhb$zmdJJ^U}IuAzh1w4t$j#auHpTnx)6x&>WqIIYi0{-pkc2DFidDeGX4IhsM(d5>K74*AoIgO7z$63z=cDET$8~GT@1& zM#pNcHYY5Yo8&%N8Vv8A$OXw>Hp^vsScU6|hqcMHPn8iLhV~cb13xWx=!!<-8)Jgp zwu>soy_{s*t><2JE#8CGu&NY)mj*qX)^r&}{0n1U{|RGJKkK^y<84V2Wl?+aV6PIE=F&ZBV*p2%*Li3(Z152Hzv(Oktn!wUZle@{PfAp01f= zpjOtmD1jJ=!45GsDICJ#f~<@rj@VuS-4m9i4jC(q*vd%&!S6#rmfXMQca)bjoX?kpK& zg1)HCRvfB8DqRDgqk^`Ssh0V_j11=zgjB|Wy^)kmY&pnht=>32gKL{w*YokQBW>v1 zX~_aP0g!MqoObZPPjUc;@qhG#9t05KiqnY@hLD;7w5-q|d%MSB@wXY|9mWe@Z}>(| zu8oN-a0TmneGfdk-da@tfSfq~{VcaEmp*1j>#zs^M&2C2t|15RV>s97quJA+fx|=s zUW%uRacS;Nv<^*DjXz&S!6c)4acHeZ`#k3PrOU;GVUMb#|2O~I0Ztgy(aumja8(eR zPv-dkSZS201@xqNgEcI6+x^JY@;N?$=j__E#@l@vr}3%h-R1 z;%uK9VY8T}viV`_XAw?L8j@Spf6Yb}FzS9M7)?Xuh-hBXqCeFx&3bj{jyO`Oxs?rka^W2lFy`KANT%VD5f+r0dFOtLIFZqa5wPo62nlo^_<^iNk=Wd1 zG=*v1()*Xv0U*Wy9~UEVFUmIGpRQ0^&Q~amL{iA44f?~P&Ys_Kg>1Gu9+z#ocKJbD z{Eijy_Gm06`KGtAkqg)! 
zp@*`@w6@2W{Hs)t^?Tmqj^@fp7gBD?Jhf5BHZ*?#(W;gyI{@T6i7}bWnW%ySALY~d zQI>RLvnO~9Fsn`FJ-YwY;Ag3Zfp90&vjhk=xeM1v4ad6=Lo9ID-gI8Xe=vK)X?I5w zzp)DHL<-i3{07<*h{`gl;~_rT>$JCSvQVf8xJ-FGhM3Zs+DH&lG7-f2>!$T){cl}s zt8$>Hx7X(f*PQV0qCd^fKf^sf;;!P+yE$a)2E3Tr2Yl_kMAy4AS`O*^`iB3X&bcpO z+%Aim;&oqXYyuL-e+`hE8d0DF0-FZI!wR}1&Vm1MYUYvsVBr8!X`M%cu0a<-w!}ON z2t`pm4^xMi(0%tWeAeL3dwE^$;3dp}e@gfXLUf`+twkdYn;IMnxx~Tcw%Q}~DlWL* z=8BkP#{mZP-f0OuboI1!P5Q{#Pcwxwh6zskE%DlIRwJq|;&&%Yy?l-|IAYzb>*{DJ zA>vg)`%EJ2<2jMcIh~b#fg4~Yj&&nLn(afI)X%|`!6d}~ms}D-eh?P)Mw%Vt0z9$H zYlOl=P4(t~W$s+WPKcF?y-7N250BUwLje+s!s$EWrW3B#48(sIt(KDPaGVGr<@hR%Ke7loG{m7lVlHMBmd%gQozPq$ z=HA9Lf2(+o#901X!hyh;T&U8HA4#s3Z1->lOD0g=Rr+vghM+G3C;?H#X0qhS|Fo`r zT@kT?yED4^ce*7n-JOlvj~R_`MS$gO26%Oce(SGNl@%<{4p*QgA(^)29^et1Vy~pyjH_|zYvkQ z-U|nyP{xO#lPz~(>_w0WZA|5hJ!vyN8Vf zm*QN>os|bY@XG!}#0m0oYWU1?+Cce_QZS+0OvPd{5wFEnC-wP3LNts}A{~$411gk& z66JePWi^nn*7H?^?HR;GebC{6QvIu6Sv>~qI>&zp<9Kf|y`m<1CPHdLq@@sLb)CKg=aGWCI?`f&5EM+3sM};#~4Oq$#66{^gVsD)X_YVQ5T#vKge%a73O9wGb;W z?a$D>{Yzd=0Eg*jkqPhOS@#OzgYi&|D5<}#*X>b5Zy1);(L%X#e6WPDAkWI2*^wnJ zl=$9HbSm|_0t_-Y2hGg>1WU9Ud{7$aH7<9`ac2>RumeE5L`6- z?-c@HOiDQkUNEsyH!bkIx=BF*vO2b)xqVfy2bk{pun^anUwrM}CJc^!zB$`0Mk1-@t&_)!x`KP_mcKm>L6c(XHcST0EAbYvOjf?BPtF z`<>CHUv~FzWQT8-Vxb76>B|Q=6S{)buR@dz*tZeD9d;EZ-_efo6c|kB2@EQPlIZX94si$7f)*@O=+%r#_CWVrG%A(q({jVSt!P$q zb}hg1S-acreLrSw0H=3%AmY2XW6Spe>bF{eJ8-Q9Iuncjq?!?ydtPO5NhWf&lZHEn ztp{MUuuIaIGtnW|vYWpoGmF?oa@+Q^r*Qwq1`#ITpZVIjCP|kaIaJPjzsz6ZjT200 zD1QZv00(2&;{(;HrC)8~PFB1>98H3QG7wRSuiIzHY?16k3MArj*-B^b3yxX!4(qTK zOZ2=V@+p}6lKxo3nQ7($awnx0$EzI)F0U8P!QSGxZw3Baf_8DKuGhOGlxJsWC|{G} zoB2Xx3Mu7Zbvk}xf-fP>)tZXpw!x5qGKIna$tnIDJSGiSv(Y2XKeHFuMH~Y>48{

    qvF88mMEt5t%(hj$1f}5k zib*&&r$q2dzKbEL@q?KxardJXl)(n-G0E^3lbxJ&qNa(lv~+ahQc}pj5-hj>(1Hqp zx*r3XBfymmc%Vz_Wz0ls^K$Wy$HB%U0oBK*?x&yWJ_k*P1N?7-_f&~SpBxU3F6fK< z-`;_nQFpZD<8HK3Z#lulmlz}4F$bgl9WYRy%)-3f4vcb*(!$6Ul$HI&i9$w8EL5S; zYwkQ$vvDYxA3kmt3`u*t(M)%~1SPjuzjs9 ztR1mOVc#I&4Hh1SARx|WX2uIirJ1qA0fdZ6e}9GXj@|XTz+7J56E`vH08CaCIOFI6 z-P#xE`m2Pm9m*9OV&r|>$iQ;Bi9q^&CfX(dds7%DL|v~zdi`TF;+2i_v|NRjrUr6X zYkmFOelL|cba{FCTHF@x?gssJW2=jBTu-e=E%)1mbJ(2+2gj#;1oJkbd|+%78E}2X z&Nc`zOvjmX0W1a}vKck2p@Y)FBaQZwJhV<(BFUF1V_%O3cgr$09?s@kg*rJN-ADi` z$ThuYU|1Rmcr2+<$@OPoCRYa!*qBdR#WI@iUI^Z8D=mE4GNmiLgj~ei#@0!MLoqYVwq(YL%yEGJM?pd7L$+jv%bC;N zeNFtqcOTg~c32=qa|RLaE)Hx?tyC03sNTBB6O)tVLXQb5ygJYt-Jc12{2-Q}o1&wW z@P&f#9I|4_d@M(3GBs>p^DPP#BIUNllTZLah1GTp*Fj%Y4-#V8_wN#^KUIm86P8d{ zSI6XQQN;$lFqpUS+ekQ@g;L`&CMMc~h(^NCk7C7;2&ILaGs~29KdbGmc%2WWa?&q>1Mx|CXSo$W;C_3})@|~h+ zhcDEDMwmgRkWgBER{akdzkg+x!AUm9DtyjXY3!2&VX`+u8dlH%XvcI9;$RZpdvvgh zzrfxu=@5G`=JhLB^v5FqvXq7ln1Q0m=BwvqDw?GmtSoRB9yNjK&O<4@V#AZBz%3`4 z`w7vY9`JKu9=Dq5d@D)3pz<6EPV*(JrwD>Zt*oEcdTqG|b~c60N}C9DR7AuIE$PyA zB=^apmuxe+lCB++cQ>}ex2nLQuw0shB;fh8orfKBDcZ&ZTleUaJL2ik!lI%IfI-R0 ztr53Th!mtT>#F*Y?ig;0|3Y%4#DWHFqiZ$hNC&-9FQ#Hn{4k<5K~Bw;GBVYj5R_Dp zFJdjUBKC@Ucn4GKB7u`t%~~QjjMWTCO@qUia2(G+;&T`|IK>ziND-3*mLsS(rT;iD zAcIIhVbX^a0CXcB`>li*n^YOs1bBFvk?JGnd*xMC#ok|l`f0S~-b0|lafi066it$b z>9cQJ0NEqrQ^a$ze*8g8N(1EaCFFwCw}+&MHjcIx&bZX6twp9#KeE>&bb&Et8RQa_8 zI1dwRvLdV2fkCGZ#`-!#g)>S^(Rq1)BB7&?Rc3^ZCK>~VNKTJcz1Wam@Hs%?ydN|c zSq#r<+_jRxT94nPh(LkSi5ca=@F6VT(3en7FxmZnFQLru0gM&_=)*u*s7l?M5t8{Jw9hYl&8jd1DcdSTkJ~0*s8Z47F&_j$k2PpG89%sVC3h&-HfavdX)#B_NaEz=;oNAQm78{1{FTdD%(v6@2GYgz{t7q7i!F+#)*=N?)=wO$4 zfZeBLxh6J|D-~9IX#N^;nK?K;oxejOje&p#A~0fARfYAV$obwlDqsd9)Wk;*z6a1f z?W8v%dPpCvwPaf*tG4zQ=8Q9!|0Ya}cwDc%=$p7-s}2OQZS;a+~x zfPpXd=)eos3~mZ#fIcLHU-397=79|%z)`?|k@%HXxTm|VM-ourXA|~r=jJpDSWHYz zI4!0Iv^x5D!YDI>F{!CLU-{2BgNu+WW%=hWX-XREVE6!10@Zx`IRMSpZ2t}#RAvM02oo44)=`D>78WL z_0F#C7f0TF&>F#h%S-413Ugx@0FsvDl4}oMt*`JpETFhJjeS5Oj0O|jokQoo69zoe zm&$E9z^e3aZEao3Gr2fZgg97Pk%p5FiXfs0f|f=VvZ`jpgv40CkrhlR0>TVGN@A-# zGOdudqZlI(&%1n%7_*(Qw&2*JsFavM05@xv8!X%epbcfF0O%{A_`$r61;e#gMjrWk zQNa#VQa9|iKsH!ZBt}}Ni3m!^Z~~}IDvF8~aX{(^#QwbMv?xTE(#QiQ2o3;UQev|e zTN`!e>&1kX@WzBxqt5F0+>?r0+}cj_9%%S6b!lrWnVV?@<^Pe`j8*;ngrs1f5VoKh zBQzAYiDhTTggd;;C~SBx6UUhEY`|}MTZo=-I;k|qp7G`BOBx!)gYj|7rvQslBv_v< zX}>DzsYOPH#rIk7CuOGuCQpLx*68#X{L*XvCrNzkfXqLAUB;IA`@B<8~QYFHwMSc?-`Js!4KZ&dc>guc!AMm9$+e}nZDQOmEO_uZ=t z`b9!fkYuBx!p-OaVZ=8%cZi9B!3V%JFO7Z>e$pb>lZF5io{eLy#X?MIuJtk%_<8%Jmu1iC=c<%l*^=XZ7!;=S;AbWeT&5O@8tooWWiJ#NAvV zYc9hgRzYE5_gZfMNYt8%jZOLCrpW=$2MDl$Gs$?6r=p=USD~FEt!nOZ-agicUf*`8 z-q{}9e;7Hq!yw@+{vvz}pq@_Q>}>8>2^VKt)fV#_jl2bAiR#mz!Q+))M4J9P1`vRE z><;JW_6o`fa~}*@i*GMU0kPqklgXh7D4XLdSG8$?&KsdeLc2ux)Tz>Eb+99QmX-HQCe*p(DOlJS@-8lRKhMGNPESw2KxDu|fx}LCy>3zsdM4{< z)K>LH!9dBFr4Ij1Gzx5@Mc>b%z^%hBs4k4TH1}ArPuR*TEspEY>S|^yvc})gm?tIB z!2#ft>7wA-`&{ksH?}6Z1xHiF0 zsTc{Hkn#E6-&T$0J7d&0Hfp{^AsWMk01?G2TGq%*Xp28*w7s9#Am&Vs1^1hwSJj~( zl3@UK3)taT(pZEu6K3#MnhlC6ZF1vZ~OvY1OMMMoY9bnK+J6=akfeK;o~5r zsE9q|>@*47|!nGw9x3 zGI3^32zFufLjWk0V-WT!jRCyDGn+WYe_t#DKpf2JeHs%+O7I24Qn%Bm?)nNF8Q~e* zHaZTcEpnnFrONgj=Vh{I*xT9FBtHS9Mz7@LGVe5jFVc#OA4pZ71GkK8t$ z{bT@23`89rU>|!`Gu40hKW!id9h)MHQf1#4@Hnyk6e7s5W9$>)0Z$LWjw0aq?5QCJ z{o(^R^y;iE2%rIccen7S`fvyKU&Xg%+rSd-=Yk>DNK~{&dUD&=M5|w7XZ`*cM0vv$&C*qOmIKD z9UH3~QfEAYVf+2f`P8oZw$pbNRyL50(ZiyI!UA4fE^ITTqd!f+WEBGY#Ne=&L^TNn zm|*hQ9KS5;k!+jq?{|VsOXUFBxHyu%-Pwp>NeOtf-j3RhR!>o@#m}52g0*M=JFvpC zyc$H5FTXdkgWFlHu-R7eAZkWko|YvZ>=G5nAW+n>{Co z5?D!9VwL0uVU~M9IE;rRTwVE@#+PrhVKl>X=VcFHe`r4}FNOABUW(=4h>T!FVj1~4 
zWVm%$tg9Eu<#va~I#G%44-We?q=1-29qI-kxpp^gNOqLU67 zBRF^=k%Z~d$bI%{gVR|zmkcc{i-7sW&b z^^oY~sfNqQ0gBOBVwHfRV(i+tni{9N16vl?59~k^5#00hb$HLN1JJYW1{=Nl_X-Q_ z{Oj2`tO0j4&Oh`qNI~;-6e8dxi^68T7QX-U>#kqg8!X_%Uk393NAQvGqFBcUSoc^AyrjSm zwj^(vu^)0ai&&MFl?w#bB1NvwL0@*ylPcEjSr6lVzTN$H)OX^k05%u4#L+5S8p^Z} z{{OzOz^H%etIW$fpy3@W|Fc}T_ZN4{7tS^w_(}HzF0WLkE%}h@K4MhG=5_T*37d%C zuT~TQsK!{V1ul%Q3;*ggr2NC)Oa6ML9~+24p?FL8>woMBfRYafW8<8Kx|{#qNnmzS zRu@b|LvzvBjDW-Zofv3@V62?@$q}PI>uomt%q(@bf9GFs9&PErR#e`5vqH z9o3y9T!jXn2a*hvt*|57PCx!iX2_2~yVV9A-?Y@%V*t2X3>$xLBh2_QFSk?1b7^~n zVnF1Ak%mFgPJszZN*eym-oLbFl~BjT8J@*~OTbNZl9Q_A`^$`EHdq0j7Zc6ybXzi*WwK&t$ z(>Ko!Ttsut2nKwljKQV3U!=Y{7F8EB&nPv<)x0O?DDFd3CJi~=Mona_>*J}N?;j+M|Sr~{uxZ|2WNmBDZM1WvU$IS*J%(GkVRdR^I37J-6Fk3vRruI5}0scads;1|sdvr`^; z=oyCe%^KDU0R64DZ||EcQsF=t7Dww63z@9IP^MvydoKDEyeW8HP~gMVSvPuKSNr!1 zh)xp2*4$B@NA>$&hi9E&cJhHRECYV1%613w)`|OalvlA%v&$Kwki-I+ta$}Q1A|sE z__=u6dkMqfapf<*5`LG;UqsI58YnmbX0}|)T3skL9ZHz9;6W8E&kT)-BSo~g6!H-h z!SN{fy3>KbK}qA?9KM&rX8g#IL;iFQ5GfDu2?uPtVBsq6Su>P|R)&@!jo~0cVR4$= zGKnpZPz)~5YaWL0IwDbLTzx4xcx9%b6*UPBG2SR5{=r6qHoG+cy|TqBi67*CMvo!eZ7c^BhDZjNq zyW12UUHdAL%jwwGWL^MLFbDOzp9l1Hc zB3<=zf8_7`68H0Sh4IWbusU-KseUi`k%6ha87Gz7#Cdl+iYZ>lUgsmf9mKt*In}J~ z|K0^qOtILFxHno$)Iv&j;c~Vdv{%Nir`MGgJw7p^rN?H@?{d1{7q9}eolj&H8&Foo zaz|ULcg45g9WKqSGLWxwl>ZDuC$(LkM5Ru~Q8^8BWFi2i`mhl+wi-wHhzXFQ0cjF& z1oXGL4GzSZo@q_i;_ZyvF<(Y}uE^f*ZMpRmz1Fk%(3vW%q_^GIVpR5SWDR3DmNO5k zW4(sFd9be3m`J>p{5q-_Bkqe6f;zTxLv*JQgzwv-D&i^SL#RzA&s-Sjfg!|Ffj$iJ z3;cG1s}DA~qttxao>4pjFAU%>hv<=N3cLg5DEFjHy=T zd9Lnj<1gNHw`IImp59slNfM~MqK@_bQMT1WE$qCF7ewR;X|qG)ZifA zWEQ_kMpVlZ7Ah);sFYKEpI^aq?7M$-m%{-A#5U`7P$ZC`^X^yYpYl%jIYy_^j~}BP z{(3g*Pg0>OL~gWsqfTb=2F2BVh_#nd%o<+?UNYsYrtyL6>k(oFCg!d&EyndOdBPyu z_lajG%HAT%3|H-&#*^sbWy^AJ$AcP zulRZF@5Co|>5V;&xF2B&`te%L&>Q>&h znu6)j;V$xB?4>Fu?WB2{vA8Ir2)OS~5>t@;Y)D(^9Fwy=)lz+)Pl5zo1W2=iZ=`>e z4@Gw05IF^(rxf(&`VaT9;94Zv?-U2@Pv4d#WC-~wga#6qtmrgb-Y-_^ijNsb%=y*v ze3t%E`CExD408ydFIqmU!-w|dy4isdZKK{AQJ&@$2+j3%Jgr=EIRu+4m-!%Kj?(11 z?xmtQj(x8noO!C$5uQBFkks?ngDs7aF?2trkqF}^+8K3y+br;mj)?mm7s^vG zTS&em2-^CCVUGc0Q*9#!?DdfJyc zpI?cp%^PV0liK~;MrF~d59HOMs0ip)+Ie_V7>t+i%_?ak$X{^6e6VUoW0=X@3uV$G zN)u>5edD8zn#FFGQ8axjq&J<-#huBwV_q3%{a=wmN$>*5fZ!(u6}%6H6AD)%NzU|` zCHFOgqZ@bWcVY7zp1)RD=b$QQx(|7T8`32|Tg2y@pWa4vPd#^0UAo+r{+0NB44#zs zqEe_QHn*)AGdv)qV9=6*w80-FGfMLC4VAZqnJ&j@{AKj+}I zymzvQ#qjdSEL)gq-HwDN`w_kCi2=JL$q}rRWGC78oSn2>X{kAM_hl1LsFMuG^HtQ= zpI>zR;AyFKKxUPw=j%OgNK;!x5>?5%x$N=j?7-+`Iba;nhRjZszbs#wG^5*M17kS~ zVWFB$fBgN+!aS#^cZ#O7Cz7c8-i+bbm*TK_K+CNly)^nByTggNE|OwRmMapXJ>?p0 z=#}ZT-w3-79o|G7&kKw|281gVz*&E^^rDC3f%lmNLkHf*BA3Gme>(R)}N3tvRP&( zCy_chx=PH`t1NH+SrZo8!4*eF$Y+mXNGT(nxyCt{&Um}{Tw4mQkk8@_YQS&&Qj?Ks zh%CThpjo4J^aa9G&&1+gXa84Y_h*K5kXzcF-Q2Gz9A8cc>|F4{jSMb}SbNN~7%5s9 zTJ)ia`Xg2tjkZh>P=c~S6_x7$Rip?KoUDL2Qm5$|UH)4m)zHsau|i5^UC(o0V*<96 z8S*E3$;TwM$V6Yq;^(d~V+h;xA#lDlZ0KPW=2q$VxqTn^Ok~i#TUgIiQ^0)YjUuy~ zJZp>o=oT7RG?6V*(<{Zwra$v}51lUO42UZiAKi7nZS+LCz10%i@bxx0KkL|v9ZVq` zH=Zx2GDGqFgf<14V8B8xDYgVoW|A&!2KABXBj#ewGCb$l&)4mTehc(?eMyJ|R%DF+ zUl0qM4yt2b^zQS!F!ZbSE|_Dt8K=uHRCDqJi(&VucWI%m$i)f;7^#l8aDHyyVIbHY zkJonmz1dRs`@a6F9H%BeH6ABqdXaR=B}=8zj>tbNRM0gabxSwaw{Y~kNotrM`8-7E z;Xm>_5W0Z;6v*UN3Mkrl-7(={gF0+B%n0eoV)&gD*%5CGM3wUQStCU-lEtI4a#03Z zm1uOue$wkJSD>S)3`^goVb(S<9eh$ynoO_rLEJN)Lr zD7p3gVJ{=*6iIj?^w#C2F;7DY|L=_mq(?V4csyW4D_TZ4#v+P(_MjO4wZeX67Ds?H ziP?0vj@5*4QHx#^p77gev9JP%aK{UQ--)twz1zVGJJFnm8XC6s-a$T@9Ih0J0E;g8 zGc+a;2B#T%i;{q&2bJb#tdc|D0i*!E6$idtX)aL7`qkkhSam_9HdTq0TS= zHM!857a1Xf*z4sw;~p^t<;lzlgd=ACKlifHZ}@5$VoYOp$nw)ZS5^uA7z!sq1@cz| zE(~ab|D0HI6l%~Mbl0aiDDR5MGVJ5#DuRhq`cz{uUK$o%N`Hw93IYK;bb(PR!RlY1 
z$w{#q%m!d=ld(&NgNR^IixCH(0f~xegUo(c&=qEupb=s!1JYl-;&t$k`Js%kyUR*` z{7-<24UabZR!J68gVh*%OWBc;I|GiGm{GEg*D9cbi6DxBg7M-JcsMn;_xNpuKF1 z8RMK|20RF7W3DJe;s)0h2u!#TcxfB^=(w;Q9$UZRKJb9?b8q$K0b43N`Do$Cj><&D z2tHu=iK0@sk7-cIMym{Y(hWz)>4?Sdr*zY#i)(~R#PuLDAZ4AJc>W2TZ8!n3>vA-F z)dff?J~gw*yk_PR>%v2LH{4Lvn=oVyTQL~zLL-;PAh9;c;E)h}?bf@a#`7cqL3p52 z`>Q3F5yjgxvsf>dRjS77b|u`CIy_?CFgbTD8CLJq#$`p52ft zo43*m{jpQ>vTwfOJ{w@TZS0v-(MpPBep6Dhf6Tmvd#yEbgFf3caAsEbHC&T0zpGx} zu(%%wUOe#Zd~7=C)WDH^krrr5)fnK|j7RC7{SSoXStP&R)&tFbVHX(Aq06mM5`e|x zT;y9Ps$#;7bP}^FP+O2Te0oCveLPJ!=$$z$l+=s)Wb4W2S_G>jjhjxFixX%iMGoBN zE$2U3p7NRHJp~4*UJtFj@2dF@ZnV8SYVw=RA*1b$J~KGdB6e`cW_N{o!VLw6hIE&S zOv@nFO>RW-QL0s*W0!^qL#a{_$+lUqjh7jto|eoixIejkmN~JtI03yCVifL)_H6HN+Fd{XE%YC2@Q{pq3+=nS4s(Z?T^`=k2b9j*e z4k4d}bhz$_H@z(ZiftfE_EJThoFw{vuhu6;GHZyayG$mNWE6U-$-$rrh-*sRGti@j zFXzl+(+kB9E?0}^j6~onH175Fh%y+uAAo#T99c)GFjiGU-eL{oJxP?%M~beGQ$f!| zHdVPJ3@6eSi4~2TGk#y;uR4Knhn`GDSNnwe8R)|cr1OrB()PoEdJU_>PtnPpVALzY$sh(S75hJ{BH z5S184&`Due+(2Kjfm3K4^afy-)xMEi!}=Soyj0FCcWtn} z?PmCc?QL&9?@^*cN{IjMR>tk5@L-kk$ZlFSd4oyWLT211;FK~?*tV!OIbVUvn3SZZ zLzyOnM4VHJ|o?>^l7P#LkWF(Mzf}@@sH4{?%ICNT44UtRQ|-K zSIFvk@2~0Ooj`X2D7I;@j6g%_Hx}j|uQW)#UI1JD@u*$0e>o_Z$&@d!LoN2an#nL; zN?v)8(O|tD^D)^vYWiE@r|QDAl%c#{GgkES{&XEz8XC*=fWaVd!ki7*!8uutQ}Eq2m)h96UF4=$)*YYYM<-a_z5dgY^YCu$&h2k)1u)is z7McJ!Kv9N9thuz{gI4HsQ`O|;SNpP#I$LVPuZF93=e~5&(o=*$6oWsxylb$G5vsE+ z65i?n13_X{fr}H6{^aVQKI1cfZPl*>q4hJ?&rm__s<`XBaz-^Z_u$yI*TP97BAN&9 zUt3|ndi}dayB~8+A^#HDZJb#3eQUQv-|rm(qKx!lvseIN4}0rt84U0u`oO5p1vA`b zX+bqs`|!sCR#azdyl$eHn|B&!{CGq}`@6d4rHHLPak6#Ar2>S;^XHq#_ES|(>%+6u z{xQ@P7Xsi(7wm#R|5gyT_?&-!Xx}lQ(YU@MiA49n0vg-9AP>{^%-)D8cE4^1z%vtP z*Wo}DKHD>bR_`;g+&p>&D#xMX#1Bef3b`J zfgdUY4-~jtbzFeX@PBvse{W-9!2cgxkr@<>zWw_(*Y604)Stw*+I;=xFI~T{=L7(L zH>}IuS%91Q@8>d%6~)=D9nHA^Dj2eTVgXNRNs__u@mA60k2;m7fi9|F30~5;SgJV literal 0 HcmV?d00001 From 3c0008d04835767916fcf83cd19917d6b7ef5236 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 22 Oct 2024 16:30:31 +0100 Subject: [PATCH 385/496] fix: copy_demo_nbs function package path --- src/codeflare_sdk/common/utils/demos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/codeflare_sdk/common/utils/demos.py b/src/codeflare_sdk/common/utils/demos.py index 2571a23d..485f47f8 100644 --- a/src/codeflare_sdk/common/utils/demos.py +++ b/src/codeflare_sdk/common/utils/demos.py @@ -1,7 +1,7 @@ import pathlib import shutil -package_dir = pathlib.Path(__file__).parent.parent.resolve() +package_dir = pathlib.Path(__file__).parent.parent.parent.resolve() demo_dir = f"{package_dir}/demo-notebooks" From 9faebc2ad70fc8ed7425609974a81608b0bfe48f Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Mon, 14 Oct 2024 15:25:01 +0100 Subject: [PATCH 386/496] Dependabot Enhancements: - Ignore updating patch versions to focus on major and minor updates. - Limit number of PRs opened. - Add dependabot labeler workflow to add required labels to automatically add PRs to merge queues. - Add merge_group condition on test workflows to be ran on merge queues. 
--- .github/dependabot.yml | 31 ++++++++++++++++------- .github/workflows/dependabot-labeler.yaml | 17 +++++++++++++ .github/workflows/e2e_tests.yaml | 1 + .github/workflows/pre-commit.yaml | 1 + .github/workflows/unit-tests.yml | 1 + 5 files changed, 42 insertions(+), 9 deletions(-) create mode 100644 .github/workflows/dependabot-labeler.yaml diff --git a/.github/dependabot.yml b/.github/dependabot.yml index fb121a71..41474098 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -4,18 +4,16 @@ version: 2 updates: # This is to update requirements.txt files in the guided-demos, and e2e directories. - # The group configuration option is used to group updates for consistency across related directories. - package-ecosystem: "pip" directories: - "**/demo-notebooks/guided-demos*" - "/tests/e2e" schedule: - interval: "weekly" - groups: - requirements.txt: - patterns: - - "*" - open-pull-requests-limit: 10 + interval: "daily" + ignore: + - dependency-name: "*" + update-types: ["version-update:semver-patch"] + open-pull-requests-limit: 4 labels: - "test-guided-notebooks" @@ -23,7 +21,22 @@ updates: - package-ecosystem: "pip" directory: "/" schedule: - interval: "weekly" - open-pull-requests-limit: 10 + interval: "daily" + ignore: + - dependency-name: "*" + update-types: ["version-update:semver-patch"] + open-pull-requests-limit: 1 labels: - "test-guided-notebooks" + + # npm means yarn in this case, this keeps yarn.lock up to date with constraints in package.json. + - package-ecosystem: "npm" + directory: "/ui-tests" + schedule: + interval: "daily" + ignore: + - dependency-name: "*" + update-types: ["version-update:semver-patch"] + open-pull-requests-limit: 1 + labels: + - "test-ui-notebooks" diff --git a/.github/workflows/dependabot-labeler.yaml b/.github/workflows/dependabot-labeler.yaml new file mode 100644 index 00000000..86f4ccd9 --- /dev/null +++ b/.github/workflows/dependabot-labeler.yaml @@ -0,0 +1,17 @@ +# This workflow file adds the 'lgtm' and 'approved' labels to Dependabot PRs +# This is done to ensure that the PRs that pass e2e are automatically merged/added to merge-queues by the CodeFlare bot +name: Dependabot Labeler + +on: + pull_request: + +jobs: + add-approve-lgtm-label: + if: github.actor == 'dependabot[bot]' + runs-on: ubuntu-latest + steps: + - name: Add approve and lgtm labels to Dependabot PR + run: | + gh pr edit ${{ github.event.pull_request.number }} --add-label "lgtm" --add-label "approved" + env: + GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index d216df9d..2899f0a3 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -19,6 +19,7 @@ on: - '**.adoc' - '**.md' - 'LICENSE' + merge_group: concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index 4aa51882..e5c16646 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -7,6 +7,7 @@ on: - 'v*' pull_request: workflow_dispatch: + merge_group: jobs: precommit: diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index b7d8ba61..7391bb40 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -5,6 +5,7 @@ on: branches: [ main ] pull_request: branches: [ main ] + merge_group: jobs: unit-tests: From 0aed8046687c7ae6e34935816b9e5af53a778f6e Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 
22 Oct 2024 10:43:45 +0100 Subject: [PATCH 387/496] Trigger e2e in PR if e2e label is added --- .github/dependabot.yml | 3 +++ .github/workflows/dependabot-labeler.yaml | 2 +- .github/workflows/e2e_tests.yaml | 2 +- .github/workflows/guided_notebook_tests.yaml | 3 +-- .github/workflows/ui_notebooks_test.yaml | 3 +-- 5 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 41474098..cc89f631 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -15,6 +15,7 @@ updates: update-types: ["version-update:semver-patch"] open-pull-requests-limit: 4 labels: + - "dependabot" - "test-guided-notebooks" # pip means poetry in this case, this keeps poetry.lock up to date with constraints in pyproject.toml. @@ -27,6 +28,7 @@ updates: update-types: ["version-update:semver-patch"] open-pull-requests-limit: 1 labels: + - "dependabot" - "test-guided-notebooks" # npm means yarn in this case, this keeps yarn.lock up to date with constraints in package.json. @@ -39,4 +41,5 @@ updates: update-types: ["version-update:semver-patch"] open-pull-requests-limit: 1 labels: + - "dependabot" - "test-ui-notebooks" diff --git a/.github/workflows/dependabot-labeler.yaml b/.github/workflows/dependabot-labeler.yaml index 86f4ccd9..e5825f7d 100644 --- a/.github/workflows/dependabot-labeler.yaml +++ b/.github/workflows/dependabot-labeler.yaml @@ -7,7 +7,7 @@ on: jobs: add-approve-lgtm-label: - if: github.actor == 'dependabot[bot]' + if: github.actor == 'dependabot[bot]' && contains(github.event.pull_request.labels.*.name, 'dependabot') runs-on: ubuntu-latest steps: - name: Add approve and lgtm labels to Dependabot PR diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 2899f0a3..976be87e 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -30,7 +30,7 @@ env: jobs: kubernetes: - + if: contains(github.event.pull_request.labels.*.name, 'e2e') || github.event_name == 'merge_group' runs-on: ubuntu-20.04-4core-gpu steps: diff --git a/.github/workflows/guided_notebook_tests.yaml b/.github/workflows/guided_notebook_tests.yaml index eb6c5cd2..7678ebd1 100644 --- a/.github/workflows/guided_notebook_tests.yaml +++ b/.github/workflows/guided_notebook_tests.yaml @@ -2,7 +2,6 @@ name: Guided notebooks tests on: pull_request: - types: [ labeled ] concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} @@ -13,7 +12,7 @@ env: jobs: verify-0_basic_ray: - if: ${{ github.event.label.name == 'test-guided-notebooks' }} + if: contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') runs-on: ubuntu-20.04-4core steps: diff --git a/.github/workflows/ui_notebooks_test.yaml b/.github/workflows/ui_notebooks_test.yaml index 283486e6..9252c34f 100644 --- a/.github/workflows/ui_notebooks_test.yaml +++ b/.github/workflows/ui_notebooks_test.yaml @@ -2,7 +2,6 @@ name: UI notebooks tests on: pull_request: - types: [labeled] concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} @@ -13,7 +12,7 @@ env: jobs: verify-3_widget_example: - if: ${{ github.event.label.name == 'test-guided-notebooks' || github.event.label.name == 'test-ui-notebooks'}} + if: contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') || contains(github.event.pull_request.labels.*.name, 'test-ui-notebooks') runs-on: ubuntu-20.04-4core steps: From 1f9e9bf0965f2b10d81da20517ebb3e0cc62794c Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 23 Oct 2024 10:06:51 +0100 Subject: [PATCH 388/496] Add 
workflow types to trigger on label or push events --- .github/workflows/dependabot-labeler.yaml | 12 +++++++++++- .github/workflows/e2e_tests.yaml | 1 + .github/workflows/guided_notebook_tests.yaml | 1 + .github/workflows/ui_notebooks_test.yaml | 1 + 4 files changed, 14 insertions(+), 1 deletion(-) diff --git a/.github/workflows/dependabot-labeler.yaml b/.github/workflows/dependabot-labeler.yaml index e5825f7d..69c94617 100644 --- a/.github/workflows/dependabot-labeler.yaml +++ b/.github/workflows/dependabot-labeler.yaml @@ -4,12 +4,22 @@ name: Dependabot Labeler on: pull_request: + types: [ labeled, synchronize, opened, reopened ] jobs: add-approve-lgtm-label: - if: github.actor == 'dependabot[bot]' && contains(github.event.pull_request.labels.*.name, 'dependabot') + if: ${{ github.actor == 'dependabot[bot]' && contains(github.event.pull_request.labels.*.name, 'dependabot') }} runs-on: ubuntu-latest + + # Permission required to edit a PR + permissions: + pull-requests: write + issues: write + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Add approve and lgtm labels to Dependabot PR run: | gh pr edit ${{ github.event.pull_request.number }} --add-label "lgtm" --add-label "approved" diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 976be87e..2caf8a01 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -2,6 +2,7 @@ name: e2e on: pull_request: + types: [ labeled, synchronize, opened, reopened ] branches: - main - 'release-*' diff --git a/.github/workflows/guided_notebook_tests.yaml b/.github/workflows/guided_notebook_tests.yaml index 7678ebd1..46979aa9 100644 --- a/.github/workflows/guided_notebook_tests.yaml +++ b/.github/workflows/guided_notebook_tests.yaml @@ -2,6 +2,7 @@ name: Guided notebooks tests on: pull_request: + types: [ labeled, synchronize, opened, reopened ] concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} diff --git a/.github/workflows/ui_notebooks_test.yaml b/.github/workflows/ui_notebooks_test.yaml index 9252c34f..cac6641a 100644 --- a/.github/workflows/ui_notebooks_test.yaml +++ b/.github/workflows/ui_notebooks_test.yaml @@ -2,6 +2,7 @@ name: UI notebooks tests on: pull_request: + types: [ labeled, synchronize, opened, reopened ] concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} From 345546e15e561aba730b22a963778e0357f2db3b Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Wed, 23 Oct 2024 15:32:05 +0100 Subject: [PATCH 389/496] fix: update the snapshot to match the latest changes --- .../widgets-cell-2-linux.png | Bin 33158 -> 42051 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-2-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-2-linux.png index 6fc76020c35377a8ba64e70a49d07a01e6653795..1fd47fcd29eac28fdebec768f2f32cbd947bcbc9 100644 GIT binary patch literal 42051 zcmb@u2RPUN{y%IbQIaGpgp7=Y?5u1uQjx5X2-!j+E31-(tn5(PdxdN&t3k_-N}0)u z`|FV=szulfx){JwhSWOlcoOpm+P}nRK zjcXsj?!d@SgTH&v-fA6nF12~NHfX6p_?HFN(RnW`xr&{HM+1B*(wc+t?;V$j-Mt9^ zvMO+cdN*E$J#|~}==BUTGP0TX<=@LYS>=4EM@G`JvnR*LW8&l0*lFa#+%q*iJdOn@ zOwY`0R9QYfSIR9SvM}Doo^+d8;^xhpfkk?}AIt6EB}=(ckSSffm@RuwHRawtT9N=^ zNy(LmU7@~2s;x>9k&*iajMDP+e~gcdYP!}R;gYF4AmjPeq?(qQ^j2+p`sj%r%U@5O zJEy@;Q&U?zJoYoU)M;3bpZ`flMut`Uos%ijZ-$0en_sVo&jpnKaXEVISc>Q+YKr0d zYZ9EC&wBxq54FuBd0OfNZrWjL3Z}V3l~Oxx1Tc*P@r~ zu3ftX1qD}2eaF}-MDXJ|b1gZ#W%X)8)B4@Bi6J|8?ZU;-d8JfTdN;XFe>Jn?ypWx^ 
[base85-encoded binary patch data for widgets-cell-2-linux.png (Bin 33158 -> 42051 bytes) omitted]
zVHgRwF?;p^!78V^a5v7#k;LT6hb75;Pe&AtGLY)}J#gy*o%8}azYkqq_;rk*Nevl9 zf|BI>3C-2byn^GElqZ`^hSBIUg4C=qX5vH~Gl(As!bXwLonNWz6ttSK3t~ctEbD## zisLHU%l<_sM;{JNl-zJZJTNGrkDZ;QMn*39K^h~~CV!%b+g=$jl(ucNvRPOx+S9chhgrVA`+Z5O+%s%Q;e046Ar#aqZr#e9oMzGc&}~F%CEI(~n%Iw3@c;D$d*@!Vw8P From 13658b82a7fd1bb1ba656d7e3d9a27a608cd49e4 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 23 Oct 2024 17:33:20 +0100 Subject: [PATCH 390/496] Test PR Merge Queues --- .github/workflows/e2e_tests.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 2caf8a01..8d2bcaa2 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -1,3 +1,4 @@ +# e2e tests workflow for CodeFlare-SDK name: e2e on: From 5b805bd61f8e4af1a78f18d5bc9c8cbc3dc92a22 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Thu, 24 Oct 2024 12:15:32 +0100 Subject: [PATCH 391/496] Fix dependabot labeler and workflows --- .github/dependabot.yml | 2 +- .github/workflows/dependabot-labeler.yaml | 8 ++++---- .github/workflows/e2e_tests.yaml | 12 ------------ .github/workflows/guided_notebook_tests.yaml | 4 ++-- .github/workflows/pre-commit.yaml | 6 ------ .github/workflows/ui_notebooks_test.yaml | 4 ++-- .github/workflows/unit-tests.yml | 3 --- 7 files changed, 9 insertions(+), 30 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index cc89f631..9d2233c4 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -13,7 +13,7 @@ updates: ignore: - dependency-name: "*" update-types: ["version-update:semver-patch"] - open-pull-requests-limit: 4 + open-pull-requests-limit: 1 labels: - "dependabot" - "test-guided-notebooks" diff --git a/.github/workflows/dependabot-labeler.yaml b/.github/workflows/dependabot-labeler.yaml index 69c94617..f9bd27f9 100644 --- a/.github/workflows/dependabot-labeler.yaml +++ b/.github/workflows/dependabot-labeler.yaml @@ -1,10 +1,10 @@ # This workflow file adds the 'lgtm' and 'approved' labels to Dependabot PRs -# This is done to ensure that the PRs that pass e2e are automatically merged/added to merge-queues by the CodeFlare bot +# This is done to ensure that the PRs that pass required status checks are automatically merged by the CodeFlare bot name: Dependabot Labeler on: - pull_request: - types: [ labeled, synchronize, opened, reopened ] + pull_request_target: + branches: [ main ] jobs: add-approve-lgtm-label: @@ -24,4 +24,4 @@ jobs: run: | gh pr edit ${{ github.event.pull_request.number }} --add-label "lgtm" --add-label "approved" env: - GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GH_CLI_TOKEN }} diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 8d2bcaa2..7c461282 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -3,7 +3,6 @@ name: e2e on: pull_request: - types: [ labeled, synchronize, opened, reopened ] branches: - main - 'release-*' @@ -12,16 +11,6 @@ on: - '**.adoc' - '**.md' - 'LICENSE' - push: - branches: - - main - - 'release-*' - paths-ignore: - - 'docs/**' - - '**.adoc' - - '**.md' - - 'LICENSE' - merge_group: concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} @@ -32,7 +21,6 @@ env: jobs: kubernetes: - if: contains(github.event.pull_request.labels.*.name, 'e2e') || github.event_name == 'merge_group' runs-on: ubuntu-20.04-4core-gpu steps: diff --git a/.github/workflows/guided_notebook_tests.yaml b/.github/workflows/guided_notebook_tests.yaml index 
46979aa9..466c81aa 100644 --- a/.github/workflows/guided_notebook_tests.yaml +++ b/.github/workflows/guided_notebook_tests.yaml @@ -2,7 +2,7 @@ name: Guided notebooks tests on: pull_request: - types: [ labeled, synchronize, opened, reopened ] + branches: [ main ] concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} @@ -13,7 +13,7 @@ env: jobs: verify-0_basic_ray: - if: contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') + if: ${{ contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') }} runs-on: ubuntu-20.04-4core steps: diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index e5c16646..1575a654 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -1,13 +1,7 @@ name: Pre-commit on: - push: - branches: - - '**' - tags-ignore: - - 'v*' pull_request: workflow_dispatch: - merge_group: jobs: precommit: diff --git a/.github/workflows/ui_notebooks_test.yaml b/.github/workflows/ui_notebooks_test.yaml index cac6641a..28f7e06c 100644 --- a/.github/workflows/ui_notebooks_test.yaml +++ b/.github/workflows/ui_notebooks_test.yaml @@ -2,7 +2,7 @@ name: UI notebooks tests on: pull_request: - types: [ labeled, synchronize, opened, reopened ] + branches: [ main ] concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} @@ -13,7 +13,7 @@ env: jobs: verify-3_widget_example: - if: contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') || contains(github.event.pull_request.labels.*.name, 'test-ui-notebooks') + if: ${{ contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') || contains(github.event.pull_request.labels.*.name, 'test-ui-notebooks') }} runs-on: ubuntu-20.04-4core steps: diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 7391bb40..8b7a3a37 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -1,11 +1,8 @@ name: Python Tests on: - push: - branches: [ main ] pull_request: branches: [ main ] - merge_group: jobs: unit-tests: From 47d41aeb4580b3c077743b0fd8613589d7b4ce90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 07:44:32 +0000 Subject: [PATCH 392/496] Bump @playwright/test from 1.47.0 to 1.48.2 in /ui-tests Bumps [@playwright/test](https://github.com/microsoft/playwright) from 1.47.0 to 1.48.2. - [Release notes](https://github.com/microsoft/playwright/releases) - [Commits](https://github.com/microsoft/playwright/compare/v1.47.0...v1.48.2) --- updated-dependencies: - dependency-name: "@playwright/test" dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- ui-tests/package.json | 2 +- ui-tests/yarn.lock | 28 ++++++++++++++-------------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/ui-tests/package.json b/ui-tests/package.json index 17b79185..cd6d230e 100644 --- a/ui-tests/package.json +++ b/ui-tests/package.json @@ -16,7 +16,7 @@ "license": "BSD-3-Clause", "devDependencies": { "@jupyterlab/galata": "^5.0.1", - "@playwright/test": "^1.32.0", + "@playwright/test": "^1.48.2", "yarn-deduplicate": "^6.0.1" } } diff --git a/ui-tests/yarn.lock b/ui-tests/yarn.lock index 8084fe3d..f0b1fbc2 100644 --- a/ui-tests/yarn.lock +++ b/ui-tests/yarn.lock @@ -1151,12 +1151,12 @@ dependencies: exenv-es6 "^1.1.1" -"@playwright/test@^1.32.0", "@playwright/test@^1.43.1": - version "1.47.0" - resolved "https://registry.npmjs.org/@playwright/test/-/test-1.47.0.tgz#69fc55b10754147cc20021afbfa05747d4961bf0" - integrity sha512-SgAdlSwYVpToI4e/IH19IHHWvoijAYH5hu2MWSXptRypLSnzj51PcGD+rsOXFayde4P9ZLi+loXVwArg6IUkCA== +"@playwright/test@^1.43.1", "@playwright/test@^1.48.2": + version "1.48.2" + resolved "https://registry.npmjs.org/@playwright/test/-/test-1.48.2.tgz#87dd40633f980872283404c8142a65744d3f13d6" + integrity sha512-54w1xCWfXuax7dz4W2M9uw0gDyh+ti/0K/MxcCUxChFh37kkdxPdfZDw5QBbuPUJHr1CiHJ1hXgSs+GgeQc5Zw== dependencies: - playwright "1.47.0" + playwright "1.48.2" "@rjsf/core@^5.13.4": version "5.21.0" @@ -2079,17 +2079,17 @@ picocolors@^1.0.1: resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz#5358b76a78cde483ba5cef6a9dc9671440b27d59" integrity sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw== -playwright-core@1.47.0: - version "1.47.0" - resolved "https://registry.npmjs.org/playwright-core/-/playwright-core-1.47.0.tgz#b54ec060fd83e5c2e46b63986b5ebb5e96ace427" - integrity sha512-1DyHT8OqkcfCkYUD9zzUTfg7EfTd+6a8MkD/NWOvjo0u/SCNd5YmY/lJwFvUZOxJbWNds+ei7ic2+R/cRz/PDg== +playwright-core@1.48.2: + version "1.48.2" + resolved "https://registry.npmjs.org/playwright-core/-/playwright-core-1.48.2.tgz#cd76ed8af61690edef5c05c64721c26a8db2f3d7" + integrity sha512-sjjw+qrLFlriJo64du+EK0kJgZzoQPsabGF4lBvsid+3CNIZIYLgnMj9V6JY5VhM2Peh20DJWIVpVljLLnlawA== -playwright@1.47.0: - version "1.47.0" - resolved "https://registry.npmjs.org/playwright/-/playwright-1.47.0.tgz#fb9b028883fad11362f9ff63ce7ba44bda0bf626" - integrity sha512-jOWiRq2pdNAX/mwLiwFYnPHpEZ4rM+fRSQpRHwEwZlP2PUANvL3+aJOF/bvISMhFD30rqMxUB4RJx9aQbfh4Ww== +playwright@1.48.2: + version "1.48.2" + resolved "https://registry.npmjs.org/playwright/-/playwright-1.48.2.tgz#fca45ae8abdc34835c715718072aaff7e305167e" + integrity sha512-NjYvYgp4BPmiwfe31j4gHLa3J7bD2WiBz8Lk2RoSsmX38SVIARZ18VYjxLjAcDsAhA+F4iSEXTSGgjua0rrlgQ== dependencies: - playwright-core "1.47.0" + playwright-core "1.48.2" optionalDependencies: fsevents "2.3.2" From 6c0005aa5ca2111916dc3906e1714cef46032946 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 29 Oct 2024 09:12:15 +0000 Subject: [PATCH 393/496] Update conditional statements in guided_notebook_tests.yaml --- .github/workflows/guided_notebook_tests.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/guided_notebook_tests.yaml b/.github/workflows/guided_notebook_tests.yaml index 466c81aa..7a77d5a3 100644 --- a/.github/workflows/guided_notebook_tests.yaml +++ b/.github/workflows/guided_notebook_tests.yaml @@ -124,7 +124,7 @@ jobs: ${{ env.TEMP_DIR }}/**/*.log verify-1_cluster_job_client: - if: ${{ github.event.label.name == 
'test-guided-notebooks' }} + if: ${{ contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') }} runs-on: ubuntu-20.04-4core-gpu steps: @@ -246,7 +246,7 @@ jobs: ${{ env.TEMP_DIR }}/**/*.log verify-2_basic_interactive: - if: ${{ github.event.label.name == 'test-guided-notebooks' }} + if: ${{ contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') }} runs-on: ubuntu-20.04-4core-gpu steps: From 71e067b8274c2f0fff1ca8d646135fc1f038287c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 09:49:30 +0000 Subject: [PATCH 394/496] Bump torchmetrics from 0.9.1 to 1.5.1 in /demo-notebooks/guided-demos Bumps [torchmetrics](https://github.com/Lightning-AI/torchmetrics) from 0.9.1 to 1.5.1. - [Release notes](https://github.com/Lightning-AI/torchmetrics/releases) - [Changelog](https://github.com/Lightning-AI/torchmetrics/blob/master/CHANGELOG.md) - [Commits](https://github.com/Lightning-AI/torchmetrics/compare/v0.9.1...v1.5.1) --- updated-dependencies: - dependency-name: torchmetrics dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .../guided-demos/notebook-ex-outputs/requirements.txt | 2 +- demo-notebooks/guided-demos/preview_nbs/requirements.txt | 2 +- demo-notebooks/guided-demos/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt index e77d612b..6649bb8a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==1.9.5 ray_lightning -torchmetrics==0.9.1 +torchmetrics==1.5.1 torchvision==0.12.0 diff --git a/demo-notebooks/guided-demos/preview_nbs/requirements.txt b/demo-notebooks/guided-demos/preview_nbs/requirements.txt index e77d612b..6649bb8a 100644 --- a/demo-notebooks/guided-demos/preview_nbs/requirements.txt +++ b/demo-notebooks/guided-demos/preview_nbs/requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==1.9.5 ray_lightning -torchmetrics==0.9.1 +torchmetrics==1.5.1 torchvision==0.12.0 diff --git a/demo-notebooks/guided-demos/requirements.txt b/demo-notebooks/guided-demos/requirements.txt index af1bb36b..489e027b 100644 --- a/demo-notebooks/guided-demos/requirements.txt +++ b/demo-notebooks/guided-demos/requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==2.2.5 ray_lightning -torchmetrics==1.4.0 +torchmetrics==1.5.1 torchvision==0.18.0 From acd29a3e167277a3138080daf78051d40255f624 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 10:48:23 +0000 Subject: [PATCH 395/496] Bump pytorch-lightning in /demo-notebooks/guided-demos Bumps [pytorch-lightning](https://github.com/Lightning-AI/lightning) from 1.9.5 to 2.4.0. - [Release notes](https://github.com/Lightning-AI/lightning/releases) - [Commits](https://github.com/Lightning-AI/lightning/compare/1.9.5...2.4.0) --- updated-dependencies: - dependency-name: pytorch-lightning dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .../guided-demos/notebook-ex-outputs/requirements.txt | 2 +- demo-notebooks/guided-demos/preview_nbs/requirements.txt | 2 +- demo-notebooks/guided-demos/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt index 6649bb8a..dd83de8a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt @@ -1,4 +1,4 @@ -pytorch_lightning==1.9.5 +pytorch_lightning==2.4.0 ray_lightning torchmetrics==1.5.1 torchvision==0.12.0 diff --git a/demo-notebooks/guided-demos/preview_nbs/requirements.txt b/demo-notebooks/guided-demos/preview_nbs/requirements.txt index 6649bb8a..dd83de8a 100644 --- a/demo-notebooks/guided-demos/preview_nbs/requirements.txt +++ b/demo-notebooks/guided-demos/preview_nbs/requirements.txt @@ -1,4 +1,4 @@ -pytorch_lightning==1.9.5 +pytorch_lightning==2.4.0 ray_lightning torchmetrics==1.5.1 torchvision==0.12.0 diff --git a/demo-notebooks/guided-demos/requirements.txt b/demo-notebooks/guided-demos/requirements.txt index 489e027b..4d2111c0 100644 --- a/demo-notebooks/guided-demos/requirements.txt +++ b/demo-notebooks/guided-demos/requirements.txt @@ -1,4 +1,4 @@ -pytorch_lightning==2.2.5 +pytorch_lightning==2.4.0 ray_lightning torchmetrics==1.5.1 torchvision==0.18.0 From 26d24f2edff4aa8b09ee09a175fded79c9d623c2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 14:41:05 +0000 Subject: [PATCH 396/496] Bump coverage from 7.2.7 to 7.6.4 Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.2.7 to 7.6.4. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.2.7...7.6.4) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 128 +++++++++++++++++++++++++------------------------ pyproject.toml | 2 +- 2 files changed, 66 insertions(+), 64 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5c4ce93f..1fb76311 100644 --- a/poetry.lock +++ b/poetry.lock @@ -698,71 +698,73 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] [[package]] name = "coverage" -version = "7.2.7" +version = "7.6.4" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = 
"sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = 
"coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, + {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, + {file = "coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, + {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, + {file = "coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, + {file = "coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, + {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, + {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, + {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, + 
{file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, + {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, + {file = "coverage-7.6.4-cp39-cp39-win32.whl", hash = "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, + {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, + {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, + {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, ] [package.extras] @@ -4180,4 +4182,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "4463099e8d145fd823f523b134f18d48766038cc3d2ad466864e5a2debcc3479" +content-hash = "22e59cce1130d5186c8fac0c3cae42381d12db5a7b1310e10cf187199898ff8d" diff --git a/pyproject.toml b/pyproject.toml index 17b59880..b4fbbda0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,7 +42,7 @@ optional = true [tool.poetry.group.test.dependencies] pytest = "7.4.0" -coverage = "7.2.7" +coverage = "7.6.4" pytest-mock = "3.11.1" pytest-timeout = "2.3.1" jupyterlab = "4.2.5" From e4b13fc04800278fdfff2b638c993848aa7e75f0 Mon Sep 17 00:00:00 2001 From: Kevin Date: Tue, 29 Oct 2024 14:04:40 -0400 Subject: [PATCH 397/496] codecov wasn't being updated on main Signed-off-by: Kevin --- .github/workflows/unit-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 8b7a3a37..34327533 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -3,6 +3,8 @@ name: Python Tests on: pull_request: branches: [ main ] + push: + branches: [ main ] jobs: unit-tests: From 62cd27be350f6e59708f0e814f5b9f8eda18a77f Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Thu, 17 Oct 2024 13:03:47 +0100 Subject: [PATCH 398/496] e2e test for heterogenous cluster --- .github/workflows/e2e_tests.yaml | 4 + 
tests/e2e/heterogeneous_clusters_kind_test.py | 74 +++++++++ .../e2e/heterogeneous_clusters_oauth_test.py | 77 ++++++++++ tests/e2e/support.py | 143 ++++++++++++++---- 4 files changed, 265 insertions(+), 33 deletions(-) create mode 100644 tests/e2e/heterogeneous_clusters_kind_test.py create mode 100644 tests/e2e/heterogeneous_clusters_oauth_test.py diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 7c461282..fea42ab6 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -64,6 +64,8 @@ jobs: - name: Setup and start KinD cluster uses: ./common/github-actions/kind + with: + worker-nodes: 1 - name: Install NVidia GPU operator for KinD uses: ./common/github-actions/nvidia-gpu-operator @@ -102,6 +104,8 @@ jobs: kubectl create clusterrolebinding sdk-user-localqueue-creator --clusterrole=localqueue-creator --user=sdk-user kubectl create clusterrole list-secrets --verb=get,list --resource=secrets kubectl create clusterrolebinding sdk-user-list-secrets --clusterrole=list-secrets --user=sdk-user + kubectl create clusterrole pod-creator --verb=get,list --resource=pods + kubectl create clusterrolebinding sdk-user-pod-creator --clusterrole=pod-creator --user=sdk-user kubectl config use-context sdk-user - name: Run e2e tests diff --git a/tests/e2e/heterogeneous_clusters_kind_test.py b/tests/e2e/heterogeneous_clusters_kind_test.py new file mode 100644 index 00000000..8f814a7c --- /dev/null +++ b/tests/e2e/heterogeneous_clusters_kind_test.py @@ -0,0 +1,74 @@ +from time import sleep +import time +from codeflare_sdk import ( + Cluster, + ClusterConfiguration, +) + +from codeflare_sdk.common.kueue.kueue import list_local_queues + +import pytest + +from support import * + + +@pytest.mark.skip(reason="Skipping heterogenous cluster kind test") +@pytest.mark.kind +class TestHeterogeneousClustersKind: + def setup_method(self): + initialize_kubernetes_client(self) + + def teardown_method(self): + delete_namespace(self) + delete_kueue_resources(self) + + @pytest.mark.nvidia_gpu + def test_heterogeneous_clusters(self): + create_namespace(self) + create_kueue_resources(self, 2) + self.run_heterogeneous_clusters() + + def run_heterogeneous_clusters( + self, gpu_resource_name="nvidia.com/gpu", number_of_gpus=0 + ): + for flavor in self.resource_flavors: + node_labels = ( + get_flavor_spec(self, flavor).get("spec", {}).get("nodeLabels", {}) + ) + expected_nodes = get_nodes_by_label(self, node_labels) + + print(f"Expected nodes: {expected_nodes}") + cluster_name = f"test-ray-cluster-li-{flavor[-5:]}" + queues = list_local_queues(namespace=self.namespace, flavors=[flavor]) + queue_name = queues[0]["name"] if queues else None + print(f"Using flavor: {flavor}, Queue: {queue_name}") + cluster = Cluster( + ClusterConfiguration( + name=cluster_name, + namespace=self.namespace, + num_workers=1, + head_cpu_requests="500m", + head_cpu_limits="500m", + head_memory_requests=2, + head_memory_limits=2, + worker_cpu_requests="500m", + worker_cpu_limits=1, + worker_memory_requests=1, + worker_memory_limits=4, + worker_extended_resource_requests={ + gpu_resource_name: number_of_gpus + }, + write_to_file=True, + verify_tls=False, + local_queue=queue_name, + ) + ) + cluster.up() + sleep(5) + node_name = get_pod_node(self, self.namespace, cluster_name) + print(f"Cluster {cluster_name}-{flavor} is running on node: {node_name}") + sleep(5) + assert ( + node_name in expected_nodes + ), f"Node {node_name} is not in the expected nodes for flavor {flavor}." 
+ cluster.down() diff --git a/tests/e2e/heterogeneous_clusters_oauth_test.py b/tests/e2e/heterogeneous_clusters_oauth_test.py new file mode 100644 index 00000000..4a7a687c --- /dev/null +++ b/tests/e2e/heterogeneous_clusters_oauth_test.py @@ -0,0 +1,77 @@ +from time import sleep +import time +from codeflare_sdk import ( + Cluster, + ClusterConfiguration, + TokenAuthentication, +) + +from codeflare_sdk.common.kueue.kueue import list_local_queues + +import pytest + +from support import * + + +@pytest.mark.openshift +class TestHeterogeneousClustersOauth: + def setup_method(self): + initialize_kubernetes_client(self) + + def teardown_method(self): + delete_namespace(self) + delete_kueue_resources(self) + + def test_heterogeneous_clusters(self): + create_namespace(self) + create_kueue_resources(self, 2) + self.run_heterogeneous_clusters() + + def run_heterogeneous_clusters( + self, gpu_resource_name="nvidia.com/gpu", number_of_gpus=0 + ): + ray_image = get_ray_image() + + auth = TokenAuthentication( + token=run_oc_command(["whoami", "--show-token=true"]), + server=run_oc_command(["whoami", "--show-server=true"]), + skip_tls=True, + ) + auth.login() + + for flavor in self.resource_flavors: + node_labels = ( + get_flavor_spec(self, flavor).get("spec", {}).get("nodeLabels", {}) + ) + expected_nodes = get_nodes_by_label(self, node_labels) + + print(f"Expected nodes: {expected_nodes}") + cluster_name = f"test-ray-cluster-li-{flavor[-5:]}" + queues = list_local_queues(namespace=self.namespace, flavors=[flavor]) + queue_name = queues[0]["name"] if queues else None + print(f"Using flavor: {flavor}, Queue: {queue_name}") + cluster = Cluster( + ClusterConfiguration( + namespace=self.namespace, + name=cluster_name, + num_workers=1, + head_cpu_requests="500m", + head_cpu_limits="500m", + worker_cpu_requests="500m", + worker_cpu_limits=1, + worker_memory_requests=1, + worker_memory_limits=4, + image=ray_image, + verify_tls=False, + local_queue=queue_name, + ) + ) + cluster.up() + sleep(5) + node_name = get_pod_node(self, self.namespace, cluster_name) + print(f"Cluster {cluster_name}-{flavor} is running on node: {node_name}") + sleep(5) + assert ( + node_name in expected_nodes + ), f"Node {node_name} is not in the expected nodes for flavor {flavor}." 
+ cluster.down() diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 6fdd03af..60488466 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -1,3 +1,4 @@ +import json import os import random import string @@ -65,19 +66,30 @@ def create_namespace(self): return RuntimeError(e) -def create_new_resource_flavor(self): - self.resource_flavor = f"test-resource-flavor-{random_choice()}" - create_resource_flavor(self, self.resource_flavor) +def create_new_resource_flavor(self, num_flavors): + self.resource_flavors = [] + for i in range(num_flavors): + default = i < 1 + resource_flavor = f"test-resource-flavor-{random_choice()}" + create_resource_flavor(self, resource_flavor, default) + self.resource_flavors.append(resource_flavor) -def create_new_cluster_queue(self): - self.cluster_queue = f"test-cluster-queue-{random_choice()}" - create_cluster_queue(self, self.cluster_queue, self.resource_flavor) +def create_new_cluster_queue(self, num_queues): + self.cluster_queues = [] + for i in range(num_queues): + cluster_queue_name = f"test-cluster-queue-{random_choice()}" + create_cluster_queue(self, cluster_queue_name, self.resource_flavors[i]) + self.cluster_queues.append(cluster_queue_name) -def create_new_local_queue(self): - self.local_queue = f"test-local-queue-{random_choice()}" - create_local_queue(self, self.cluster_queue, self.local_queue) +def create_new_local_queue(self, num_queues): + self.local_queues = [] + for i in range(num_queues): + is_default = i == 0 + local_queue_name = f"test-local-queue-{random_choice()}" + create_local_queue(self, self.cluster_queues[i], local_queue_name, is_default) + self.local_queues.append(local_queue_name) def create_namespace_with_name(self, namespace_name): @@ -132,7 +144,7 @@ def create_cluster_queue(self, cluster_queue, flavor): {"name": "memory", "nominalQuota": "36Gi"}, {"name": "nvidia.com/gpu", "nominalQuota": 1}, ], - } + }, ], } ], @@ -161,11 +173,33 @@ def create_cluster_queue(self, cluster_queue, flavor): self.cluster_queue = cluster_queue -def create_resource_flavor(self, flavor): +def create_resource_flavor(self, flavor, default=True): + worker_label, worker_value = os.getenv("WORKER_LABEL", "worker-1=true").split("=") + control_label, control_value = os.getenv( + "CONTROL_LABEL", "ingress-ready=true" + ).split("=") + toleration_key = os.getenv( + "TOLERATION_KEY", "node-role.kubernetes.io/control-plane" + ) + + node_labels = ( + {worker_label: worker_value} if default else {control_label: control_value} + ) + resource_flavor_json = { "apiVersion": "kueue.x-k8s.io/v1beta1", "kind": "ResourceFlavor", "metadata": {"name": flavor}, + "spec": { + "nodeLabels": node_labels, + "tolerations": [ + { + "key": toleration_key, + "operator": "Exists", + "effect": "NoSchedule", + } + ], + }, } try: @@ -190,14 +224,14 @@ def create_resource_flavor(self, flavor): self.resource_flavor = flavor -def create_local_queue(self, cluster_queue, local_queue): +def create_local_queue(self, cluster_queue, local_queue, is_default=True): local_queue_json = { "apiVersion": "kueue.x-k8s.io/v1beta1", "kind": "LocalQueue", "metadata": { "namespace": self.namespace, "name": local_queue, - "annotations": {"kueue.x-k8s.io/default-queue": "true"}, + "annotations": {"kueue.x-k8s.io/default-queue": str(is_default).lower()}, }, "spec": {"clusterQueue": cluster_queue}, } @@ -226,34 +260,77 @@ def create_local_queue(self, cluster_queue, local_queue): self.local_queue = local_queue -def create_kueue_resources(self): +def create_kueue_resources(self, 
resource_ammount=1): print("creating Kueue resources ...") - create_new_resource_flavor(self) - create_new_cluster_queue(self) - create_new_local_queue(self) + create_new_resource_flavor(self, resource_ammount) + create_new_cluster_queue(self, resource_ammount) + create_new_local_queue(self, resource_ammount) def delete_kueue_resources(self): # Delete if given cluster-queue exists - try: - self.custom_api.delete_cluster_custom_object( - group="kueue.x-k8s.io", - plural="clusterqueues", - version="v1beta1", - name=self.cluster_queue, - ) - print(f"\n'{self.cluster_queue}' cluster-queue deleted") - except Exception as e: - print(f"\nError deleting cluster-queue '{self.cluster_queue}' : {e}") + for cq in self.cluster_queues: + try: + self.custom_api.delete_cluster_custom_object( + group="kueue.x-k8s.io", + plural="clusterqueues", + version="v1beta1", + name=cq, + ) + print(f"\n'{cq}' cluster-queue deleted") + except Exception as e: + print(f"\nError deleting cluster-queue '{cq}' : {e}") # Delete if given resource-flavor exists + for flavor in self.resource_flavors: + try: + self.custom_api.delete_cluster_custom_object( + group="kueue.x-k8s.io", + plural="resourceflavors", + version="v1beta1", + name=flavor, + ) + print(f"'{flavor}' resource-flavor deleted") + except Exception as e: + print(f"\nError deleting resource-flavor '{flavor}': {e}") + + +def get_pod_node(self, namespace, name): + label_selector = f"ray.io/cluster={name}" + pods = self.api_instance.list_namespaced_pod( + namespace, label_selector=label_selector + ) + if not pods.items: + raise ValueError( + f"Unable to retrieve node name for pod '{name}' in namespace '{namespace}'" + ) + pod = pods.items[0] + node_name = pod.spec.node_name + if node_name is None: + raise ValueError( + f"No node selected for pod '{name}' in namespace '{namespace}'" + ) + return node_name + + +def get_flavor_spec(self, flavor_name): try: - self.custom_api.delete_cluster_custom_object( + flavor = self.custom_api.get_cluster_custom_object( group="kueue.x-k8s.io", - plural="resourceflavors", version="v1beta1", - name=self.resource_flavor, + plural="resourceflavors", + name=flavor_name, ) - print(f"'{self.resource_flavor}' resource-flavor deleted") - except Exception as e: - print(f"\nError deleting resource-flavor '{self.resource_flavor}' : {e}") + return flavor + except client.exceptions.ApiException as e: + if e.status == 404: + print(f"ResourceFlavor '{flavor_name}' not found.") + else: + print(f"Error retrieving ResourceFlavor '{flavor_name}': {e}") + raise + + +def get_nodes_by_label(self, node_labels): + label_selector = ",".join(f"{k}={v}" for k, v in node_labels.items()) + nodes = self.api_instance.list_node(label_selector=label_selector) + return [node.metadata.name for node in nodes.items] From 6f75e20bf7ebfdf0d85330027f674c54d57e37d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 12:02:32 +0000 Subject: [PATCH 399/496] Bump cryptography from 40.0.2 to 43.0.3 Bumps [cryptography](https://github.com/pyca/cryptography) from 40.0.2 to 43.0.3. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/40.0.2...43.0.3) --- updated-dependencies: - dependency-name: cryptography dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 64 ++++++++++++++++++++++++++++---------------------- pyproject.toml | 2 +- 2 files changed, 37 insertions(+), 29 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1fb76311..d1601c13 100644 --- a/poetry.lock +++ b/poetry.lock @@ -772,44 +772,52 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "40.0.2" +version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b"}, - {file = "cryptography-40.0.2-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2"}, - {file = "cryptography-40.0.2-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9"}, - {file = "cryptography-40.0.2-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c"}, - {file = "cryptography-40.0.2-cp36-abi3-win32.whl", hash = "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9"}, - {file = "cryptography-40.0.2-cp36-abi3-win_amd64.whl", hash = "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a"}, - {file = "cryptography-40.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e"}, - {file = "cryptography-40.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404"}, - {file = "cryptography-40.0.2.tar.gz", hash = "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99"}, + {file = 
"cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file 
= "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, ] [package.dependencies] -cffi = ">=1.12" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "check-manifest", "mypy", "ruff"] -sdist = ["setuptools-rust (>=0.11.4)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] ssh = ["bcrypt (>=3.1.5)"] -test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -tox = ["tox"] [[package]] name = "debugpy" @@ -4182,4 +4190,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "22e59cce1130d5186c8fac0c3cae42381d12db5a7b1310e10cf187199898ff8d" +content-hash = "786cda0a2ec1cd0d12c24d9eac15b4675e323c6eb8c41fff9badf962dfa93d5e" diff --git a/pyproject.toml b/pyproject.toml index b4fbbda0..5e455088 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ openshift-client = "1.0.18" rich = "^12.5" ray = {version = "2.35.0", extras = ["data", "default"]} kubernetes = ">= 25.3.0, < 27" -cryptography = "40.0.2" +cryptography = "43.0.3" executing = "1.2.0" pydantic = "< 2" ipywidgets = "8.1.2" From b1921272aad333161bc43ff6ed81aad2a02d2d42 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 22:35:40 +0000 Subject: [PATCH 400/496] Bump sphinx-rtd-theme from 2.0.0 to 3.0.1 Bumps [sphinx-rtd-theme](https://github.com/readthedocs/sphinx_rtd_theme) from 2.0.0 to 3.0.1. - [Changelog](https://github.com/readthedocs/sphinx_rtd_theme/blob/master/docs/changelog.rst) - [Commits](https://github.com/readthedocs/sphinx_rtd_theme/compare/2.0.0...3.0.1) --- updated-dependencies: - dependency-name: sphinx-rtd-theme dependency-type: direct:development update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 16 ++++++++-------- pyproject.toml | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index d1601c13..d5318d08 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3607,22 +3607,22 @@ test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools [[package]] name = "sphinx-rtd-theme" -version = "2.0.0" +version = "3.0.1" description = "Read the Docs theme for Sphinx" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "sphinx_rtd_theme-2.0.0-py2.py3-none-any.whl", hash = "sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586"}, - {file = "sphinx_rtd_theme-2.0.0.tar.gz", hash = "sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b"}, + {file = "sphinx_rtd_theme-3.0.1-py2.py3-none-any.whl", hash = "sha256:921c0ece75e90633ee876bd7b148cfaad136b481907ad154ac3669b6fc957916"}, + {file = "sphinx_rtd_theme-3.0.1.tar.gz", hash = "sha256:a4c5745d1b06dfcb80b7704fe532eb765b44065a8fad9851e4258c8804140703"}, ] [package.dependencies] -docutils = "<0.21" -sphinx = ">=5,<8" +docutils = ">0.18,<0.22" +sphinx = ">=6,<9" sphinxcontrib-jquery = ">=4,<5" [package.extras] -dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] +dev = ["bump2version", "transifex-client", "twine", "wheel"] [[package]] name = "sphinxcontrib-applehelp" @@ -4190,4 +4190,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "786cda0a2ec1cd0d12c24d9eac15b4675e323c6eb8c41fff9badf962dfa93d5e" +content-hash = "77ed4d7230a35aead2842b16def52ec47ffaadba1374aaf72ee258a7fed3ec35" diff --git a/pyproject.toml b/pyproject.toml index 5e455088..cb03f609 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ optional = true [tool.poetry.group.docs.dependencies] sphinx = "7.4.7" -sphinx-rtd-theme = "2.0.0" +sphinx-rtd-theme = "3.0.1" [tool.poetry.group.test] optional = true From e8503c65a64c9b7b2c609d12ff19c2c66828cc16 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 22:58:14 +0000 Subject: [PATCH 401/496] Bump torchvision from 0.12.0 to 0.20.1 in /demo-notebooks/guided-demos Bumps [torchvision](https://github.com/pytorch/vision) from 0.12.0 to 0.20.1. - [Release notes](https://github.com/pytorch/vision/releases) - [Commits](https://github.com/pytorch/vision/compare/v0.12.0...v0.20.1) --- updated-dependencies: - dependency-name: torchvision dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- .../guided-demos/notebook-ex-outputs/requirements.txt | 2 +- demo-notebooks/guided-demos/preview_nbs/requirements.txt | 2 +- demo-notebooks/guided-demos/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt index dd83de8a..27a5620c 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==2.4.0 ray_lightning torchmetrics==1.5.1 -torchvision==0.12.0 +torchvision==0.20.1 diff --git a/demo-notebooks/guided-demos/preview_nbs/requirements.txt b/demo-notebooks/guided-demos/preview_nbs/requirements.txt index dd83de8a..27a5620c 100644 --- a/demo-notebooks/guided-demos/preview_nbs/requirements.txt +++ b/demo-notebooks/guided-demos/preview_nbs/requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==2.4.0 ray_lightning torchmetrics==1.5.1 -torchvision==0.12.0 +torchvision==0.20.1 diff --git a/demo-notebooks/guided-demos/requirements.txt b/demo-notebooks/guided-demos/requirements.txt index 4d2111c0..27a5620c 100644 --- a/demo-notebooks/guided-demos/requirements.txt +++ b/demo-notebooks/guided-demos/requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==2.4.0 ray_lightning torchmetrics==1.5.1 -torchvision==0.18.0 +torchvision==0.20.1 From 17dc2f75d0a3e37792e3b048c4eccbbbc49737a7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 23:23:33 +0000 Subject: [PATCH 402/496] Bump torchvision from 0.19.0 to 0.20.1 in /tests/e2e Bumps [torchvision](https://github.com/pytorch/vision) from 0.19.0 to 0.20.1. - [Release notes](https://github.com/pytorch/vision/releases) - [Commits](https://github.com/pytorch/vision/compare/0.19.0...v0.20.1) --- updated-dependencies: - dependency-name: torchvision dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- tests/e2e/mnist_pip_requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/e2e/mnist_pip_requirements.txt b/tests/e2e/mnist_pip_requirements.txt index 05ed5816..60811f18 100644 --- a/tests/e2e/mnist_pip_requirements.txt +++ b/tests/e2e/mnist_pip_requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==1.9.5 torchmetrics==0.9.1 -torchvision==0.19.0 +torchvision==0.20.1 minio From ec72303fefcf86ccabdabc04b7a67577aaebca53 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 23:08:56 +0000 Subject: [PATCH 403/496] Bump @jupyterlab/galata from 5.2.5 to 5.3.0 in /ui-tests Bumps [@jupyterlab/galata](https://github.com/jupyterlab/jupyterlab) from 5.2.5 to 5.3.0. - [Release notes](https://github.com/jupyterlab/jupyterlab/releases) - [Changelog](https://github.com/jupyterlab/jupyterlab/blob/main/CHANGELOG.md) - [Commits](https://github.com/jupyterlab/jupyterlab/compare/@jupyterlab/galata@5.2.5...@jupyterlab/galata@5.3.0) --- updated-dependencies: - dependency-name: "@jupyterlab/galata" dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- ui-tests/package.json | 2 +- ui-tests/yarn.lock | 1169 +++++++++++++++++++++-------------------- 2 files changed, 595 insertions(+), 576 deletions(-) diff --git a/ui-tests/package.json b/ui-tests/package.json index cd6d230e..89971922 100644 --- a/ui-tests/package.json +++ b/ui-tests/package.json @@ -15,7 +15,7 @@ "author": "Project Jupyter", "license": "BSD-3-Clause", "devDependencies": { - "@jupyterlab/galata": "^5.0.1", + "@jupyterlab/galata": "^5.3.0", "@playwright/test": "^1.48.2", "yarn-deduplicate": "^6.0.1" } diff --git a/ui-tests/yarn.lock b/ui-tests/yarn.lock index f0b1fbc2..5d383ffc 100644 --- a/ui-tests/yarn.lock +++ b/ui-tests/yarn.lock @@ -2,7 +2,7 @@ # yarn lockfile v1 -"@codemirror/autocomplete@^6.0.0", "@codemirror/autocomplete@^6.15.0", "@codemirror/autocomplete@^6.3.2", "@codemirror/autocomplete@^6.7.1": +"@codemirror/autocomplete@^6.0.0", "@codemirror/autocomplete@^6.3.2", "@codemirror/autocomplete@^6.7.1": version "6.18.0" resolved "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.18.0.tgz#5f39b05daca04c95e990b70024144df47b2aa635" integrity sha512-5DbOvBbY4qW5l57cjDsmmpDh3/TeK1vXfTHa+BUMrRzdWdcxKZ4U4V7vQaTtOpApNU4kLS4FQ6cINtLg245LXA== @@ -12,10 +12,20 @@ "@codemirror/view" "^6.17.0" "@lezer/common" "^1.0.0" -"@codemirror/commands@^6.3.3": - version "6.6.1" - resolved "https://registry.npmjs.org/@codemirror/commands/-/commands-6.6.1.tgz#6beaf2f94df1af1e7d4a811dff4fea2ac227b49c" - integrity sha512-iBfKbyIoXS1FGdsKcZmnrxmbc8VcbMrSgD7AVrsnX+WyAYjmUDWvE93dt5D874qS4CCVu4O1JpbagHdXbbLiOw== +"@codemirror/autocomplete@^6.16.0": + version "6.18.2" + resolved "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.18.2.tgz#bf3f15f1bf0fdfa3b4fac560e419adae1ece8a94" + integrity sha512-wJGylKtMFR/Ds6Gh01+OovXE/pncPiKZNNBKuC39pKnH+XK5d9+WsNqcrdxPjFPFTigRBqse0rfxw9UxrfyhPg== + dependencies: + "@codemirror/language" "^6.0.0" + "@codemirror/state" "^6.0.0" + "@codemirror/view" "^6.17.0" + "@lezer/common" "^1.0.0" + +"@codemirror/commands@^6.5.0": + version "6.7.1" + resolved "https://registry.npmjs.org/@codemirror/commands/-/commands-6.7.1.tgz#04561e95bc0779eaa49efd63e916c4efb3bbf6d6" + integrity sha512-llTrboQYw5H4THfhN4U3qCnSZ1SOJ60ohhz+SzU0ADGtwlc533DtklQP0vSFaQuCPDn3BPpOd1GbbnUtwNjsrw== dependencies: "@codemirror/language" "^6.0.0" "@codemirror/state" "^6.4.0" @@ -41,7 +51,7 @@ "@lezer/common" "^1.0.2" "@lezer/css" "^1.1.7" -"@codemirror/lang-html@^6.0.0", "@codemirror/lang-html@^6.4.8": +"@codemirror/lang-html@^6.0.0", "@codemirror/lang-html@^6.4.9": version "6.4.9" resolved "https://registry.npmjs.org/@codemirror/lang-html/-/lang-html-6.4.9.tgz#d586f2cc9c341391ae07d1d7c545990dfa069727" integrity sha512-aQv37pIMSlueybId/2PVSP6NPnmurFDVmZwzc7jszd2KAF8qd4VBbvNYPXWQq90WIARjsdVkPbw29pszmHws3Q== @@ -85,10 +95,10 @@ "@codemirror/language" "^6.0.0" "@lezer/json" "^1.0.0" -"@codemirror/lang-markdown@^6.2.4": - version "6.2.5" - resolved "https://registry.npmjs.org/@codemirror/lang-markdown/-/lang-markdown-6.2.5.tgz#451941bf743d3788e73598f1aedb71cbeb6f71ba" - integrity sha512-Hgke565YcO4fd9pe2uLYxnMufHO5rQwRr+AAhFq8ABuhkrjyX8R5p5s+hZUTdV60O0dMRjxKhBLxz8pu/MkUVA== +"@codemirror/lang-markdown@^6.2.5": + version "6.3.0" + resolved "https://registry.npmjs.org/@codemirror/lang-markdown/-/lang-markdown-6.3.0.tgz#949f8803332441705ed6def34c565f2166479538" + integrity sha512-lYrI8SdL/vhd0w0aHIEvIRLRecLF7MiiRfzXFZY94dFwHqC9HtgxgagJ8fyYNBldijGatf9wkms60d8SrAj6Nw== dependencies: "@codemirror/autocomplete" "^6.7.1" 
"@codemirror/lang-html" "^6.0.0" @@ -109,7 +119,7 @@ "@lezer/common" "^1.0.0" "@lezer/php" "^1.0.0" -"@codemirror/lang-python@^6.1.4": +"@codemirror/lang-python@^6.1.6": version "6.1.6" resolved "https://registry.npmjs.org/@codemirror/lang-python/-/lang-python-6.1.6.tgz#0c55e7e2dfa85b68be93b9692e5d3f76f284bbb2" integrity sha512-ai+01WfZhWqM92UqjnvorkxosZ2aq2u28kHvr+N3gu012XqY2CThD67JPMHnGceRfXPDBmn1HnyqowdpF57bNg== @@ -128,10 +138,10 @@ "@codemirror/language" "^6.0.0" "@lezer/rust" "^1.0.0" -"@codemirror/lang-sql@^6.6.1": - version "6.7.1" - resolved "https://registry.npmjs.org/@codemirror/lang-sql/-/lang-sql-6.7.1.tgz#314211eb79fbba96b4dd0d24fa954be9803368ab" - integrity sha512-flQa7zemrLKk0TIrOJnpeyH/b29BcVybtsTeZMgAo40O6kGbrnUSCgwI3TF5iJY3O9VXJKKCA+i0CBVvDfr88w== +"@codemirror/lang-sql@^6.6.4": + version "6.8.0" + resolved "https://registry.npmjs.org/@codemirror/lang-sql/-/lang-sql-6.8.0.tgz#1ae68ad49f378605ff88a4cc428ba667ce056068" + integrity sha512-aGLmY4OwGqN3TdSx3h6QeA1NrvaYtF7kkoWR/+W7/JzB0gQtJ+VJxewlnE3+VImhA4WVlhmkJr109PefOOhjLg== dependencies: "@codemirror/autocomplete" "^6.0.0" "@codemirror/language" "^6.0.0" @@ -174,7 +184,7 @@ "@lezer/lr" "^1.0.0" style-mod "^4.0.0" -"@codemirror/legacy-modes@^6.3.3": +"@codemirror/legacy-modes@^6.4.0": version "6.4.1" resolved "https://registry.npmjs.org/@codemirror/legacy-modes/-/legacy-modes-6.4.1.tgz#fae7b03cad1beada637fd3c12c568a3a7f63fe89" integrity sha512-vdg3XY7OAs5uLDx2Iw+cGfnwtd7kM+Et/eMsqAGTfT/JKiVBQZXosTzjEbWAi/FrY6DcQIz8mQjBozFHZEUWQA== @@ -204,7 +214,7 @@ resolved "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz#da57143695c056d9a3c38705ed34136e2b68171b" integrity sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A== -"@codemirror/view@^6.0.0", "@codemirror/view@^6.17.0", "@codemirror/view@^6.23.0", "@codemirror/view@^6.26.0", "@codemirror/view@^6.27.0": +"@codemirror/view@^6.0.0", "@codemirror/view@^6.17.0", "@codemirror/view@^6.23.0", "@codemirror/view@^6.27.0": version "6.33.0" resolved "https://registry.npmjs.org/@codemirror/view/-/view-6.33.0.tgz#51e270410fc3af92a6e38798e80ebf8add7dc3ec" integrity sha512-AroaR3BvnjRW8fiZBalAaK+ZzB5usGgI014YKElYZvQdNH5ZIidHlO+cyf/2rWzyBFRkvG6VhiXeAEbC53P2YQ== @@ -213,34 +223,42 @@ style-mod "^4.1.0" w3c-keyname "^2.2.4" +"@codemirror/view@^6.26.3": + version "6.34.1" + resolved "https://registry.npmjs.org/@codemirror/view/-/view-6.34.1.tgz#b17ed29c563e4adc60086233f2d3e7197e2dc33e" + integrity sha512-t1zK/l9UiRqwUNPm+pdIT0qzJlzuVckbTEMVNFhfWkGiBQClstzg+78vedCvLSX0xJEZ6lwZbPpnljL7L6iwMQ== + dependencies: + "@codemirror/state" "^6.4.0" + style-mod "^4.1.0" + w3c-keyname "^2.2.4" + "@fortawesome/fontawesome-free@^5.12.0": version "5.15.4" resolved "https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-5.15.4.tgz#ecda5712b61ac852c760d8b3c79c96adca5554e5" integrity sha512-eYm8vijH/hpzr/6/1CJ/V/Eb1xQFW2nnUKArb3z+yUWv7HTwj6M7SP957oMjfZjAHU6qpoNc2wQvIxBLWYa/Jg== -"@jupyter/react-components@^0.15.3": - version "0.15.3" - resolved "https://registry.npmjs.org/@jupyter/react-components/-/react-components-0.15.3.tgz#891b2cb8457cd9b06079e2937795f8d7d0e319ba" - integrity sha512-u85j88lxMSQ+2iVWuHPpMtFxek7nQnQsLh2eVJQFVv0ECX6H4s2WzIa4Ha1VRu4gjAXDtI/9wec9qUW6nezUnw== +"@jupyter/react-components@^0.16.6": + version "0.16.7" + resolved "https://registry.npmjs.org/@jupyter/react-components/-/react-components-0.16.7.tgz#94926647a3578409c65d69d5b44c86cb0ca8ceab" + integrity 
sha512-BKIPkJ9V011uhtdq1xBOu2M3up59CqsRbDS4aq8XhnHR4pwqfRV6k6irE5YBOETCoIwWZZ5RZO+cJcZ3DcsT5A== dependencies: - "@jupyter/web-components" "^0.15.3" - "@microsoft/fast-react-wrapper" "^0.3.22" + "@jupyter/web-components" "^0.16.7" react ">=17.0.0 <19.0.0" -"@jupyter/web-components@^0.15.3": - version "0.15.3" - resolved "https://registry.npmjs.org/@jupyter/web-components/-/web-components-0.15.3.tgz#5491b5f6bc4e12f9b9c85bac63cd19514abedef1" - integrity sha512-ZrbyXr9mcYowXZ0Ic09ee6W2T3+t7+HP7yYwd2u7wHmQ0etWq5r8+DNdp8uFHiAeTV1kFm6aogNufR9fH34MCw== +"@jupyter/web-components@^0.16.6", "@jupyter/web-components@^0.16.7": + version "0.16.7" + resolved "https://registry.npmjs.org/@jupyter/web-components/-/web-components-0.16.7.tgz#cd347c4a1dcda9597ef405f94e27bfcfe920d1b6" + integrity sha512-1a8awgvvP9J9pCV5vBRuQxdBk29764qiMJsJYEndrWH3cB/FlaO+sZIBm4OTf56Eqdgl8R3/ZSLM1+3mgXOkPg== dependencies: "@microsoft/fast-colors" "^5.3.1" "@microsoft/fast-element" "^1.12.0" "@microsoft/fast-foundation" "^2.49.4" "@microsoft/fast-web-utilities" "^5.4.1" -"@jupyter/ydoc@^2.0.1": - version "2.1.1" - resolved "https://registry.npmjs.org/@jupyter/ydoc/-/ydoc-2.1.1.tgz#96487cab6d3eb1419a7891384e8fcbefc971d605" - integrity sha512-NeEwqXQ2j1OyLq4uezeQmsMiI+Qo5k7dYIMqNByOM7dJp6sHeP0jQ96w7BEc9E4SmrxwcOT4cLvcJWJE8Xun4g== +"@jupyter/ydoc@^3.0.0": + version "3.0.0" + resolved "https://registry.npmjs.org/@jupyter/ydoc/-/ydoc-3.0.0.tgz#36fcc4790723644713e35df32916961e4c9428d4" + integrity sha512-oWTSBPifD81I1oRNyKkMJF14FzNvBpJxiYHXaC1XeFXk67KNiqDepjVpYJ1E2QYThZhZGGtdNc6TC1XCQAJVKA== dependencies: "@jupyterlab/nbformat" "^3.0.0 || ^4.0.0-alpha.21 || ^4.0.0" "@lumino/coreutils" "^1.11.0 || ^2.0.0" @@ -249,368 +267,368 @@ y-protocols "^1.0.5" yjs "^13.5.40" -"@jupyterlab/application@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/application/-/application-4.2.5.tgz#a24819d607b46c80c6a366811680cf93eb45ea28" - integrity sha512-einksHwj/aki/F3Z3kA9o/8ZfHsDrdvzN753jv8wxPpi8s0aDfMivHafbvV4O4iN02y0Iv2yxsHEpA65SP/Kmg== +"@jupyterlab/application@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/application/-/application-4.3.0.tgz#c33e8f836974fbe4b3dd9f41c46ea3c8c8ce5b60" + integrity sha512-tH34PGe/cKDDcGgUfjtzVnvJRQCn9KRZtmXUHKYVWT4cJ4lgtiTWPPxPZv4vJi6bUFZG3tv9nfrPt1sUlPjAhg== dependencies: "@fortawesome/fontawesome-free" "^5.12.0" - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/docregistry" "^4.2.5" - "@jupyterlab/rendermime" "^4.2.5" - "@jupyterlab/rendermime-interfaces" "^3.10.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/statedb" "^4.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/application" "^2.3.1" - "@lumino/commands" "^2.3.0" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/polling" "^2.1.2" - "@lumino/properties" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" - -"@jupyterlab/apputils@^4.3.5": - version "4.3.5" - resolved "https://registry.npmjs.org/@jupyterlab/apputils/-/apputils-4.3.5.tgz#ea335ed016969c5cc844e1922e0d456cafbcdd9b" - integrity sha512-O88fr29bne7X29r742pb4NIQXm6VYRnx9uDzOh5RuAf/oJlW+xeY95EsHWFfXJQEerueuBp5ivTQxhf1ZM1hew== - dependencies: - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/rendermime-interfaces" "^3.10.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/settingregistry" "^4.2.5" - "@jupyterlab/statedb" "^4.2.5" - 
"@jupyterlab/statusbar" "^4.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/commands" "^2.3.0" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/domutils" "^2.0.1" - "@lumino/messaging" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/virtualdom" "^2.0.1" - "@lumino/widgets" "^2.3.2" + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/docregistry" "^4.3.0" + "@jupyterlab/rendermime" "^4.3.0" + "@jupyterlab/rendermime-interfaces" "^3.11.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/statedb" "^4.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/application" "^2.4.1" + "@lumino/commands" "^2.3.1" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/messaging" "^2.0.2" + "@lumino/polling" "^2.1.3" + "@lumino/properties" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" + +"@jupyterlab/apputils@^4.4.0": + version "4.4.0" + resolved "https://registry.npmjs.org/@jupyterlab/apputils/-/apputils-4.4.0.tgz#3c9d58c7ffd4fab5de067ab88b0f67139930d200" + integrity sha512-Qlt36C9AVCyOx1O30KZR0Q1cVbquxsbD+ZhxFdDKaoSlax0vsRrOR42FDLXqvRGO5MCFHe/KtNjTRHise6o/ww== + dependencies: + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/rendermime-interfaces" "^3.11.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/settingregistry" "^4.3.0" + "@jupyterlab/statedb" "^4.3.0" + "@jupyterlab/statusbar" "^4.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/commands" "^2.3.1" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/domutils" "^2.0.2" + "@lumino/messaging" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/virtualdom" "^2.0.2" + "@lumino/widgets" "^2.5.0" "@types/react" "^18.0.26" react "^18.2.0" sanitize-html "~2.12.1" -"@jupyterlab/attachments@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/attachments/-/attachments-4.2.5.tgz#55704047a015ce28d32d7696ee9b5f95310abd37" - integrity sha512-W1on7D8ZVfe//Ec/fJOD4OMpsUkRhelEQBEuhJEbgKZ+b5y0awXiCHGsdQ1rkiHt90tLReAPqBDj5+9KavlM7g== +"@jupyterlab/attachments@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/attachments/-/attachments-4.3.0.tgz#41818a3a1f8fa9107f9e8d6ac1b632ab6dddaa21" + integrity sha512-xToxNy3qKbIl1SVAnedAUBcjvFy7CXM1lMkf5jAR/hGGy+ac4dthYXTLiefaUi4UvpBLbeHfeY/Ya8UuWfSPkg== dependencies: - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/rendermime" "^4.2.5" - "@jupyterlab/rendermime-interfaces" "^3.10.5" - "@lumino/disposable" "^2.1.2" - "@lumino/signaling" "^2.1.2" + "@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/rendermime" "^4.3.0" + "@jupyterlab/rendermime-interfaces" "^3.11.0" + "@lumino/disposable" "^2.1.3" + "@lumino/signaling" "^2.1.3" -"@jupyterlab/cells@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/cells/-/cells-4.2.5.tgz#205d020f6074d7e6b4c086d86d8a669973971fcf" - integrity sha512-4fSJnZbPT6YKTHu/DWgelFFRCo3+sU2di68e0k8b0oincOc4c0mg6hNa0dmxmrDah1IdOlhVOZX7RqKKYFGFEA== +"@jupyterlab/cells@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/cells/-/cells-4.3.0.tgz#8079c5d532891aa4a2b3ef260a8046a766fb1d99" + integrity 
sha512-IxTwICkGzp/S18TNoC+81sRGcPtlTVWZ7G44lTXTYKW+YrWRlO/bIyoNmWlc7Xu5tRitfmzqJFw80V/enMqG6A== dependencies: "@codemirror/state" "^6.4.1" - "@codemirror/view" "^6.26.0" - "@jupyter/ydoc" "^2.0.1" - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/attachments" "^4.2.5" - "@jupyterlab/codeeditor" "^4.2.5" - "@jupyterlab/codemirror" "^4.2.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/documentsearch" "^4.2.5" - "@jupyterlab/filebrowser" "^4.2.5" - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/outputarea" "^4.2.5" - "@jupyterlab/rendermime" "^4.2.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/toc" "^6.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/coreutils" "^2.1.2" - "@lumino/domutils" "^2.0.1" - "@lumino/dragdrop" "^2.1.4" - "@lumino/messaging" "^2.0.1" - "@lumino/polling" "^2.1.2" - "@lumino/signaling" "^2.1.2" - "@lumino/virtualdom" "^2.0.1" - "@lumino/widgets" "^2.3.2" + "@codemirror/view" "^6.26.3" + "@jupyter/ydoc" "^3.0.0" + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/attachments" "^4.3.0" + "@jupyterlab/codeeditor" "^4.3.0" + "@jupyterlab/codemirror" "^4.3.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/documentsearch" "^4.3.0" + "@jupyterlab/filebrowser" "^4.3.0" + "@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/outputarea" "^4.3.0" + "@jupyterlab/rendermime" "^4.3.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/toc" "^6.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + "@lumino/domutils" "^2.0.2" + "@lumino/dragdrop" "^2.1.5" + "@lumino/messaging" "^2.0.2" + "@lumino/polling" "^2.1.3" + "@lumino/signaling" "^2.1.3" + "@lumino/virtualdom" "^2.0.2" + "@lumino/widgets" "^2.5.0" react "^18.2.0" -"@jupyterlab/codeeditor@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/codeeditor/-/codeeditor-4.2.5.tgz#16eb06f25e43be37eb4bd6bbb48cecccd18a46a1" - integrity sha512-Y145T9JSD89mBTiD67JxWG738Adv87Z+RWp87W9CJE4ZLsJo1FzyXBvIAAwXN737zoSvnx3lMMY3mmWqUdicZg== +"@jupyterlab/codeeditor@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/codeeditor/-/codeeditor-4.3.0.tgz#1856dc4e0a8b5f08700b438cf5c11596fe156a45" + integrity sha512-eV0lxowI2CFalnqKL62kWV7/EekLfaQ4RjjrQJ8C+pz4/QNgj7oM/oaYd8YVM4rRa+TqGFQOzXoDm3Wk4Ely6g== dependencies: "@codemirror/state" "^6.4.1" - "@jupyter/ydoc" "^2.0.1" - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/statusbar" "^4.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/dragdrop" "^2.1.4" - "@lumino/messaging" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" + "@jupyter/ydoc" "^3.0.0" + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/statusbar" "^4.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/dragdrop" "^2.1.5" + "@lumino/messaging" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" react "^18.2.0" -"@jupyterlab/codemirror@^4.2.5": - version "4.2.5" - resolved 
"https://registry.npmjs.org/@jupyterlab/codemirror/-/codemirror-4.2.5.tgz#b4bd9c46c8629920d4843d7f052ac5aaa5ac3a3d" - integrity sha512-oz0RApEC8Te4ojee4Mmm1nsEjYCBfXiP4qfoWfJJ5kHS52vwaylH8lWm6Dy/7RNiLdfQA29zR9X+ImD69exVKw== +"@jupyterlab/codemirror@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/codemirror/-/codemirror-4.3.0.tgz#312be380cfe39dcd9f76527935314523c5f80e34" + integrity sha512-8cVyybFe3g8Z5A6Hz8p5Xsp0rqaQyZBypxqW952hDrMJ6jqWERBtQQ0Jwjqjz+TdABaloX/wIbG+oYFbfrfxKA== dependencies: - "@codemirror/autocomplete" "^6.15.0" - "@codemirror/commands" "^6.3.3" + "@codemirror/autocomplete" "^6.16.0" + "@codemirror/commands" "^6.5.0" "@codemirror/lang-cpp" "^6.0.2" "@codemirror/lang-css" "^6.2.1" - "@codemirror/lang-html" "^6.4.8" + "@codemirror/lang-html" "^6.4.9" "@codemirror/lang-java" "^6.0.1" "@codemirror/lang-javascript" "^6.2.2" "@codemirror/lang-json" "^6.0.1" - "@codemirror/lang-markdown" "^6.2.4" + "@codemirror/lang-markdown" "^6.2.5" "@codemirror/lang-php" "^6.0.1" - "@codemirror/lang-python" "^6.1.4" + "@codemirror/lang-python" "^6.1.6" "@codemirror/lang-rust" "^6.0.1" - "@codemirror/lang-sql" "^6.6.1" + "@codemirror/lang-sql" "^6.6.4" "@codemirror/lang-wast" "^6.0.2" "@codemirror/lang-xml" "^6.1.0" "@codemirror/language" "^6.10.1" - "@codemirror/legacy-modes" "^6.3.3" + "@codemirror/legacy-modes" "^6.4.0" "@codemirror/search" "^6.5.6" "@codemirror/state" "^6.4.1" - "@codemirror/view" "^6.26.0" - "@jupyter/ydoc" "^2.0.1" - "@jupyterlab/codeeditor" "^4.2.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/documentsearch" "^4.2.5" - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/translation" "^4.2.5" + "@codemirror/view" "^6.26.3" + "@jupyter/ydoc" "^3.0.0" + "@jupyterlab/codeeditor" "^4.3.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/documentsearch" "^4.3.0" + "@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/translation" "^4.3.0" "@lezer/common" "^1.2.1" "@lezer/generator" "^1.7.0" "@lezer/highlight" "^1.2.0" - "@lezer/markdown" "^1.2.0" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/signaling" "^2.1.2" + "@lezer/markdown" "^1.3.0" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/signaling" "^2.1.3" yjs "^13.5.40" -"@jupyterlab/console@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/console/-/console-4.2.5.tgz#fced414f069d1937471752e76cfb276e88967b6a" - integrity sha512-qOPi2w+TG53ADXvud93aMsCsQUDJxhA8c2anWQ961g0ctbzqwOtGgb8MiUVGLEkFf0tUY8Fov39NdOJr75aCmQ== - dependencies: - "@jupyter/ydoc" "^2.0.1" - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/cells" "^4.2.5" - "@jupyterlab/codeeditor" "^4.2.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/rendermime" "^4.2.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/dragdrop" "^2.1.4" - "@lumino/messaging" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" - -"@jupyterlab/coreutils@^6.2.5": - version "6.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/coreutils/-/coreutils-6.2.5.tgz#906dc8b65c841e8bf1c7589983bab90a12f8d24e" - integrity sha512-P3HniEv3bZ3EvV3zUwCmruR713fclGvSTfsuwFPBgI8M3rNIZYqGQ13xkTun7Zl6DUr2E8mrC/cq9jNwxW33yw== - dependencies: - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/signaling" "^2.1.2" +"@jupyterlab/console@^4.3.0": + version "4.3.0" + 
resolved "https://registry.npmjs.org/@jupyterlab/console/-/console-4.3.0.tgz#5e425dcd2c6d8d50791410bdd429cc852e2b00bc" + integrity sha512-Ph82d61psolGDNxRwO+88deP8agjK86+sh417J6HKxYzat8X/5HklRyxLTkztQYXUQUd7FobeQ+UH1ezCk9RwA== + dependencies: + "@jupyter/ydoc" "^3.0.0" + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/cells" "^4.3.0" + "@jupyterlab/codeeditor" "^4.3.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/rendermime" "^4.3.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/dragdrop" "^2.1.5" + "@lumino/messaging" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" + +"@jupyterlab/coreutils@^6.3.0": + version "6.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/coreutils/-/coreutils-6.3.0.tgz#592b7aa640d82aac4be42bb5e95394d40e6bd159" + integrity sha512-zsoMx18JXfVEvMR4OVb+GR/AirXYEUBveySoY6/Z4Kv6vLZh2ZC+JZKgnlpPvql7D7Aa7tCUbSJdV33+fYELIQ== + dependencies: + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/signaling" "^2.1.3" minimist "~1.2.0" path-browserify "^1.0.0" url-parse "~1.5.4" -"@jupyterlab/debugger@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/debugger/-/debugger-4.2.5.tgz#af2dfc32a33a44cb2a10d20ad9dcf6f376b4f494" - integrity sha512-au/0RMAMdau3adrJ1QLnW3m7eAtzDJfTYLFeTgrcMAyQIyrmWLLX6cNLrRlJuYfnBzuJJV5N/9HGClEQEdxeLw== +"@jupyterlab/debugger@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/debugger/-/debugger-4.3.0.tgz#7730a102652c26e4519f99194618e34ba8377022" + integrity sha512-mNpcm0NufsWxdCe4hnvm0WTBOW1lIe1sJyO/uos7JSdXUFtknYmtoEobVuf2y/goT6X5guvekMl22UIZQBVgXw== dependencies: "@codemirror/state" "^6.4.1" - "@codemirror/view" "^6.26.0" - "@jupyter/react-components" "^0.15.3" - "@jupyter/ydoc" "^2.0.1" - "@jupyterlab/application" "^4.2.5" - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/cells" "^4.2.5" - "@jupyterlab/codeeditor" "^4.2.5" - "@jupyterlab/codemirror" "^4.2.5" - "@jupyterlab/console" "^4.2.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/docregistry" "^4.2.5" - "@jupyterlab/fileeditor" "^4.2.5" - "@jupyterlab/notebook" "^4.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/rendermime" "^4.2.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/commands" "^2.3.0" - "@lumino/coreutils" "^2.1.2" - "@lumino/datagrid" "^2.3.1" - "@lumino/disposable" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/polling" "^2.1.2" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" + "@codemirror/view" "^6.26.3" + "@jupyter/react-components" "^0.16.6" + "@jupyter/ydoc" "^3.0.0" + "@jupyterlab/application" "^4.3.0" + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/cells" "^4.3.0" + "@jupyterlab/codeeditor" "^4.3.0" + "@jupyterlab/codemirror" "^4.3.0" + "@jupyterlab/console" "^4.3.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/docregistry" "^4.3.0" + "@jupyterlab/fileeditor" "^4.3.0" + "@jupyterlab/notebook" "^4.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/rendermime" "^4.3.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/commands" "^2.3.1" + "@lumino/coreutils" "^2.2.0" + "@lumino/datagrid" "^2.4.1" + "@lumino/disposable" 
"^2.1.3" + "@lumino/messaging" "^2.0.2" + "@lumino/polling" "^2.1.3" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" "@vscode/debugprotocol" "^1.51.0" react "^18.2.0" -"@jupyterlab/docmanager@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/docmanager/-/docmanager-4.2.5.tgz#b33805c103f07ab0414e1a3bab978c99c7dd9e28" - integrity sha512-IjWdEMl9MI6kkioViQza67+uwQiqI5DEGBaLyFFZG3R2NFmo2zA1zNbmVMUomMil/lcnVZN60ZnifFhiRvh1rQ== - dependencies: - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/docregistry" "^4.2.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/statedb" "^4.2.5" - "@jupyterlab/statusbar" "^4.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/polling" "^2.1.2" - "@lumino/properties" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/docmanager@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/docmanager/-/docmanager-4.3.0.tgz#256588d7f4d1ff632f89103bbd75b65a8a2dd009" + integrity sha512-ptdvLpD5i6CPTxHeL+Q8Yih36+59jOdDPZgwfb3TC5Atx8SdsWhm2I0HA8G/j+Xq/2Grd5L3kwCkvQTG332Nxg== + dependencies: + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/docregistry" "^4.3.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/statedb" "^4.3.0" + "@jupyterlab/statusbar" "^4.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/messaging" "^2.0.2" + "@lumino/polling" "^2.1.3" + "@lumino/properties" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" react "^18.2.0" -"@jupyterlab/docregistry@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/docregistry/-/docregistry-4.2.5.tgz#3027ee9217d295413af3cbb0da111cd1d1a486b6" - integrity sha512-9lKh3PrbhPbm3ui2TRTFgdu7Nxqtdf1kf12HHk+v9rE3+Y8c8k3SrbpNTeBMn03mRZKYOsEe6nYPZkT1R3+ARg== - dependencies: - "@jupyter/ydoc" "^2.0.1" - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/codeeditor" "^4.2.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/rendermime" "^4.2.5" - "@jupyterlab/rendermime-interfaces" "^3.10.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/properties" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/docregistry@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/docregistry/-/docregistry-4.3.0.tgz#34a393c95aa48830b0cdab0ee4c16850830cf87c" + integrity sha512-FNJ7WNUDQrm0Fde+GqZqWUf7TYky2cznb7r3D31Anpbp5wggyDJqfqNmJ1EAehxFr/UxKDmDg4u3Zw1YjTLJGg== + dependencies: + "@jupyter/ydoc" "^3.0.0" + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/codeeditor" "^4.3.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/rendermime" "^4.3.0" + "@jupyterlab/rendermime-interfaces" "^3.11.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/messaging" "^2.0.2" + 
"@lumino/properties" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" react "^18.2.0" -"@jupyterlab/documentsearch@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/documentsearch/-/documentsearch-4.2.5.tgz#5e239efddd6ce6c66d39f3d1046fda876e340034" - integrity sha512-hjBprI8HMgr8dc4MMe5SCi24pPXeNYAq3PaqoDeYkVQwmK+aJexRba6k8V2GMXcifZCHbWiDysOTGsXyGTmizQ== - dependencies: - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/commands" "^2.3.0" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/polling" "^2.1.2" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/documentsearch@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/documentsearch/-/documentsearch-4.3.0.tgz#42b4c1d60dc3306f37833d78baeb9b35152ed6c3" + integrity sha512-nNMc+ldAYy4XeliHAzMdCIrt9lBVCnHvgtvkwV6zI91ve1YPXr/ak5Fsy9GHOXMGODkCLqAjo6uBIiQLO+g9eQ== + dependencies: + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/commands" "^2.3.1" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/messaging" "^2.0.2" + "@lumino/polling" "^2.1.3" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" react "^18.2.0" -"@jupyterlab/filebrowser@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/filebrowser/-/filebrowser-4.2.5.tgz#2019ebb328d64cc3f7b051f67f297597eda14620" - integrity sha512-nI/Rkcye7/2j4GM0BfMXOrAHNQ7aHBQAMZqiqBte3mPUV/AoGc+E4E4Iv4b4bTs2/nKwgc/8B9ZSgN4LeVEAuQ== - dependencies: - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/docmanager" "^4.2.5" - "@jupyterlab/docregistry" "^4.2.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/statedb" "^4.2.5" - "@jupyterlab/statusbar" "^4.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/domutils" "^2.0.1" - "@lumino/dragdrop" "^2.1.4" - "@lumino/messaging" "^2.0.1" - "@lumino/polling" "^2.1.2" - "@lumino/signaling" "^2.1.2" - "@lumino/virtualdom" "^2.0.1" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/filebrowser@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/filebrowser/-/filebrowser-4.3.0.tgz#b9e91995d582c7367abb32579c1da1fa74ef2056" + integrity sha512-6lm77YO+Z78WZiJTURTUniduI2XpBkLHsSiWkw+3AaESxGxDyAAxk4YJftlZe1AYaMi6Oh6Kl4MmW7XuN2HCQg== + dependencies: + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/docmanager" "^4.3.0" + "@jupyterlab/docregistry" "^4.3.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/statedb" "^4.3.0" + "@jupyterlab/statusbar" "^4.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/domutils" "^2.0.2" + "@lumino/dragdrop" "^2.1.5" + "@lumino/messaging" "^2.0.2" + "@lumino/polling" "^2.1.3" + "@lumino/signaling" "^2.1.3" + "@lumino/virtualdom" "^2.0.2" + "@lumino/widgets" "^2.5.0" react "^18.2.0" -"@jupyterlab/fileeditor@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/fileeditor/-/fileeditor-4.2.5.tgz#a69faad6fb0dbc1af4c0936719121ac11c8a6352" - integrity 
sha512-Daj38DN7jRLMBt+0Zwa7mW0XUWQTk/QOoS7ZInLEIXRcuYz5TcYBdFWFkJdUMr5mvq+RzasSXeaR6ZU5vXZSow== - dependencies: - "@jupyter/ydoc" "^2.0.1" - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/codeeditor" "^4.2.5" - "@jupyterlab/codemirror" "^4.2.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/docregistry" "^4.2.5" - "@jupyterlab/documentsearch" "^4.2.5" - "@jupyterlab/lsp" "^4.2.5" - "@jupyterlab/statusbar" "^4.2.5" - "@jupyterlab/toc" "^6.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/commands" "^2.3.0" - "@lumino/coreutils" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/fileeditor@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/fileeditor/-/fileeditor-4.3.0.tgz#b6ab4ffef899a0d14f59b840f1fa24642abe7ac2" + integrity sha512-WmWFGDxxXuNc3Jc/dNEChscOC1URZvR/YNE4bPNsZAZxXEzLhm538ELT5gWPHjUwlw4ZV+J//+w3avmQJJ+p9Q== + dependencies: + "@jupyter/ydoc" "^3.0.0" + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/codeeditor" "^4.3.0" + "@jupyterlab/codemirror" "^4.3.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/docregistry" "^4.3.0" + "@jupyterlab/documentsearch" "^4.3.0" + "@jupyterlab/lsp" "^4.3.0" + "@jupyterlab/statusbar" "^4.3.0" + "@jupyterlab/toc" "^6.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/commands" "^2.3.1" + "@lumino/coreutils" "^2.2.0" + "@lumino/messaging" "^2.0.2" + "@lumino/widgets" "^2.5.0" react "^18.2.0" regexp-match-indices "^1.0.2" -"@jupyterlab/galata@^5.0.1": - version "5.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/galata/-/galata-5.2.5.tgz#73b05b354e43ab859ab8e94eee52da4fef7eb1bf" - integrity sha512-H/Igp4HxULaLPYyFWsTqQzvcEXWtNh+jgrVfN0Gr2P3DrpA6Hacb4UQVn6kYdEJdQHcwr/F3iAVieG20/Z1+kw== - dependencies: - "@jupyterlab/application" "^4.2.5" - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/debugger" "^4.2.5" - "@jupyterlab/docmanager" "^4.2.5" - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/notebook" "^4.2.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/settingregistry" "^4.2.5" - "@lumino/coreutils" "^2.1.2" - "@playwright/test" "^1.43.1" +"@jupyterlab/galata@^5.3.0": + version "5.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/galata/-/galata-5.3.0.tgz#b9e0e731689a691911790c216410bff733a465d8" + integrity sha512-27F2XGkZwX3uuD3P0SQbJ5iOx/DZS+VRUO73VrS6YStNH6qw70De5jnTdPxZ5CnJw10lFaQbEboZ9g4798gEYg== + dependencies: + "@jupyterlab/application" "^4.3.0" + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/debugger" "^4.3.0" + "@jupyterlab/docmanager" "^4.3.0" + "@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/notebook" "^4.3.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/settingregistry" "^4.3.0" + "@lumino/coreutils" "^2.2.0" + "@playwright/test" "^1.48.0" "@stdlib/stats" "~0.0.13" fs-extra "^10.1.0" json5 "^2.2.3" @@ -620,236 +638,237 @@ vega-lite "^5.6.1" vega-statistics "^1.7.9" -"@jupyterlab/lsp@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/lsp/-/lsp-4.2.5.tgz#23502ebe48c93178e978b3f5bc1a3b74c105befd" - integrity sha512-pnIxXDHjJrPJ9+jZo4YqpF06HeIARtICIPpDPnpF/OW9Ukm3/7Votfh4cf30z/eaLMbCGi9PT4k/j8TH6uO2Yw== - dependencies: - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/codeeditor" "^4.2.5" - "@jupyterlab/codemirror" "^4.2.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/docregistry" "^4.2.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/translation" "^4.2.5" - 
"@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/lsp@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/lsp/-/lsp-4.3.0.tgz#b53ba93442ae4def24525e8c325c805887f950a9" + integrity sha512-mN7kR5MWKM+wnrXyDZOTnRe2xmlGjNb94V6ALKmFckKgGiFjAGBJkfrUjY0DkLjJWAzlaT8tjrxNDNyKdx9qwQ== + dependencies: + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/codeeditor" "^4.3.0" + "@jupyterlab/codemirror" "^4.3.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/docregistry" "^4.3.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/translation" "^4.3.0" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" lodash.mergewith "^4.6.1" vscode-jsonrpc "^6.0.0" vscode-languageserver-protocol "^3.17.0" vscode-ws-jsonrpc "~1.0.2" -"@jupyterlab/nbformat@^3.0.0 || ^4.0.0-alpha.21 || ^4.0.0", "@jupyterlab/nbformat@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/nbformat/-/nbformat-4.2.5.tgz#ab9f99b3b33c566b4fa50cbe79a6a4ae1af324b0" - integrity sha512-DF8bdlsEziUR5oKUr3Mm0wUx7kHZjlAtEjD6oJ8cOogQqTrMyBnUAgVjPr9QQob5J7qiyzz9aW2DYtaX+jFhng== - dependencies: - "@lumino/coreutils" "^2.1.2" - -"@jupyterlab/notebook@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/notebook/-/notebook-4.2.5.tgz#9515ea4e99a25af863290f5281b9347b0061c56f" - integrity sha512-GcnAV9u1t5SwwCSbvKDpUA5V0Ytxwc6VtvCzuM9FylvLuAH7PE4LgZlzz29fsPneV4ptoMmA5ExSLojHEhQVeQ== - dependencies: - "@jupyter/ydoc" "^2.0.1" - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/cells" "^4.2.5" - "@jupyterlab/codeeditor" "^4.2.5" - "@jupyterlab/codemirror" "^4.2.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/docregistry" "^4.2.5" - "@jupyterlab/documentsearch" "^4.2.5" - "@jupyterlab/lsp" "^4.2.5" - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/rendermime" "^4.2.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/settingregistry" "^4.2.5" - "@jupyterlab/statusbar" "^4.2.5" - "@jupyterlab/toc" "^6.2.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/domutils" "^2.0.1" - "@lumino/dragdrop" "^2.1.4" - "@lumino/messaging" "^2.0.1" - "@lumino/polling" "^2.1.2" - "@lumino/properties" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/virtualdom" "^2.0.1" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/nbformat@^3.0.0 || ^4.0.0-alpha.21 || ^4.0.0", "@jupyterlab/nbformat@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/nbformat/-/nbformat-4.3.0.tgz#8581a59e7b088876d741df3b1598931677e0bce8" + integrity sha512-7XfYrCN3eF00tJq3Z+fJd+d9AmoJIRvXEcjmcwRdddUkb44jVEKxZ9LGCRZ0m4QPDCMticyrqbXQpVMJIrNDeg== + dependencies: + "@lumino/coreutils" "^2.2.0" + +"@jupyterlab/notebook@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/notebook/-/notebook-4.3.0.tgz#03292ac51ae8ca85e938a936aa56361145371d31" + integrity sha512-2/nM9a9R9zrgBLg+k4hn8PeImx7RjUYTKW3OCVPwxxAh38RlTc98A8G9phKuLeMMWYXJZwObOPt0dOEI+j7bJA== + dependencies: + "@jupyter/ydoc" "^3.0.0" + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/cells" "^4.3.0" + "@jupyterlab/codeeditor" "^4.3.0" + "@jupyterlab/codemirror" "^4.3.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/docregistry" "^4.3.0" + "@jupyterlab/documentsearch" "^4.3.0" + "@jupyterlab/lsp" "^4.3.0" + 
"@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/rendermime" "^4.3.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/settingregistry" "^4.3.0" + "@jupyterlab/statusbar" "^4.3.0" + "@jupyterlab/toc" "^6.3.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/domutils" "^2.0.2" + "@lumino/dragdrop" "^2.1.5" + "@lumino/messaging" "^2.0.2" + "@lumino/polling" "^2.1.3" + "@lumino/properties" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/virtualdom" "^2.0.2" + "@lumino/widgets" "^2.5.0" react "^18.2.0" -"@jupyterlab/observables@^5.2.5": - version "5.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/observables/-/observables-5.2.5.tgz#3e54e2b33c2bad32488362306fb4da6d7ebea569" - integrity sha512-EvrvweCpLle3uZciUdhbEUU++o0S7yHBKWtqrKFgb9qxM1qYFMlSMXlJLjE/cEXNiq/aZhsruyXiVKZ1UpVsVQ== - dependencies: - "@lumino/algorithm" "^2.0.1" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/signaling" "^2.1.2" - -"@jupyterlab/outputarea@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/outputarea/-/outputarea-4.2.5.tgz#1fb6e651050ee15b6d395b28ed4584180a5a19a0" - integrity sha512-Oj/GVkEX5zHl/oOXl4Qmw05mdAB/+zEIuY9xsz5agBdeAD3c603PAR7Ep5UqSE4fltdnl+ILjSWyQPJgG4UEGg== - dependencies: - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/rendermime" "^4.2.5" - "@jupyterlab/rendermime-interfaces" "^3.10.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/translation" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/properties" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" - -"@jupyterlab/rendermime-interfaces@^3.10.5": - version "3.10.5" - resolved "https://registry.npmjs.org/@jupyterlab/rendermime-interfaces/-/rendermime-interfaces-3.10.5.tgz#ca63afab7e4070b24562d20575983979785aefef" - integrity sha512-wZq4QhmckMqEjImMxTLFVtj+E0GI82/i0nCuaEI6tReXOqVFDg5wG/ChD/B/g0Xz+uPqt92R5rzZTpzodkDiKA== - dependencies: - "@lumino/coreutils" "^1.11.0 || ^2.1.2" - "@lumino/widgets" "^1.37.2 || ^2.3.2" - -"@jupyterlab/rendermime@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/rendermime/-/rendermime-4.2.5.tgz#b7833d23c938b9e0d94d0dc763e1c20af000a9eb" - integrity sha512-2ZfKM4xNUKbuR1foA7B6nvXg8zRSai1GTEROcF0ievNYn3qYEmdYYW9XlP11+8SoFcM0a4WiTgocRCgVV5Fbiw== - dependencies: - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/rendermime-interfaces" "^3.10.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/translation" "^4.2.5" - "@lumino/coreutils" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/observables@^5.3.0": + version "5.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/observables/-/observables-5.3.0.tgz#1941ba061070594a3fb61680415e5f8575c9db23" + integrity sha512-MasQvRzHcu+ROzyLUwH8X9Qpv/A8M3SXkqnozzi6Ttcx7kVZsbRTvkSR5KQWESsY1FEu0r450e5VtjSy1QM9sw== + dependencies: + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/messaging" "^2.0.2" + "@lumino/signaling" "^2.1.3" + +"@jupyterlab/outputarea@^4.3.0": + version "4.3.0" + resolved 
"https://registry.npmjs.org/@jupyterlab/outputarea/-/outputarea-4.3.0.tgz#ea94d50ae90d144eb27a9c3564af87a975b46c6d" + integrity sha512-3TslhNnnaxtGdZ3j/G7YOOVCosH1fIKceDIByjccir7f6Z9mMz23BDffoSx25vmq0FsVJB+FPsYBHeyfTfrvtQ== + dependencies: + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/rendermime" "^4.3.0" + "@jupyterlab/rendermime-interfaces" "^3.11.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/translation" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/messaging" "^2.0.2" + "@lumino/properties" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" + +"@jupyterlab/rendermime-interfaces@^3.11.0": + version "3.11.0" + resolved "https://registry.npmjs.org/@jupyterlab/rendermime-interfaces/-/rendermime-interfaces-3.11.0.tgz#170b263124b5e3cdaab8f12e7dc6a7b85347ee65" + integrity sha512-Fn+H4iCBQow6IG+hWn7JzQQRQN4ePQz7gVBZ1s+dRvONnsOfAjs/Zgmbs1+0mZ/MQe23AKQoqV/Yqq7jbn3GhQ== + dependencies: + "@lumino/coreutils" "^1.11.0 || ^2.2.0" + "@lumino/widgets" "^1.37.2 || ^2.5.0" + +"@jupyterlab/rendermime@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/rendermime/-/rendermime-4.3.0.tgz#de1fe8fb350600193eff9dc9e215d537e41a4df9" + integrity sha512-D+S0RQxNZGCcpzBKXcUkEafS8HIaFOinqyzFQ0f6RMM9mbHRUoIRWG5ZBdpqK7FALQnOqKnz1Zyzawy2WkEIqg== + dependencies: + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/rendermime-interfaces" "^3.11.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/translation" "^4.3.0" + "@lumino/coreutils" "^2.2.0" + "@lumino/messaging" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" lodash.escape "^4.0.1" -"@jupyterlab/services@^7.2.5": - version "7.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/services/-/services-7.2.5.tgz#d98bf6999d53005fe978f6151d1060c1f6be5a95" - integrity sha512-Ya/jA8p8WOfiPPERinZasigsfSth54nNNWBQUrT2MEitdka3jVsjC3fR9R5XBpYQ59Qkczz782jMfXvaWNfCHQ== - dependencies: - "@jupyter/ydoc" "^2.0.1" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/settingregistry" "^4.2.5" - "@jupyterlab/statedb" "^4.2.5" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/polling" "^2.1.2" - "@lumino/properties" "^2.0.1" - "@lumino/signaling" "^2.1.2" +"@jupyterlab/services@^7.3.0": + version "7.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/services/-/services-7.3.0.tgz#c1cc3f0c0427ae55c2f6ca64317f0eb73c17476a" + integrity sha512-u9GWFMTEUJvDszz98tIUpaBOsUGSybQjwv+263obtCjaceezy87SReIsQefoI1Dh8SGfngGW7IOvThM/LNoYGw== + dependencies: + "@jupyter/ydoc" "^3.0.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/settingregistry" "^4.3.0" + "@jupyterlab/statedb" "^4.3.0" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/polling" "^2.1.3" + "@lumino/properties" "^2.0.2" + "@lumino/signaling" "^2.1.3" ws "^8.11.0" -"@jupyterlab/settingregistry@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/settingregistry/-/settingregistry-4.2.5.tgz#2ce28049c47ced6703c837766e6c2ff0e879b98b" - integrity sha512-RTHwFoldrP8h4hMxZrKafrOt3mLYKAcmUsnExkzKCqHuc3CIOh9hj+eN3gCh1mxjabbP9QIK0/08e89Rp/EG5w== - dependencies: - "@jupyterlab/nbformat" "^4.2.5" - "@jupyterlab/statedb" "^4.2.5" - "@lumino/commands" "^2.3.0" - "@lumino/coreutils" "^2.1.2" - 
"@lumino/disposable" "^2.1.2" - "@lumino/signaling" "^2.1.2" +"@jupyterlab/settingregistry@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/settingregistry/-/settingregistry-4.3.0.tgz#46d9c506b43b56f4eb8f2819114e180224024b50" + integrity sha512-+/1IOaANMI35CVO67yTKAo0cVau04MH0QFeJUv9DtY88CU50O6vtfh9+gPGQnl/dTwgHZNiZbfapyLXvLuASig== + dependencies: + "@jupyterlab/nbformat" "^4.3.0" + "@jupyterlab/statedb" "^4.3.0" + "@lumino/commands" "^2.3.1" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/signaling" "^2.1.3" "@rjsf/utils" "^5.13.4" ajv "^8.12.0" json5 "^2.2.3" -"@jupyterlab/statedb@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/statedb/-/statedb-4.2.5.tgz#9007e9b51eb79daae17ceb12abaa4ed3b707170b" - integrity sha512-GGP4NSkVzcn/zYZyjKId8OvDxq+JQTHEmiE2ayzUvvP4BwpGJ2GafY1V+QT5Tl+4SB0AzowpNud6XHUJ28M/tA== - dependencies: - "@lumino/commands" "^2.3.0" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/properties" "^2.0.1" - "@lumino/signaling" "^2.1.2" - -"@jupyterlab/statusbar@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/statusbar/-/statusbar-4.2.5.tgz#eb5c2553f834b31fe86f421f5a3b4b2ddfbf2fbc" - integrity sha512-YjZf9gClZc6HWF2BN0lSFYuHQ/Im8AM1wF+BHKyGh2qgnN4Z34/d87fCGjvX4yY5tLynw0fFiydNZMwQnkWqLA== - dependencies: - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/statedb@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/statedb/-/statedb-4.3.0.tgz#0242a0fb58ddd7cc464af4a3df277ae118aa68a3" + integrity sha512-NyME5GIHmTwV2MLIqtxV9hMxKa0v9AjAasN6xtDqhlyFwsPd4kI1dUAlYjxJ9Cbcc+z5K3/XNoFZyErOe/JQPQ== + dependencies: + "@lumino/commands" "^2.3.1" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/properties" "^2.0.2" + "@lumino/signaling" "^2.1.3" + +"@jupyterlab/statusbar@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/statusbar/-/statusbar-4.3.0.tgz#1482d69fe471dc61fc568de6f58ac6536904fb3a" + integrity sha512-ds8NB5MacSyzSYFGxTBtdxQ8RbfcUeCOSR8PLMqZ+DQuqAlyMAuzqNOGDHLmNVChSnzB97wLUeRWVkGj74PD4g== + dependencies: + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/messaging" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" react "^18.2.0" -"@jupyterlab/toc@^6.2.5": - version "6.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/toc/-/toc-6.2.5.tgz#f878d9fac248ecafd398684704924c522c402174" - integrity sha512-W2xQkVA7v87MaBcUB7OjzkOUY51bZzLL8S6esSARrNYdh5AY4rFKfnw0sKHAkSmrqB2DQLEZl/1z1GTNmyuYhQ== - dependencies: - "@jupyterlab/apputils" "^4.3.5" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/docregistry" "^4.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/rendermime" "^4.2.5" - "@jupyterlab/rendermime-interfaces" "^3.10.5" - "@jupyterlab/translation" "^4.2.5" - "@jupyterlab/ui-components" "^4.2.5" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/toc@^6.3.0": + version "6.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/toc/-/toc-6.3.0.tgz#18174d803fa00c27b503d355f5fbe5adf5d2210c" + integrity 
sha512-NjZqFllvpS4BlQzOfDa+YpQqeU1h8EQqaH9fvnfv48csUZ02P7ffvKcyBjzAcMpfhEfaXxIS1TqWPGC6jv+2hA== + dependencies: + "@jupyter/react-components" "^0.16.6" + "@jupyterlab/apputils" "^4.4.0" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/docregistry" "^4.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/rendermime" "^4.3.0" + "@jupyterlab/rendermime-interfaces" "^3.11.0" + "@jupyterlab/translation" "^4.3.0" + "@jupyterlab/ui-components" "^4.3.0" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/messaging" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/widgets" "^2.5.0" react "^18.2.0" -"@jupyterlab/translation@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/translation/-/translation-4.2.5.tgz#32f01cbc556dc0083fbff695f779c017e2cc2ed1" - integrity sha512-r2QABaUl2cm0d+8klviLc+l4+jssRrNUDnZ3hBVluCN0QowmwsovWc7V+R0VWIULzB5Jzf79RfJumXfVOpgUfg== - dependencies: - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/rendermime-interfaces" "^3.10.5" - "@jupyterlab/services" "^7.2.5" - "@jupyterlab/statedb" "^4.2.5" - "@lumino/coreutils" "^2.1.2" - -"@jupyterlab/ui-components@^4.2.5": - version "4.2.5" - resolved "https://registry.npmjs.org/@jupyterlab/ui-components/-/ui-components-4.2.5.tgz#be78e9aaa95164ede17ea7aa5d401ad0f70aca15" - integrity sha512-IZ6M4U48IJbthe4nQrOo8+JQeIrUxGQCK/ReKECx9xg9XCsvQ37iA1ZwOYXv5MhlF2bmIF7dXcwI7mpJgUAgOg== - dependencies: - "@jupyter/react-components" "^0.15.3" - "@jupyter/web-components" "^0.15.3" - "@jupyterlab/coreutils" "^6.2.5" - "@jupyterlab/observables" "^5.2.5" - "@jupyterlab/rendermime-interfaces" "^3.10.5" - "@jupyterlab/translation" "^4.2.5" - "@lumino/algorithm" "^2.0.1" - "@lumino/commands" "^2.3.0" - "@lumino/coreutils" "^2.1.2" - "@lumino/disposable" "^2.1.2" - "@lumino/messaging" "^2.0.1" - "@lumino/polling" "^2.1.2" - "@lumino/properties" "^2.0.1" - "@lumino/signaling" "^2.1.2" - "@lumino/virtualdom" "^2.0.1" - "@lumino/widgets" "^2.3.2" +"@jupyterlab/translation@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/translation/-/translation-4.3.0.tgz#426756c8e63b70f4643848b0ce3eb384979159b4" + integrity sha512-um8rbFXs4S1AtzHqtgnh80ttJ50mid1dVwgD+gz+M3tFGqG9ZvTWLfWOeIOCzSGyv0dtYtuWUf9M6S1S9jBNfw== + dependencies: + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/rendermime-interfaces" "^3.11.0" + "@jupyterlab/services" "^7.3.0" + "@jupyterlab/statedb" "^4.3.0" + "@lumino/coreutils" "^2.2.0" + +"@jupyterlab/ui-components@^4.3.0": + version "4.3.0" + resolved "https://registry.npmjs.org/@jupyterlab/ui-components/-/ui-components-4.3.0.tgz#925284ebdf121940de7ef8db3691cb68c6131c79" + integrity sha512-P0axMQ61aqgypTherhbVMgj47iWYhZgDKNj7YeJTQgiWotWYNmF2AHiiE7H2orIklgzCZh0OQKvcCQLo2cQW1A== + dependencies: + "@jupyter/react-components" "^0.16.6" + "@jupyter/web-components" "^0.16.6" + "@jupyterlab/coreutils" "^6.3.0" + "@jupyterlab/observables" "^5.3.0" + "@jupyterlab/rendermime-interfaces" "^3.11.0" + "@jupyterlab/translation" "^4.3.0" + "@lumino/algorithm" "^2.0.2" + "@lumino/commands" "^2.3.1" + "@lumino/coreutils" "^2.2.0" + "@lumino/disposable" "^2.1.3" + "@lumino/messaging" "^2.0.2" + "@lumino/polling" "^2.1.3" + "@lumino/properties" "^2.0.2" + "@lumino/signaling" "^2.1.3" + "@lumino/virtualdom" "^2.0.2" + "@lumino/widgets" "^2.5.0" "@rjsf/core" "^5.13.4" "@rjsf/utils" "^5.13.4" react "^18.2.0" @@ -937,7 +956,7 @@ dependencies: "@lezer/common" "^1.0.0" -"@lezer/markdown@^1.0.0", "@lezer/markdown@^1.2.0": +"@lezer/markdown@^1.0.0": version "1.3.1" resolved 
"https://registry.npmjs.org/@lezer/markdown/-/markdown-1.3.1.tgz#2193012296927f383102d7a2b035f323759315b3" integrity sha512-DGlzU/i8DC8k0uz1F+jeePrkATl0jWakauTzftMQOcbaMkHbNSRki/4E2tOzJWsVpoKYhe7iTJ03aepdwVUXUA== @@ -945,6 +964,14 @@ "@lezer/common" "^1.0.0" "@lezer/highlight" "^1.0.0" +"@lezer/markdown@^1.3.0": + version "1.3.2" + resolved "https://registry.npmjs.org/@lezer/markdown/-/markdown-1.3.2.tgz#9d648b2a6cb47523f3d7ab494eee8c7be4f1ea9e" + integrity sha512-Wu7B6VnrKTbBEohqa63h5vxXjiC4pO5ZQJ/TDbhJxPQaaIoRD/6UVDhSDtVsCwVZV12vvN9KxuLL3ATMnlG0oQ== + dependencies: + "@lezer/common" "^1.0.0" + "@lezer/highlight" "^1.0.0" + "@lezer/php@^1.0.0": version "1.0.2" resolved "https://registry.npmjs.org/@lezer/php/-/php-1.0.2.tgz#7c291631fc1e7f7efe99977522bc48bdc732658a" @@ -981,12 +1008,12 @@ "@lezer/highlight" "^1.0.0" "@lezer/lr" "^1.0.0" -"@lumino/algorithm@^2.0.1", "@lumino/algorithm@^2.0.2": +"@lumino/algorithm@^2.0.2": version "2.0.2" resolved "https://registry.npmjs.org/@lumino/algorithm/-/algorithm-2.0.2.tgz#d211da98c92be0271afde96b949982e29178ae48" integrity sha512-cI8yJ2+QK1yM5ZRU3Kuaw9fJ/64JEDZEwWWp7+U0cd/mvcZ44BGdJJ29w+tIet1QXxPAvnsUleWyQ5qm4qUouA== -"@lumino/application@^2.3.1": +"@lumino/application@^2.4.1": version "2.4.1" resolved "https://registry.npmjs.org/@lumino/application/-/application-2.4.1.tgz#6d312a4f8f8e14e61d1b784339f552f4a065f1ee" integrity sha512-XdCAlNajcsGdK6ep+s6QC70EY+uBnP3kDiWthFLl3EMkvkYwmjOPzIPGlwLEd9Hu0XCO+1Vd2PlpeTnxw5D3/g== @@ -1002,7 +1029,7 @@ dependencies: "@lumino/algorithm" "^2.0.2" -"@lumino/commands@^2.3.0", "@lumino/commands@^2.3.1": +"@lumino/commands@^2.3.1": version "2.3.1" resolved "https://registry.npmjs.org/@lumino/commands/-/commands-2.3.1.tgz#4ab5ec6521fefd3a9ff7ae0983c645483b9ecd07" integrity sha512-DpX1kkE4PhILpvK1T4ZnaFb6UP4+YTkdZifvN3nbiomD64O2CTd+wcWIBpZMgy6MMgbVgrE8dzHxHk1EsKxNxw== @@ -1015,14 +1042,14 @@ "@lumino/signaling" "^2.1.3" "@lumino/virtualdom" "^2.0.2" -"@lumino/coreutils@^1.11.0 || ^2.0.0", "@lumino/coreutils@^1.11.0 || ^2.1.2", "@lumino/coreutils@^2.1.2", "@lumino/coreutils@^2.2.0": +"@lumino/coreutils@^1.11.0 || ^2.0.0", "@lumino/coreutils@^1.11.0 || ^2.2.0", "@lumino/coreutils@^2.2.0": version "2.2.0" resolved "https://registry.npmjs.org/@lumino/coreutils/-/coreutils-2.2.0.tgz#3f9d5c36f2513f067b2563c7ad3b33f43905a4e2" integrity sha512-x5wnQ/GjWBayJ6vXVaUi6+Q6ETDdcUiH9eSfpRZFbgMQyyM6pi6baKqJBK2CHkCc/YbAEl6ipApTgm3KOJ/I3g== dependencies: "@lumino/algorithm" "^2.0.2" -"@lumino/datagrid@^2.3.1": +"@lumino/datagrid@^2.4.1": version "2.4.1" resolved "https://registry.npmjs.org/@lumino/datagrid/-/datagrid-2.4.1.tgz#6624d170d2695a707fc92d7364a08514778f2a4b" integrity sha512-9sJg8UU/hqcKDqO5Rd0Blm2JYKT9nyAK/kuPYeaQc4ZLvtgn4SoOenNaShLDr3Wp54quBM8npAlk1mWG+yYC2g== @@ -1037,19 +1064,19 @@ "@lumino/signaling" "^2.1.3" "@lumino/widgets" "^2.5.0" -"@lumino/disposable@^1.10.0 || ^2.0.0", "@lumino/disposable@^2.1.2", "@lumino/disposable@^2.1.3": +"@lumino/disposable@^1.10.0 || ^2.0.0", "@lumino/disposable@^2.1.3": version "2.1.3" resolved "https://registry.npmjs.org/@lumino/disposable/-/disposable-2.1.3.tgz#cd2b11d82896eb654c2a528c9ff79a85ccf88d74" integrity sha512-k5KXy/+T3UItiWHY4WwQawnsJnGo3aNtP5CTRKqo4+tbTNuhc3rTSvygJlNKIbEfIZXW2EWYnwfFDozkYx95eA== dependencies: "@lumino/signaling" "^2.1.3" -"@lumino/domutils@^2.0.1", "@lumino/domutils@^2.0.2": +"@lumino/domutils@^2.0.2": version "2.0.2" resolved "https://registry.npmjs.org/@lumino/domutils/-/domutils-2.0.2.tgz#b05973a179db2da71239976f25194f65c0f8eb98" integrity 
sha512-2Kp6YHaMNI1rKB0PrALvOsZBHPy2EvVVAvJLWjlCm8MpWOVETjFp0MA9QpMubT9I76aKbaI5s1o1NJyZ8Y99pQ== -"@lumino/dragdrop@^2.1.4", "@lumino/dragdrop@^2.1.5": +"@lumino/dragdrop@^2.1.5": version "2.1.5" resolved "https://registry.npmjs.org/@lumino/dragdrop/-/dragdrop-2.1.5.tgz#2c178ac3e7520551f08ffb4f31521d87940dcce1" integrity sha512-zqwR4GakrQBKZOW6S5pj2nfrQDurOErAoe9x3HS3BKLa1AzWA+t9PD5NESOKd81NqXFHjiMirSyFkTUs6pw+uA== @@ -1062,7 +1089,7 @@ resolved "https://registry.npmjs.org/@lumino/keyboard/-/keyboard-2.0.2.tgz#8ff5e360b8960716f45b742845bea6f3c5f44760" integrity sha512-icRUpvswDaFjqmAJNbQRb/aTu6Iugo6Y2oC08TiIwhQtLS9W+Ee9VofdqvbPSvCm6DkyP+DCWMuA3KXZ4V4g4g== -"@lumino/messaging@^2.0.1", "@lumino/messaging@^2.0.2": +"@lumino/messaging@^2.0.2": version "2.0.2" resolved "https://registry.npmjs.org/@lumino/messaging/-/messaging-2.0.2.tgz#133b94d1fa1c67ad60e622a32acaf682faf05aaa" integrity sha512-2sUF07cYA0f3mDil41Eh5sfBk0aGAH/mOh1I4+vyRUsKyBqp4WTUtpJFd8xVJGAntygxwnebIygkIaXXTIQvxA== @@ -1070,7 +1097,7 @@ "@lumino/algorithm" "^2.0.2" "@lumino/collections" "^2.0.2" -"@lumino/polling@^2.1.2": +"@lumino/polling@^2.1.3": version "2.1.3" resolved "https://registry.npmjs.org/@lumino/polling/-/polling-2.1.3.tgz#395fcfba3ada12439d5cc9592b68a89d781a54ed" integrity sha512-WEZk96ddK6eHEhdDkFUAAA40EOLit86QVbqQqnbPmhdGwFogek26Kq9b1U273LJeirv95zXCATOJAkjRyb7D+w== @@ -1079,12 +1106,12 @@ "@lumino/disposable" "^2.1.3" "@lumino/signaling" "^2.1.3" -"@lumino/properties@^2.0.1", "@lumino/properties@^2.0.2": +"@lumino/properties@^2.0.2": version "2.0.2" resolved "https://registry.npmjs.org/@lumino/properties/-/properties-2.0.2.tgz#39213876b06a917eae72e8ea38b71daa1c699682" integrity sha512-b312oA3Bh97WFK8efXejYmC3DVJmvzJk72LQB7H3fXhfqS5jUWvL7MSnNmgcQvGzl9fIhDWDWjhtSTi0KGYYBg== -"@lumino/signaling@^1.10.0 || ^2.0.0", "@lumino/signaling@^2.1.2", "@lumino/signaling@^2.1.3": +"@lumino/signaling@^1.10.0 || ^2.0.0", "@lumino/signaling@^2.1.3": version "2.1.3" resolved "https://registry.npmjs.org/@lumino/signaling/-/signaling-2.1.3.tgz#612419e6948ce77c00328f9eff5ecd995c960a70" integrity sha512-9Wd4iMk8F1i6pYjy65bqKuPlzQMicyL9xy1/ccS20kovPcfD074waneL/7BVe+3M8i+fGa3x2qjbWrBzOdTdNw== @@ -1092,14 +1119,14 @@ "@lumino/algorithm" "^2.0.2" "@lumino/coreutils" "^2.2.0" -"@lumino/virtualdom@^2.0.1", "@lumino/virtualdom@^2.0.2": +"@lumino/virtualdom@^2.0.2": version "2.0.2" resolved "https://registry.npmjs.org/@lumino/virtualdom/-/virtualdom-2.0.2.tgz#927c9803ebd31cd97ca2c599ec9a5d509afb3b2d" integrity sha512-HYZThOtZSoknjdXA102xpy5CiXtTFCVz45EXdWeYLx3NhuEwuAIX93QBBIhupalmtFlRg1yhdDNV40HxJ4kcXg== dependencies: "@lumino/algorithm" "^2.0.2" -"@lumino/widgets@^1.37.2 || ^2.3.2", "@lumino/widgets@^2.3.2", "@lumino/widgets@^2.5.0": +"@lumino/widgets@^1.37.2 || ^2.5.0", "@lumino/widgets@^2.5.0": version "2.5.0" resolved "https://registry.npmjs.org/@lumino/widgets/-/widgets-2.5.0.tgz#7e37d86dbbc4eed1f85aa199b9fffa4919aa1e3e" integrity sha512-RSRpc6aIEiuw79jqWUHYWXLJ2GBy7vhwuqgo94UVzg6oeh3XBECX0OvXGjK2k7N2BhmRrIs9bXky7Dm861S6mQ== @@ -1126,7 +1153,7 @@ resolved "https://registry.npmjs.org/@microsoft/fast-element/-/fast-element-1.13.0.tgz#d390ff13697064a48dc6ad6bb332a5f5489f73f8" integrity sha512-iFhzKbbD0cFRo9cEzLS3Tdo9BYuatdxmCEKCpZs1Cro/93zNMpZ/Y9/Z7SknmW6fhDZbpBvtO8lLh9TFEcNVAQ== -"@microsoft/fast-foundation@^2.49.4", "@microsoft/fast-foundation@^2.49.6": +"@microsoft/fast-foundation@^2.49.4": version "2.49.6" resolved 
"https://registry.npmjs.org/@microsoft/fast-foundation/-/fast-foundation-2.49.6.tgz#0bdee7d28dcf93918075618359b083a676d2891c" integrity sha512-DZVr+J/NIoskFC1Y6xnAowrMkdbf2d5o7UyWK6gW5AiQ6S386Ql8dw4KcC4kHaeE1yL2CKvweE79cj6ZhJhTvA== @@ -1136,14 +1163,6 @@ tabbable "^5.2.0" tslib "^1.13.0" -"@microsoft/fast-react-wrapper@^0.3.22": - version "0.3.24" - resolved "https://registry.npmjs.org/@microsoft/fast-react-wrapper/-/fast-react-wrapper-0.3.24.tgz#caed362a95fac88f9715a4b82425c395fba550c2" - integrity sha512-sRnSBIKaO42p4mYoYR60spWVkg89wFxFAgQETIMazAm2TxtlsnsGszJnTwVhXq2Uz+XNiD8eKBkfzK5c/i6/Kw== - dependencies: - "@microsoft/fast-element" "^1.13.0" - "@microsoft/fast-foundation" "^2.49.6" - "@microsoft/fast-web-utilities@^5.4.1": version "5.4.1" resolved "https://registry.npmjs.org/@microsoft/fast-web-utilities/-/fast-web-utilities-5.4.1.tgz#8e3082ee2ff2b5467f17e7cb1fb01b0e4906b71f" @@ -1151,7 +1170,7 @@ dependencies: exenv-es6 "^1.1.1" -"@playwright/test@^1.43.1", "@playwright/test@^1.48.2": +"@playwright/test@^1.48.0", "@playwright/test@^1.48.2": version "1.48.2" resolved "https://registry.npmjs.org/@playwright/test/-/test-1.48.2.tgz#87dd40633f980872283404c8142a65744d3f13d6" integrity sha512-54w1xCWfXuax7dz4W2M9uw0gDyh+ti/0K/MxcCUxChFh37kkdxPdfZDw5QBbuPUJHr1CiHJ1hXgSs+GgeQc5Zw== From e0d5fe86bd50219ac5f63185eabc3595cb5d3e29 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Fri, 25 Oct 2024 13:13:53 +0100 Subject: [PATCH 404/496] Add refresh button to widgets UI --- .../common/widgets/test_widgets.py | 10 +++ src/codeflare_sdk/common/widgets/widgets.py | 72 ++++++++++++------- 2 files changed, 56 insertions(+), 26 deletions(-) diff --git a/src/codeflare_sdk/common/widgets/test_widgets.py b/src/codeflare_sdk/common/widgets/test_widgets.py index e01b9193..12c23854 100644 --- a/src/codeflare_sdk/common/widgets/test_widgets.py +++ b/src/codeflare_sdk/common/widgets/test_widgets.py @@ -296,6 +296,9 @@ def test_ray_cluster_manager_widgets_init(mocker, capsys): assert ( ray_cluster_manager_instance.ray_dashboard_button == mock_button.return_value ), "ray_dashboard_button is not set correctly" + assert ( + ray_cluster_manager_instance.refresh_data_button == mock_button.return_value + ), "refresh_data_button is not set correctly" assert ( ray_cluster_manager_instance.raycluster_data_output == mock_output.return_value ), "raycluster_data_output is not set correctly" @@ -310,6 +313,7 @@ def test_ray_cluster_manager_widgets_init(mocker, capsys): mock_delete_button = MagicMock() mock_list_jobs_button = MagicMock() mock_ray_dashboard_button = MagicMock() + mock_refresh_dataframe_button = MagicMock() mock_javascript = mocker.patch("codeflare_sdk.common.widgets.widgets.Javascript") ray_cluster_manager_instance.url_output = MagicMock() @@ -332,6 +336,12 @@ def test_ray_cluster_manager_widgets_init(mocker, capsys): f'window.open("{mock_dashboard_uri.return_value}/#/jobs", "_blank");' ) + # Simulate clicking the refresh data button + ray_cluster_manager_instance._on_refresh_data_button_click( + mock_refresh_dataframe_button + ) + mock_fetch_cluster_data.assert_called_with(namespace) + # Simulate clicking the Ray dashboard button ray_cluster_manager_instance.classification_widget.value = "test-cluster-1" ray_cluster_manager_instance._on_ray_dashboard_button_click( diff --git a/src/codeflare_sdk/common/widgets/widgets.py b/src/codeflare_sdk/common/widgets/widgets.py index 8a13a4d4..bc8ebfe3 100644 --- a/src/codeflare_sdk/common/widgets/widgets.py +++ b/src/codeflare_sdk/common/widgets/widgets.py @@ -74,6 
+74,12 @@ def __init__(self, ray_clusters_df: pd.DataFrame, namespace: str = None): tooltip="Open the Ray Dashboard in a new tab", layout=widgets.Layout(width="auto"), ) + self.refresh_data_button = widgets.Button( + description="Refresh Data", + icon="refresh", + tooltip="Refresh the list of Ray Clusters", + layout=widgets.Layout(width="auto", left="1em"), + ) # Set up interactions self._initialize_callbacks() @@ -95,6 +101,9 @@ def _initialize_callbacks(self): self.ray_dashboard_button.on_click( lambda b: self._on_ray_dashboard_button_click(b) ) + self.refresh_data_button.on_click( + lambda b: self._on_refresh_data_button_click(b) + ) def _trigger_initial_display(self): """ @@ -138,31 +147,17 @@ def _on_delete_button_click(self, b): _on_delete_button_click handles the event when the Delete Button is clicked, deleting the selected cluster. """ cluster_name = self.classification_widget.value - namespace = self.ray_clusters_df[ - self.ray_clusters_df["Name"] == self.classification_widget.value - ]["Namespace"].values[0] - _delete_cluster(cluster_name, namespace) + _delete_cluster(cluster_name, self.namespace) with self.user_output: self.user_output.clear_output() print( - f"Cluster {cluster_name} in the {namespace} namespace was deleted successfully." + f"Cluster {cluster_name} in the {self.namespace} namespace was deleted successfully." ) # Refresh the dataframe - new_df = _fetch_cluster_data(namespace) - self.ray_clusters_df = new_df - if new_df.empty: - self.classification_widget.close() - self.delete_button.close() - self.list_jobs_button.close() - self.ray_dashboard_button.close() - with self.raycluster_data_output: - self.raycluster_data_output.clear_output() - print(f"No clusters found in the {namespace} namespace.") - else: - self.classification_widget.options = new_df["Name"].tolist() + self._refresh_dataframe() def _on_list_jobs_button_click(self, b): """ @@ -171,15 +166,12 @@ def _on_list_jobs_button_click(self, b): from codeflare_sdk import Cluster cluster_name = self.classification_widget.value - namespace = self.ray_clusters_df[ - self.ray_clusters_df["Name"] == self.classification_widget.value - ]["Namespace"].values[0] # Suppress from Cluster Object initialisation widgets and outputs with widgets.Output(), contextlib.redirect_stdout( io.StringIO() ), contextlib.redirect_stderr(io.StringIO()): - cluster = Cluster(ClusterConfiguration(cluster_name, namespace)) + cluster = Cluster(ClusterConfiguration(cluster_name, self.namespace)) dashboard_url = cluster.cluster_dashboard_uri() with self.user_output: @@ -197,15 +189,12 @@ def _on_ray_dashboard_button_click(self, b): from codeflare_sdk import Cluster cluster_name = self.classification_widget.value - namespace = self.ray_clusters_df[ - self.ray_clusters_df["Name"] == self.classification_widget.value - ]["Namespace"].values[0] # Suppress from Cluster Object initialisation widgets and outputs with widgets.Output(), contextlib.redirect_stdout( io.StringIO() ), contextlib.redirect_stderr(io.StringIO()): - cluster = Cluster(ClusterConfiguration(cluster_name, namespace)) + cluster = Cluster(ClusterConfiguration(cluster_name, self.namespace)) dashboard_url = cluster.cluster_dashboard_uri() with self.user_output: @@ -214,11 +203,42 @@ def _on_ray_dashboard_button_click(self, b): with self.url_output: display(Javascript(f'window.open("{dashboard_url}", "_blank");')) + def _on_refresh_data_button_click(self, b): + """ + _on_refresh_button_click handles the event when the Refresh Data button is clicked, refreshing the list of Ray Clusters. 
+ """ + self.refresh_data_button.disabled = True + self._refresh_dataframe() + self.refresh_data_button.disabled = False + + def _refresh_dataframe(self): + """ + _refresh_data function refreshes the list of Ray Clusters. + """ + new_df = _fetch_cluster_data(self.namespace) + self.ray_clusters_df = new_df + if new_df.empty: + self.classification_widget.close() + self.delete_button.close() + self.list_jobs_button.close() + self.ray_dashboard_button.close() + self.refresh_data_button.close() + with self.raycluster_data_output: + self.raycluster_data_output.clear_output() + print(f"No clusters found in the {self.namespace} namespace.") + else: + self.classification_widget.options = new_df["Name"].tolist() + def display_widgets(self): display(widgets.VBox([self.classification_widget, self.raycluster_data_output])) display( widgets.HBox( - [self.delete_button, self.list_jobs_button, self.ray_dashboard_button] + [ + self.delete_button, + self.list_jobs_button, + self.ray_dashboard_button, + self.refresh_data_button, + ] ), self.url_output, self.user_output, From 20b08b9131588d111f8042bb4ad67aad70eb75a9 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 30 Oct 2024 11:34:02 +0000 Subject: [PATCH 405/496] Add refresh button bug fixes and test case to UI e2e test --- docs/sphinx/user-docs/ui-widgets.rst | 8 +- src/codeflare_sdk/common/widgets/widgets.py | 78 ++++++++++++------ .../tests/widget_notebook_example.test.ts | 40 ++++++--- .../widgets-cell-0-linux.png | Bin 16694 -> 0 bytes .../widgets-cell-2-linux.png | Bin 42051 -> 0 bytes .../widgets-cell-3-linux.png | Bin 7513 -> 0 bytes .../widgets-cell-4-linux.png | Bin 3461 -> 0 bytes .../widgets-cell-5-linux.png | Bin 9832 -> 0 bytes 8 files changed, 87 insertions(+), 39 deletions(-) delete mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-0-linux.png delete mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-2-linux.png delete mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-3-linux.png delete mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-4-linux.png delete mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-5-linux.png diff --git a/docs/sphinx/user-docs/ui-widgets.rst b/docs/sphinx/user-docs/ui-widgets.rst index 6c797e04..92335423 100644 --- a/docs/sphinx/user-docs/ui-widgets.rst +++ b/docs/sphinx/user-docs/ui-widgets.rst @@ -39,13 +39,15 @@ requests and limits along with the clusters status. Above is a list of two Ray Clusters ``raytest`` and ``raytest2`` each of those headings is clickable and will update the table to view the -selected Cluster's information. There are three buttons under the table -``Cluster Down``, ``View Jobs`` and ``Open Ray Dashboard``. \* The +selected Cluster's information. There are four buttons under the table +``Cluster Down``, ``View Jobs``, ``Open Ray Dashboard``, and ``Refresh Data``. \* The ``Cluster Down`` button will delete the selected Cluster. \* The ``View Jobs`` button will try to open the Ray Dashboard's Jobs view in a Web Browser. The link will also be printed to the console. \* The ``Open Ray Dashboard`` button will try to open the Ray Dashboard view in -a Web Browser. The link will also be printed to the console. +a Web Browser. The link will also be printed to the console. \* The +``Refresh Data`` button will refresh the list of RayClusters, the spec, and +the status of the Ray Cluster. 
The UI Table can be viewed by calling the following function. diff --git a/src/codeflare_sdk/common/widgets/widgets.py b/src/codeflare_sdk/common/widgets/widgets.py index bc8ebfe3..6f3283ce 100644 --- a/src/codeflare_sdk/common/widgets/widgets.py +++ b/src/codeflare_sdk/common/widgets/widgets.py @@ -119,28 +119,8 @@ def _on_cluster_click(self, selection_change): _on_cluster_click handles the event when a cluster is selected from the toggle buttons, updating the output with cluster details. """ new_value = selection_change["new"] - self.raycluster_data_output.clear_output() - ray_clusters_df = _fetch_cluster_data(self.namespace) - self.classification_widget.options = ray_clusters_df["Name"].tolist() - with self.raycluster_data_output: - display( - HTML( - ray_clusters_df[ray_clusters_df["Name"] == new_value][ - [ - "Name", - "Namespace", - "Num Workers", - "Head GPUs", - "Head CPU Req~Lim", - "Head Memory Req~Lim", - "Worker GPUs", - "Worker CPU Req~Lim", - "Worker Memory Req~Lim", - "status", - ] - ].to_html(escape=False, index=False, border=2) - ) - ) + self.classification_widget.value = new_value + self._refresh_dataframe() def _on_delete_button_click(self, b): """ @@ -215,9 +195,8 @@ def _refresh_dataframe(self): """ _refresh_data function refreshes the list of Ray Clusters. """ - new_df = _fetch_cluster_data(self.namespace) - self.ray_clusters_df = new_df - if new_df.empty: + self.ray_clusters_df = _fetch_cluster_data(self.namespace) + if self.ray_clusters_df.empty: self.classification_widget.close() self.delete_button.close() self.list_jobs_button.close() @@ -227,7 +206,54 @@ def _refresh_dataframe(self): self.raycluster_data_output.clear_output() print(f"No clusters found in the {self.namespace} namespace.") else: - self.classification_widget.options = new_df["Name"].tolist() + # Store the current selection if it still exists (Was not previously deleted). + selected_cluster = ( + self.classification_widget.value + if self.classification_widget.value + in self.ray_clusters_df["Name"].tolist() + else None + ) + + # Update list of Ray Clusters. + self.classification_widget.options = self.ray_clusters_df["Name"].tolist() + + # If the selected cluster exists, preserve the selection to remain viewing the currently selected cluster. + # If it does not exist, default to the first available cluster. + if selected_cluster: + self.classification_widget.value = selected_cluster + else: + self.classification_widget.value = self.ray_clusters_df["Name"].iloc[0] + + # Update the output with the current Ray Cluster details. + self._display_cluster_details() + + def _display_cluster_details(self): + """ + _display_cluster_details function displays the selected cluster details in the output widget. 
+ """ + self.raycluster_data_output.clear_output() + selected_cluster = self.ray_clusters_df[ + self.ray_clusters_df["Name"] == self.classification_widget.value + ] + with self.raycluster_data_output: + display( + HTML( + selected_cluster[ + [ + "Name", + "Namespace", + "Num Workers", + "Head GPUs", + "Head CPU Req~Lim", + "Head Memory Req~Lim", + "Worker GPUs", + "Worker CPU Req~Lim", + "Worker Memory Req~Lim", + "status", + ] + ].to_html(escape=False, index=False, border=2) + ) + ) def display_widgets(self): display(widgets.VBox([self.classification_widget, self.raycluster_data_output])) diff --git a/ui-tests/tests/widget_notebook_example.test.ts b/ui-tests/tests/widget_notebook_example.test.ts index 823a73f4..db746e00 100644 --- a/ui-tests/tests/widget_notebook_example.test.ts +++ b/ui-tests/tests/widget_notebook_example.test.ts @@ -36,6 +36,8 @@ test.describe("Visual Regression", () => { // Hide the cell toolbar before capturing the screenshots await page.addStyleTag({ content: '.jp-cell-toolbar { display: none !important; }' }); + // Hide the file explorer + await page.keyboard.press('Control+Shift+F'); const captures: (Buffer | null)[] = []; // Array to store cell screenshots const cellCount = await page.notebook.getCellCount(); @@ -109,30 +111,48 @@ test.describe("Visual Regression", () => { await runPreviousCell(page, cellCount, '(, False)'); - // view_clusters table with buttons - await interactWithWidget(page, upDownWidgetCellIndex, 'input[type="checkbox"]', async (checkbox) => { - await checkbox.click(); - const isChecked = await checkbox.isChecked(); - expect(isChecked).toBe(false); - }); + // Replace text in ClusterConfiguration to run a new RayCluster + const cell = page.getByText('raytest').first(); + await cell.fill('"raytest-1"'); + await page.notebook.runCell(cellCount - 3, true); // Run ClusterConfiguration cell await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Up")', async (button) => { await button.click(); - const successMessage = await page.waitForSelector('text=Ray Cluster: \'raytest\' has successfully been created', { timeout: 10000 }); + const successMessage = await page.waitForSelector('text=Ray Cluster: \'raytest-1\' has successfully been created', { timeout: 10000 }); expect(successMessage).not.toBeNull(); }); const viewClustersCellIndex = 4; // 5 on OpenShift await page.notebook.runCell(cellCount - 2, true); + + // Wait until the RayCluster status in the table updates to "Ready" + await interactWithWidget(page, viewClustersCellIndex, 'button:has-text("Refresh Data")', async (button) => { + let clusterReady = false; + const maxRefreshRetries = 24; // 24 retries * 5 seconds = 120 seconds + let numRefreshRetries = 0; + while (!clusterReady && numRefreshRetries < maxRefreshRetries) { + await button.click(); + try { + await page.waitForSelector('text=Ready ✓', { timeout: 5000 }); + clusterReady = true; + } + catch (e) { + console.log(`Cluster not ready yet. 
Retrying...`);
+          numRefreshRetries++;
+        }
+      }
+      expect(clusterReady).toBe(true);
+    });
+
     await interactWithWidget(page, viewClustersCellIndex, 'button:has-text("Open Ray Dashboard")', async (button) => {
       await button.click();
-      const successMessage = await page.waitForSelector('text=Opening Ray Dashboard for raytest cluster', { timeout: 5000 });
+      const successMessage = await page.waitForSelector('text=Opening Ray Dashboard for raytest-1 cluster', { timeout: 5000 });
       expect(successMessage).not.toBeNull();
     });

     await interactWithWidget(page, viewClustersCellIndex, 'button:has-text("View Jobs")', async (button) => {
       await button.click();
-      const successMessage = await page.waitForSelector('text=Opening Ray Jobs Dashboard for raytest cluster', { timeout: 5000 });
+      const successMessage = await page.waitForSelector('text=Opening Ray Jobs Dashboard for raytest-1 cluster', { timeout: 5000 });
       expect(successMessage).not.toBeNull();
     });

@@ -141,7 +161,7 @@ test.describe("Visual Regression", () => {
       const noClustersMessage = await page.waitForSelector(`text=No clusters found in the ${namespace} namespace.`, { timeout: 5000 });
       expect(noClustersMessage).not.toBeNull();

-      const successMessage = await page.waitForSelector(`text=Cluster raytest in the ${namespace} namespace was deleted successfully.`, { timeout: 5000 });
+      const successMessage = await page.waitForSelector(`text=Cluster raytest-1 in the ${namespace} namespace was deleted successfully.`, { timeout: 5000 });
       expect(successMessage).not.toBeNull();
     });

diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-0-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-0-linux.png
deleted file mode 100644
index efb098ef7d444d65a0d2be02ebe4f96f27849ccf..0000000000000000000000000000000000000000
[GIT binary patch data for the deleted snapshot images widgets-cell-0-linux.png, widgets-cell-2-linux.png, widgets-cell-3-linux.png, widgets-cell-4-linux.png and widgets-cell-5-linux.png not reproduced; the deletions are recorded in this patch's diffstat above.]
z$fO|DzdJhg>(=^Yfdk2W=8NqCfq{WfXxZY~SXf@WS4~W4h>MGJzV|yU1huB1wfqMV zok0_IV`KUb>51#BD{k!T+=>5FZ6e|hkT7z8Rt$-rkZ0^4RF>={2>qQ2(o8 z{VQcrw>?;2TjLddsu5oBKg&?zYV_u~|7J(l_Wxd+`~Ugwf;HpL)k!Cem}%&5@iw6M z#?PArVc>je*W7s`&hJoWW_C6rA_CFZ*Wx|^yS+Z&pQ6EUcFX#4X==G{Bv`4%52 zEoZl*qhok@ILouphK5T)>t|?8A3l8esgzk-P%t$!b0u`jz`*cQzwrdo_2_rhDm%*P zI9Q$>9aKgKa@os#9R*Ro?=PEWvRCq@TDNE#Z{pjZ;Nl9rdPT`+)-PRWV`Vi{YnvJw zIoKIO#KFOV_4H|rA(?=HK`*H_%YcXO1{d-?0v|Md0eut+)M5);|k z*h=*p2)UvnBF0BY+Y_P9o;B`=pZNLt>FIOWHA@#X9kEDW{)jL#G<>734lcWFPn3j) zhH^n$IuWECI?tXz7j9Y>cG-&i{{4GO$_yw3I5{~5X=1(u^~uotvxT;YOtU>*UHZ+Q zr^mDW0s?#c`>jsvgX@})e&YonQ`Ai&|0=t#KaENn6SOMLUmwkrr=_K(nX4=-8|v+q zm66%l8ZTN}T4H5o6%!MK3M7I4ZD*=6p-kUpptX*H)S~X?rU())E-qD7Byh0Y++6$N z4d%k)V&~7F4-XEw1qJPGZ6l(h_P4jMQEz>|0;^F7+JU9z)zej0KG-g6ZER?mo}Eoh zOr(c>R8O2?CpkxW&JcMS~qvbeCYu(~J6oW}RuTH`+qdIbTmc4cBW7@|3E@sUS3sI6*gSfdS~_NQ>E+fOjO1L7nf7uhwSX^^#58; zZEfw&j*YLcFSr7Tk(ISDPd*V~_yr7@7`P;GQ(P)xlTZKJ==5|5eqVoobw$O<=qTt~ z7w*b&=!#7o9ua}_xrzcGzufoM(_2VTaI&vYPFFWoBFko~{7bK;(O`S~(#_>Dw&vor zC-SGl+1Z)o;@3x58?{QZvV|ojE^cl#{8n+mgx|l9fHDkNVjbOotxO;Ov!LMO>U0}m zUnS>#NLbkN!#^P*A)OV-qr=0B@^VAQqv+^pDiPPDq@>`G5K9Y-KfS%Q{|0|^b0a4& z54n`&ZXXy>aCXPV#f2%Gn5;B;9KYh{H}6kcTwOgKv0lnT{ajgDf%;xb$;(@#nn2~| zKE;`jmlsU5xUB4+E(q;6-fAk-&P?3Y7Zf$757C6%rsc2w0#C>e3Gg{}HMI>pa~^DY zS=rBSZi25~dEZT{4Twa3xh{L0SR6zs+ z0pMXj*T~Pr6gcHOS*jlp5Flue3Xh7ix3(5>SoqD|zPG!}&&%8BcklcDeIU3U8>=cU zjZq=M!J(j~^_r8DlabLBjQbt`3<`C(HO~I<;Ws1nH$_!dPJlDOHVxp6Ca3kt$VglR z-{tOzAUqm67^guK87XNHRM`6j8(@`{o12@5heuQt?7^5Xb%xm5+JdMyJw4sr+}vpf zp7Htf=jYF#@9ph@6VSZ9y*qC$9378hnUyXsE`sZf3k(C7Ha9H|4WpBjljGuEjotFF zJG;0v)Yr$v#%}h_H+dM_+uNI)OQ)6)NFWlqT79lvb#*CJKB}oP(9?gTq4G^v)6;ti z$eM)($oY))bPi_b#Q1o11B1wU19})bIy$%+f<>SM$k2Yi<f2L~rUJ|1u}pZQ=K zpuz+W-IUZ+0to2tv%e{6@E# ze1p8Hx1UP$@}y*Bf@@bmCrnICEiElNIy%5OZEbC4W@b;aYHI}obG`zzg1}?k6S=y& z3dRb2*2>PVw5SNt->+9^MLkd~f)sO$RZQ9#!7ypM%sfj)Q2%z;Lf zl$2oW4A_&7j_%phfZp!@AvAH@sQGsQasakbR3ZYryp>s5ZPU}*nwpa!4FGyb_Et4t z5g2%NeI3kSACQ=$;@|-;V62>9|5|8JP!EW2?(qv6#af^D{t7j=mzQ&UcwA0RY0VVj z!=jUtri!(zva_>;W6sW80fv?sX=tEcULqL{fZr;ss?yWaHV6e!7Lt;k3nFTOYAGVA`n=#xj%o_nh#~HteB9JCcUILvvtN~XPf4aXkjO( zr-vwUw>`Zk6A+gH2m#LnS4)BT-AT`j2imJjO2R5CDw31QlRr#=Jv=1R>tusq*8q?w zut-N3xj)TxU1|qsBN>Z`HEmh$j-WLG?JKJ>mKiuXOj$}w3PAI&I3hUsi3;#8DbYXg zz-F9=+}1w1+sz%kww5H*JU$_z8)0fQ{g91b4>0=6I`Ir9N*SP60~Sr-PPO=~OU?xGQtkbw9Aea2n^?Q%GKh(Z~x zO{a%^!@zs|_%Ww;B|5E%xp_~nR3IMZ!o#N5SXfw4==}NqJwI->-CQGx=ExTs00cJYvnQ-_gUK&1r{0PEsVOd#SdAWnL^UnT$zQPp1@%Y$Scw{6s zYH?Zc1aK;d3r}c4Sf1Hub#Qb97*R}n&BMi2_TvXyNBN?dBLNjvL?+88CJ}!AQ=kO* z=UYL9{qeDoCOy#nLIAL5ZeAWidU;Nc@o27WwO36|4JH;A9=#!&ufsg2QQfn(2?jBD zQkTbm+Rt2zSq0y9+%9O^s_Evml7TsYABW35csagl;5W?ckW6lX8nC4T{;5^(K#PG9)Y|H&p+QnX z;`i&@9^}7{I$6La{r&y3v#CZo>gwtxB_+XlrXK_N5#Jo+-rigB6TGPH(HnX!& zH(neauKT^lfvY?XkYI}4e2_qT7Y>Ra) z$SL;9`a0M$YWBNt$oE_;ykMknu>rPWW7}F^p9ERg#o=AG&d%Q6#Ms!%!h(f`MZL>5$*zuyiV6=; z1)%#85?=4r;*t`8G~ioTS63k4T3TMVWDd^|_W^YR4q95CrWX$`zTa-NI!_?~PHF4% z?92hJzq`8|bj7(3!z8*pZ@p(^W{&MOAq;U|>n8{4NLLu?_U7gzbo2$Nq;{=MazH>= zr77a4LhAdxk9btVuzjzKgGJE0wvLY7Y^{Q(W@3TU{oO4uFK_G3Q5X>Wz>;%`PGpsp zl`#nkKb2WPB6y*{kiu^bG||hKF99wOL`2v4vBwL5G>wW<`d{y(#4yTRd{a?Z2Zad% zK|!GLL9!?H1V~u>>B_F&-c5kSyj#M--@ktY8{%MN15^RMOM;L8X7+@}?`Cm%*%)Mh ze|LOG$;tSw#sHT9kOJvCSq5!&Sd^GaH3OmX=hhPWbFu1SMn(nzD^MFn8YRzf z4(=BW_4M@m5Qu?+0k@qgBMS=|8z9Y0%*+Hlj#h9e1l&D6K~V(12oN0roh9>Iu=p(= z$7XJBZd6q10F2ZkuHk#uw;&SmnUCJ1(CiQi93X^(S9x+d(nQ=i4{aXc;SFsf^Fh`I zSmu0Z`glFlZ)8cp9|x`?T% zDJT_~52P@1a75sZTwh;fU|@i7IEo@C0wV&#HQapIh)m4$1ULt%paAT+?xPgWH_$>P z|9QsQnTDn&#Qk7j-XqXH_Sdhi5F+MC8i-<+^1sDB9|JESpdXsLp}i-&qV3_z<6&PN 
zFT73rPeM+9GFa7&d;a2f7om>H(`?HT=acWjNQV8~FA~TWC#Pdz?8>8ARy6-CN>O)i zkQf1Z?)vmTC?-1EVW|^y!p{Z+?d+%qygp?J)2??|TpLIQ(>QH@_+SC@^nbrp3!(adJ7mfV?zkL02j}g{I>_I@bUI From a5a229c89d2f8dee318eb8e5aee8923d7ad2c114 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 30 Oct 2024 11:53:17 +0000 Subject: [PATCH 406/496] Add new snapshots for UI tests and documentation --- .../user-docs/images/ui-view-clusters.png | Bin 28767 -> 36075 bytes .../widgets-cell-0-linux.png | Bin 0 -> 17092 bytes .../widgets-cell-2-linux.png | Bin 0 -> 40210 bytes .../widgets-cell-3-linux.png | Bin 0 -> 7692 bytes .../widgets-cell-4-linux.png | Bin 0 -> 3512 bytes .../widgets-cell-5-linux.png | Bin 0 -> 10040 bytes 6 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-0-linux.png create mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-2-linux.png create mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-3-linux.png create mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-4-linux.png create mode 100644 ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-5-linux.png diff --git a/docs/sphinx/user-docs/images/ui-view-clusters.png b/docs/sphinx/user-docs/images/ui-view-clusters.png index 259d2dc117ed9fa73d2351032285a63949fa6bbc..f0b123389c2ae8789e6c714279f9233045bd7acb 100644 GIT binary patch literal 36075 zcmb@ucRZJU|2IxslnP}er4o@nl9^GGvR9Gpkx?Ntv?!6vCM%ne5!uO#WRF6WJ(8Wx z?|EF``@GKX@4xr&d_2zUywu0>Iga;uy`JlRJW;xQe%DU=og^eAyDrGdsF09srN;j+ z*s%@&+xw}k3IEw*D|JC_2mW#0Ve}Y3)7i;t*{Rx?*g5FmHYPE(wy`okWovlb*x1_E z%*JkVbE!B!#6o;X`nIvYow<$mVKs9rV-ihcqr-gMhZPO&4)b#J@*n2mIm0h>hM&9g z%%T7Z$zhTUGH2BsU;X;-phaE1@nKqsQSFy*<|~;kvS;`HDi-?1(!MC5{Gj+X3(KGV zF68%>4WC?p^HnzUaHfOrSN1zHXPsBp$|eO5PVAOZo^=xQQ~dP3=NJC@{mazUWPTt1 z*(0^%$isjBhjGNzVcUOy!kP6T|nvzC7p@Y2`CUk-nj!AjZ^s!TS38 z&`uLQJ-s(+X%dppnjPjwnk=<5{{6zm9OKi`i*uu@I=Z^aSMxaA+uKi{KHZpSqsOUN zvU+4DIDKTqtUc}OLQ_XLT}#5HSlz-qXHFH~>Oc4K<42OWd3nQr66=f4wO4-p{$A*n zkzsSkRQlihGrC8yoqA<{y!vx6*DYJy{)#75j!sU>NlHhY=SI#dDK&b1G7KZW=-ZYq zODik3Yenzd2Vcj=hMuuM`Sa&bl7Y{`l;_V^Q$`e1S}gqj9=e;gs-nW^mi52mS+Vj- zNs@<^^|6Ns$u`PE3YFe8HQ_>BgSCN%%X6bXj3OFj>oVuh^I3KBA7o_A%+8LznrE$3 zV8_lF8O&s^7nVQ!ohnRQWQfrfQth$ha0U^d~X6BV5SFz{=`N zzU^?ppsud&XjANVGBPrH>BuNCmvbBIYnk}U_Vyr~k|j?DA?rPijK0OiVlQ62;5TdZ zpkdcAqi_I+e0bE|sh=--r~yoARdR^XI4AcF`;4-!{gqX=fRc z>AA1jhjrPlF5bHG<dbq2%t03dij7{0+5}G#jh4elwvN8C-!U)m7GeTUz|j`O@oK zSzVgwe5ajbwioxsq4WNE<1FO;T@~6uD=$$_%n*F)U$)1&;R`RNW0isz^122SzL&h_qArm zHR@AWs6FB@2H*bOEqv$K*RpTfrsf^#jCYskeol1du{F2m{<^=F{Jr!1$*x?h{tx#y z7r8FIWO849(fRJiY>T3Va;~Lz`n6*DwiI>MxMKTpjrMfi&nfDu?{5$8l=j+fJn-p- zsME|AlFgepV;a8T0>XA9d$LU%Oor-0&Hv;maAgdB4mO?oQKsrP z1#awjc6O#8tqT(%KYH{ic289(pJ~#y;xn=5{i?oxrM0oKA$fLC_@f8uW_~x*>(@VE zc2B#lo))?D>&N1B|FL7oe*FHu`vabhR>Dp6==o;@-TAiIMQ=^M(EIrKsCyF2QcU>nGJotjUxTl2^}FBSI@&haa(l8zjOFsHPZ_1_wQZ@I#IWJ|Lya-g6ini; z_Mf`a`0j=#=e438AKvPhe~OZjIC1&akEx&KAt52$MxXYOQ0&@8`Zdd#+2ZRt@{?0G|d{TIv;4wF6lCMM_7 zb@I6d1dJ;l@0CBS93B!vyKVQu$M^1$Zrir)w6O54l|NP_yAO%V;cJLxa_-zYyV2%H zS;n6eLfijLbfHw7{$A)%-Q7(L5wRc@6cp@gFE&rytMJIiWiZ#npI#j0H*Y>xnT0(- zIb0uM`0*hbmdS%x!nVA3ereUe5-xwRoigCZjq&z0e^uj(#E;yu5=koY zJ6#rj58_K-ow4`kGp+y8*C+KY%Xq2ZPvY^@r$>(+d+4^d9KSe$<-)%%!OF_|-T1-% z`#W~+idRcksU0@{9CS%YE=bMM@ieMQb#HG}W1|wu>({RzdwZ({T{x2wb4D(B&P8{2 z=ab@btX1iiKNHFLQdA*%0X;13!IjC<#LE$9TGOruvs?;me1FR@BA3l6(%08Fa8=xC z=9nx=OKq*U7bR0dSUnzyzjl8#zO>e%AVn*aIpU1{Yy6~^b|r9me!TzJFNI8lN)-;5 
zjrF^I{r$;0`M13%4@tNwJlS`gL}lROzs1{?p7)@VT6RuOMRW7ZLt@TTGlLg9bIi+e z6=JD8KW)_3(7@w zn|qI;p&{y*JjOxTc1R&p0Y!RxcD5V?f+fjgQb#-1nq2p$fbjmprwft==~%xH7G({$fftgWMpk)mvy&iOG(T%H!O`n;c2iNw~{)>w|~ zLxhOK)YMcXTPYS!`L}P{WGp>}4vKV<;mQS;nsRcFd4r<^18Hza@&akt-`GIrU*8@n zU7aDn(UES9Zk1fm6S`N{n_79uORdm(-XiRCFsE*qh{JC)*W6qERn%OKJOTn+`Fr2L zvJ=b9%q((S3sZ@gpPtC;?He6^;_pw@m1m=vSO!V$@zy*JnTR%T;C8?{cb6i^%P>d2kj?ypNiS{r&Gx!->duDOk zkVP?4Ax7$v1iXgPqz8D2d8Fc46sfpXt_OvYO#thrX*w&Ig{n*Bgd7ozP|per#O7`_T3~(SwHxOUs~;kG7>A0E##>oPoOhzMz2jR-X)> zeWWpFGwMPgT5B?=-rMVEySuwX&@xm^MT<0V(3?W$$g;YSxvT#_JWAoy~_DlpyPq&iq z=f;E6^P71@MjpCz=gyle?{=a^8-KFBZEbChH7k9sj`_cM?!}k6eJ@KQwO#+g zx<5l{M}AUXQrRePZb)8XqIArC^wf#}j^?ypz|TY8WA4+_)AG^WqrJRy+O8?h&fmN- zW6R6SZ0i~urE?1Zf{H~=&&}TW@RqGxiOuz~yj()9H#{u)BfCz+#H6UBbI|XQ*vsp$7nvVD zdNe)KbQF~;ATkf&ROZ|{d2VIb7x;t6j~{>fZTV06Tl6u)Sd~mukdv<{E$wCQ!f)=~ zyEkSFP1`-YBPD8M1)Tpp;s~=eY30|iUnv7AkBAGS*z&r+UTly%bLB_pveWKF>j8tqtKZG;53|ogeQo z#ReAzLcQ32{GTCMJy3Jdn@LGZ>Z40(1q%y{Cq70+Lo?sAR8yN%VyDt!e}IZ=w#R&R zx|#{_^#u8Cf8I2L67AhvRvXB!{mz00{KSVzLj3=_D4jwWzxm5_a<7LE!D23%nT>3( zYaX8Ht9*(|OG8CPg@Se#MUhPF%bcrh}r6djMYFym|BfR{yrA2e?eumgf%&-+qWveBIdi5$a!t zUg=O?ubUxS+OMyf4?r|kK@@Hed@2K6SBX2WVrp&uEHjg*s;Y{Xp1!s_=7Nlj%#V=~ zIiMigGj^<~(14qnHof9ieoPHuAdBM}r8~&TVo&xtX5 z0z8_HFPRx`@RN^!?Y^;2Z{C)qEG=9mAIy1aeRYXVEosk<_BS!;xVTRfy|Zox=)qYc zn_ z>mx+6!SnHr0SzE*_rk*&S`rma0RidF$tL0w4dllTcjH6192^p_m)%EAodF`|y}N9O zCGoM?WnrWvgDEH|h~K8?6)0!sjSfz<0g_{XEJSY)?2y=4<@03_N&w*SIV2Va5I^;2 z;;QE^hAN362N(>jhmT-o1myy>iof~u#M>57aco(kpFkvI?Sj9aU>)rp0~5(KXo@dQ1je)f_Ug6lQ(_@tC|I@L9tPkT`kHAdX)wuy1&&$b z78Fdt+$)3&PQ+T3F3yaOj*>RM(dMfSV3P*TVbyp;i#nKN*0f1lTDoL;jQyl$dfmGl z9V9pJ+!?}mJ$(3(mRwccu*eZ_$8fnDHOEhYDZ)5?kpL8D|9nC}szBAeL1wf6|ap8AFmQQT#QA~y@KnRH2RwgE< zFd-Xk#9Q8WOyaJe@br*fIu>>G^rE0Jb>><<#VW=ORX}t&V?Wkdaq+uDFI(>cHkB>x?Ch&+ zYqxIR`~moEzKwK#Ztm^7ca?2z0&SD(CuztJa1H6EzAIXF0+R8OgRendL!=J>)SQF(@u99jgXtkidS)ZZ?*db(lp$ z$EfyWBGj=T`1d*xZb*1|;xYrcOTy)dT1AO98DMU{!kT{`=b@yhs5>O> z(UwHCJ_`<2EIi=rFUhKjH*eo=-qC(c9nghfY_6_FGeE_Y0g-+-y(N_&K9Gorh&V6& z_DxM?2Vd;NGc%cuKlDING&D6$+(Lz(oNqrKRK*MFryppZz?DT#vj#VAgj-zMzkh#s z$=$25&r0vEXgJXkbX8wppU}YWZQkBDI4CP8w*y<^vOqCv4L9_({hT_iWitP)HD+=- z$?2o4O3@_X_s2Oop9OR2W!;&$GBZ$vn({Ww2ZW}qtZZ?4*`kg;@zB0y^mrhLWZgnR z^cBl*S&x#ESh2{;o8vEPm%54Fx^-)Hu0_$&#f6HF?gADeMy3L6AEO>8ck%HX0>Az4 zEzQ`n4#pva{qM9i6Q4z}*d&cK`q`l`1o5CR6%r5_gfdxlcO?iaCe@)s&rrsI5>TVP zLBiO$G(!Py@kGaN*DJm3==h$u=#2Rl>=T7Z(NOWb%Sv%_d$A!BR-Zn5w(hv~ z*gs9x^(+Oxo&Q3|BWWqA$IqT=IPZKC0N7kM-CsTWH8XZ56cy&Y-=RtMejqoZ@jbku(@6O*5p z*LI8vf#ONDu3Whd@eFrH8t?*lN=$bVMQ)2 zk7p3t1X{TXnwDi(uCILHNrJoMxrlw%nygwEB@qP;6J>gU$2Vi zr!LN`sMaiUymfo9R+bQvcz7=G$mkjo6APKq7~K9_1kDSD64#~wt9#PX&p^^zh?$v= z+7C>FVRu|z%&eRKL-t}h(w^X^U=I+2V)aVhzC|$pv#zCzyeLWsweNFiv$L>lJ8<9t zW~3VOTg{83=kJ(aI6NzB{U>tL~bR_;rHFk04IOA`Qze$3-@yTd2 zGqc5*nWHkfeEj@s&L>hHJlQ9AK#zCx<_CYzi=3yIC<86+b!%%*(*4u2%@AEc~O7VU4P+ti(MW$Adt7USVCL+E8Iig`z+o<>Kh*y03Z+ z+nzwd0v4?wYHN3%KYt#sNjYG@K#;19*K$;R{BhtRV@TCdD=e#cbNeAUM#W=YCvNbhX^l^`z_FyAWmVCOQSa7{7Y; zidN8)Ry|c?&1^|UHMk9Ji(kAH(m$RQpwVqkH2L^rHWxcAilODtR>sxkUL)%0H_=xFw z4L*)DXXpV9020o}%LgVwP6cK)X@8@QhN7YUTRO@w`(to7i_hc7*I{`8=^lh`0%ydm zw?vE(k51alX)Qi=-u9{WY zvmc5Yw8opzg`uK7g0=~s3_+1hj$)szjLdbkCIH_Zl$42R)-ZGU%g4R_{LU*Xa-|C0 zL+1oI&Jxv)DJ=J*oGqu6AlP8jYuQ`3Z!@#9?vRj>SY2HW5wxO5pX*q>R z9-Sh`7q}Dj;$4Cu3YV@(b26A3r3~ABis7(BKEA)tqpN!FUM| zvJ>#GufHEA+iv{rAg1&eK;p%j*vm|Zd3j%gtju=Vl%ju7?dQ;Xeti7XPAv0iP?2vD zzKTH*H@5FS7>)H%YYO2}Mp02Q|L3||QpSnU;)J9I2@;?cLhvV}Vj-ED@6a>V7Vp)B z7Mv|Yt}S)9Tlf9VvrE6m+EQg?Wl3)i)zJ;w+OHhVt{(F;ql#1SH83z3>B{4RSM%ZP z*N~kwC!dQrSi|Y`DJ&EL7XYmBNBtiZVp?`x6&E)NpOSv}9naF#(ebNg9}SHx8hgIo 
z=)Q!61hqF>dx3E|b>4paO)V`)MMYl`c+93pw5Q~5>Z=ny=h2sw z)l>JA5Ytz9#~0xJ=5(Jd$b{FECp9h^N7<7ea4>ZRyV<->Ost0e!ymMymhtZSGEaPJ zL;T$kUHdg@5wx-dH$w}HWzyUJ!c115Y$2LGdHU24E8m=xik{vZ5cAoyX9POr)Gc5= z!QugehWbY+a9F72RaKtwAZLcYFc7}7TJk<@n|3pbV#n$Ga1^ftjdE=8L~$C^sH&=h z5)-@|tU)*I^C?KhFc=KsPx`Z5`kHGMKJy(?DNznPbI8zsp}v}Z%Q`M(-F+Qjq?U1w z5k+f#HR9=6uYa-tvx2f;)brGRy@VP!dW9L9rMK z1`TajOIy1s`s`-t%ITt)2xg6EiE>*e)6~?&Q>uI^$G#A7zG0DiU~KL8`1nQati{1# zJrY9aFoQcFC@6UP%o&0?`Sy@*97hR_0sU4>Qo08klLgd=goI5L{5hQd>d}-J2-3j{To(Dzl z_&~AhpP0bo-)L-UDgXXmH&(e5EPFrKwK|Xn!XLmyF~g37{%lh7C$&Yd$dL#7I3fq6 zPZZC!4R5wG9+;T81TKcXQC3;`2%=1^N^(|CS*YtLTa)ywOb>Fin(yd#Wg2d`A8S>v z<>lZ|NSw#e9zp?wBJEKj*#$<(rkVb#>UL*+Q>eM3@sM-Y^|DQaeD7c@Kq*QF`@rHe zw@zJN&32qNK=(rmgvO{f|f)H%dz~% zTe%E=M+p8z_yH1vzbpO!hWFWcUVW8%2(q>)I=#zm9y*#Rw78goPYv=#I)2%HC?Eb~5nxx^{`H zF!9k-R{~IkZ>}xdr*-Xma`qiKFq?5PZWv#P`TG1)RH{}#8f9Mn*;5RZa=bi^MQc(S z-)HiM znGvAIX?C03T1aQfT@ff6u|Vab?&~@iOq=5_0IH!M2M*pc%WJlZf)89t-{W8tk_r8 z=p;d>t`dO(U(fOAvkwR>4`7TSsnAAeN;lRNWA!(L;VT;u#uO+ncvxIP9RN~@hyy1W zK2Z}NKHO5}Lst&u=i&a@26QQaOE1!*Ern>o_@pOv7(fLUbYG~Rd#I_m9E*IT`9|w3 z7DYh#$YjaNGt3kTHw<*bgoXC{8M-Ja+a_!~jrL@>`(Wy#B^&E5%YP<5V`gZ@T~A}( z6OiNVgKc{c9;}-iZ6P)wb}-%RJ@oYQ2n!I#Adxlz!XvZ**gBh^2(duQJO#LDiu-k4 znaHz5K>?03zK}nzV)ITpcm>LacAhmOVdH_EQBqPOLm&r<7+3#^tPxRjk`rDO4SxVe z@NjoAPg2}p!>~l;FYqDwMnnqW_E237ic;X2xE=`tMqrIXusU8z<%j#pHmcb-)}EHC zDe4r$Z@v$yDB$x`aM(;zbR z7;?rwh1cg=S0)IeFL=0#i3y~-hk%IZ&ELikK`s|{`QyVZ>&@*vXXbk%3sATMXa{sT zG29e}^k7Za@@n+c$B%cQIaVWRKr3KDee|5q58UnC@9$TI^W4ok>#4MLd!QRYm?q^W zh(sh8XiNV{Q!L;{;!1_?ce5{Y+NKYk<(|BFu@}zHmCvle?r_Vde|_VFaC8Z44T7p> zGl}EO09onAx(K^w`T#0eZ2@9%2sC_A6X&!2zTB7-7??YL<@)u*xRW6i(YeXqC_F>9 ze)+bDcHLrUd*-7@1D6NUZD8zRRd;Az?h-!3H<4VYqX=>DAw>5o-x|=2Cw_j3AQl@^fRO-wiHoAHuyx$Br-w^GxUg6N%Ke@K92^{9^ffz;P&H|4HgCZ$ zgB5%*$Eqs`@H*c2MNS&r*mE$HPCHH=Mg=lOOaOKW%&-ITX|K~62;;FnA{_fWUTXmp zHMOu{x;XVyesW^2bE+Umk7UrZ1~CJO=!3HWA2aA@{ey#nV^f21ye{I`G7Yw(tF)%5 zKZPa-@$B>J(kvwQOP+x4a6-QjVSWo4u= zQ{t~dJKl#j+jk{3P3(E-S|)ltBuQ_q3BxL%7*qm4tqY?O!JRzuD#>OD-(dHmbxYPn zAw`B?rax+sbD>B4;oP|Mojgj3bGi*2Rtu8YRm@P=B@L$XfFDM4kJt{@RzlpqSK;Mh zJ5+Z;;8O&>6;JrWbd#qj(i+%2-nbksD52 z-L6<7zEJTDk-dUyhE#ze;1eY5Y&3{Xn>Imt8$wfiCF1amkd6}-nMgokDzP_&?Z*O| z1J`UJK}cc^X+c=TvUW)+kNh0SUX8hhq>3QpeT-i=GDOf>FCjq~IFp^1N34h~TedW! zHWL@azGDMpft@md%7VX8iaoarpimkqqNSzbC}g43=!XE~h(r_q`nrXMJi;MF>;P4d z$g+W4k+6=huCGiI_J&nw&QE022&o-a%%5G8(|ETR;%q8Vw!nOGcA=e!1kJstk`q@p)CM;(;B zOvv6?(%ZLhFIit6=O2D`G3|wofq@is;t(;HJ1GYy(8Brwc_EM!;a84fJx`sT>qvYh|+nfq1@XXAI1d#7UI41sVh&mU? z>6<@1nvHh@$2jP&TS;84MzSVcW967lUU zt+`RH3s(rwJ0xtnq4v29wA zia%nf1x5~b=61EV24L+eCn(SjIvYUQfSns?@53N;Irh9VY#j5}q`gq+W3)02pvW+n zFKJAcL*f9m^@41Gn0l4DNY=_L5zlc&23WY{lS-DmzWI)J@ z=A;_QzF`}W3hSEZw8@;Xb}%MKK#Ayr999>nsShL}q2{hZDoX+z13aIO`#K4}_EOU# zVP+$XM4(M5_-u;J%_k-1{Y#JxVV$K;mLT7?YY^zR_U%d7iN5@;iU(drL=XuB3}E@k zkDE@MI8ktCBBkl*iP1AhZqb$j`#P;I@(^GKPe!Db!L@uZ1pB9-7@hPlF*{rAhtyk? 
z0c{`Fbb#;?-iyJ`sqA1wAP-YE!l|>tIo+zN?{DhkXSA07FX2M3K(6uSP$-_FOra(m z+ffLu~@BA_bMrzQW63(gj5nWlzSpk zB{@kapZ5T#&Ih=q5SNtTX9K@QB$KFG*qxfpsi$H}DFS7elap&S{~gtOUkiXT4nR+v z_fj>W2mdogOc@y28_{>|8U#|>6m3xHT|1ncn@hi|3Jt3XDPMpWf>c8h&<6#py|_n( zK_i1*H{+^y49X_`+1|Hrxs4w)m~o{7`z5;Qss z!?mIY7Iebr$2%icwx8p*6$9@e2k|2oIhf>+;o}wFTu|1qV@&Y7#z9<0Z3*L2_fLBD!4dPv$L_-BO#%o@!=9ycBi@&*`kvUy>x`V z1%I$1RWoCxJ&obdpFc>(+2Rl5McY27jGu#A9$?oxOg3Mlbg`2~=_#_fo*aS}t#V5` zR~fG9Mw-9VYgH*+j}UXw=wH@E(A`(iUL(Q}iqK~$TLh;;DXl6iy9X3myl^p*w}r*E zcWp~@9+`b{2jPvBl^ywFC2u8R9-F?|0y>^HuRHSxG+9+U<`Iy*nLTn{cZR=&A7~V9 zNNv!yAW}P!lL%Z2F#BV0uoAG3P3QehZ{ak)6s(Qf`~tZb%vgx*gYn`giYEnTFTNML zph#?dF&9BJ++jq<5gU;2ccae9+umyToe33s5ak!AC={Khq7xInAX`x~i4TCc)e4_p z;xPu*$8-@48ihlChO^9+vn+^)zT47L%#1wZcm!ayHi@ z(3EeZi4e*L&JqOV>7Zcxf)>3+-~quD6@{0Pq9HNYyqr0Y9sW|~2$ukRV=F`#6jv$; z)DdH{7Nb|Nq9Fn0T7RzvdJuN_eGvWiV?%@DXz^ma64L<-6ia%=_%`GB&=cf?IFen{ z-BQvgw9t0qRs@Te&bIo#1VBN=5pL%Z)B0C{jR!mceiAxYav_yK$02f3C=ZIjoJjMH zh5u;v984NMEg&F31Os5)K>2wV6~%!RB32vKoukv6B9$ne)QnMW)zZh0G@0&?BDX4Pm<5? z5w$AwL+#+PXk|S-eS9B^)rr%mwT5}+(WwZ(nAZHYho<29;q1eB&K*l=e{JnN3 z;~6`p+MWA)e}&`-MdW5J1K8RZq{nLpjheT=p#!n|3BY&)IyW&_b?mQR*yf~9Bo9O# zZ*Xw#@Fxz!Ai%o&;Au$+#yiN#``tHIqS_~PpbJ3aU}0sI8-3?*_c$kaHYvBi{o_X| z^Am))3BbTE{Ul53+~+^4N;r@NvQUk_5a`5`s*&am3mvp}7zmDSNLuycMA?NI^HVB? zx1fQ?f{ef=@xY)4j&WKWRcizJh(J$TZ%e@!~{`CposSa3?=_D zXm3BHl$Xt0Akpl+Ct^-{zHJ9W0X@>|LhC;y(2@BhY{3>z}J?@5CPYTMy?MjNFL1Gs}09Nghd*gN{>e~$a zl^j;+DS-VdDQavGSct447VrRC5RR3k>lU7a{tPQ}8~P20)>{uCIp4&6BnYT%+sBnK zMx_XcEzr{(8-VE1oO5HMsV;MCvkemRS3pi;kvS7}oRa)jjceA5gBZx@=}BDdbqmla z-ufv*L}<#!<}rY!`*W(`&xiHWPoIZ++g`tDfGk$tbrjyjI5Uq32YWoB+Y^x-_Je;DBH=C$XDS9#n7ni*S{2dCF#f(|!$9h3 z6kVS`DHT1TIzhNj&|L#;({pW;&s_JP(|I))Z@zK7vsS*#{?xmyx=)#nXbOlhOixW6 zMy&%4JKw)j7a7|!E8al`+;YZlMV`6H+{v#I5eUaU}cSGQPg&h~0n3y;>qrC9-g0r|%$8f7>221eK^ShC0 zk^@2{qo7b5QvRGGSmaFmIz0TUw-^1GaPP~{DYMhSqGXhmrcg5pof20<$R_0Ytdnuh zrNxpJG8G#tlQuVt?*#(S-$%8U8OZ$_9PI0R+4txf3303e(aIkXe2pPU(MvD1iUlAu zmzKkWG7uPe>o-S^+0oyYavw zxwQ{xuQI5dGPn6Xkcm9$4&QOza|USbO)jjP2v&d^$7b{5fjh%jY*fwHxuCYhg80>) z&!^g7*cP~s>(+MJz!`ZK6QdtQeeDk5ay%Lez0DD{oOt9xYqz8!KUTYS2^vsnn2;nC z1u&)`LqkQlM6Y?w(CIZ->nwzVrtMdVHzxC@Y58Mk+q<{zu3paqporM#PDQd}5fR-aD zUf89kz)RF8)gC{8zDFZn=K{*1D#fs?UUJ)2xi_Z7xg}&aAxA`F73*7DM-HClotM4J z`0A2dQ_1Z??{i0ctExN!%!o`Dab69o#7vI;2M^LS4LmJ4{Q_cMPh^~mU4+mbFXvsO zW>Y!VW>-{Hr1kbXDe5&5+Jrt%BrXyBjX3^2P2vQGZ4h}o}8r* z<}C{9EG$h03?r7TrgI?n?Ln|2IJPnJ%;H-vf+5Hnv)`5%$QM$UEW z*7BS=!js9G8HZpIB4tK|oni7XLhQ1)wxp+}<-Ykt5;*?>yx?ost^t7Gf;0i;7i~L~ zw>p%Q82Q3G6L_@kKqY-x_ib0sf!6DzA`zPhbP$=}E`{w6{p&9UwxUz=1@=@Yq`k?} z?PxX5cbYZXNzFS}Ld7VlaYP7tm6P*$5!e_)LUH-!)fS=&~2cy=q z3$YfA!FeZaFEJPBF1d|37nJrNO#~t;7E%>jzkQh7VSx+7Au8$EaGIV z+nO`s(gy|xHWvcI-^0d&jtM556g~H5r1Kp+L=FNwp@O{>vblye^aLSvI0YR}#pRy6 z4ADQKf!qh(Hq~Y*0Z#{1*zWG`4uVTK$-=h>egMW0VH#=aa%!>iX8#wn(@65wK}x0- zb>v2jXb4Y;0|5!Nf}wPDRzx|GPrDRCvw}}tCV&TeFL4qRb~6vcjR4>fLR$(QY!LXi zx4!By)_TE-R0j7#*zS~vM2W08&Wf5vNOKW~{RnVIB;6quel2v!XkP~JR)?Ve-eK|( z425WX#L?M#tf+qAWj$$<;Pnq43W>q$;tMWRR)ju^FYEWBCiy?8U z7H7baKxs}^J%MF|3L1w~kypI7gX07vB>C2~;SfxOoO<sa{2ohfDIetAZ&en}4s+CqDd=btP<5W?m3h))nF)`-(wQ&Z8%KU$(be>ue8 zNw7)c_$clzh(r5+TG}b-G)m0*QK|Tr#P@U&IEDa3Px#l!#xfuxLTqqA7&vlf3GIBY zi#G{B20bD0$V1{zK}~Ur^eHj`4-ioChF8o7ADD-%M<51W!J6bR2*H zfrgXhYHedf9N9)OGeTsBsK0qOy;-3DMAi_aPWZks5)j_}36UR*9KnxR$$Oh{mdYCz zUls&La4|y5ZH`cq$W}?sdwyE<9gsL~Ivn+tu(=3s0IHPGyDv11&|wsHaKEYiLkKUt z;jv^Ny?7accts+10@)9Gy|S|-#lh3u(&OupqVRo&*y%Vq5qFiVc?R*GDg1jAY?`nw zaSjtQx(ZAC23Il^5IAEM(0u9mOb9#o+_QrK&E$|dej{fA!yiHmH6m3YeK8bEmpaE! 
zB@;OVAoku@_*x{c#^-Rxpxk^9WWIO^(zuY@n; zMI)e4nxf|G!g0A^y4Jh~v8A%1-C!2{`p;q*eBcK2Qmn=AVm zTv1bVAwTrum1`G<$K=m+3j-g$(JPGv3nC-~3?mE&BcRC9`p{tH+Lvhg&GsOTQcGT* z^?H|$nb{{;48$pQoXgNDb&J4gpv_%|-u|`JT|&3Ou1Q#|am}>|G?X~%3p*GIy@QbX z5-~2TtFsMsSM)4+J6Y;d{*wvN&Wu7wz@{RYP5`^+GmuoA(f5X=Ml7Lg$6`}aD6Euv0Vv+8~DE| zME+88djd&IvNMA8*>Z(hSfhCbp4EO8r2!!#>7xUI-&ir_do_TvEd zI{%G!ADlrA0&Rd7au$>zFnJnzVLs~Y8j9kFidLHrOv^3}h1;BlXM>d{$8#ZlkJRIa zU_cR!f<8_$P#h9HU6_4TTpv>b1TeYrwQ%-**vtmykBqAqnxtf8sN(Dvxn(z&3K0eMmO_zgNH7Eh24=c0 z-Oho*81q!dZ4dvz-=SdUtZmu>e`lX(tX_LC|C1xeg=CEW`Hufd-#_-*~UzuWN-kq$(?f&x-8J1~b5m?jj(m$X5_fL12Uq|oG;MwKJQ?>pe>#yIJD%t$M zZt#C!@Eob^U)O*9i8Wg8uM6*`+e-cK>A@XGm}CDRp5}Q*#mm%-*9UK?b2)9V*!Dwf zAd{=!Ym>LV%;7zfmoDvsL6n>=DrD995Ou3n+il()E*ZA@s!+JZ63^h!kQ6?IdZKR4 zwW1^8I7LEo9efgPYa>{8)pG0Bt&PpiQWh2#(g!dDpEQYs08y5j`2__Az%$J8Yn|`& z^I`Ik@6x5fBlq5GEmE7(n0dH<%q`d>ESP@3j<$!W+coI}nHpP;Z&DDE*u|5~Lz~XG z`gC@MWs^7DejJ2b8=G0~K~#e}KR+MWnPGIA`aufUASO0;Ge9ZAu@p;7OS?1J%YU^+oN<>x^`o_I}%l%gUfk_<$=B`Yh7zCbJP z-Z2&y12l_~o?;OcC*t%ma??+W+?AF00$FWG;&5bS!SHQXA7iS$xVap!MI1^LADpq(cWB*hW!&{KHt&4Ifc7M*ta#K$ zr4uqF#Hn+8rLcl8sS3KU`a2Sml6F8Wle*TEM|10hUPeWQ3=&wRNaIN26d`eB1z>V3 zGA^bM0xuv4f=tJAkT|yD^mI^ zyWSW5pGSD;S9_gZr*vYqQoG;9CUk6Yb@IH&)WA)xgKs^$&j{z}^zJKH+;#rS`bKAl z3)AmM*!2VLHTnaiFLWlWeeP4pWxO~iY3ZR9%JM3L#LI9ub6{x*P5DF|!xg!_54UFr z^=`90Y;}_4k&s}*iY7WG4h&@i*N_m|ETlYkSy@>@@H0Tqx$bj>ot^y3l`D9)kGGOI zxE&E3hfn?ph7-uU`3JF?@u&UrE>q)cG;2?z_q*va+&Ds;cEc%@AR>0p`Rs^q?%f#UZ_zyGZ9Z;-QR;jmdU7 zUA}m+9Q-gkBZCqdDD--Dv(g#qNIJu>@B}b2n@^oOg%@J{@RxqGbLY+}0If3I1g|yJ`%1#!XlHLDlIO5JiGUn& z$`C8PJKGNNJZ#gj#(V_C4RP9tM1K5LSlE?!R`t+VK2}w2gI4N=HHj|j1+v@sHIpkU z+k5piFE8&~5CqiP>FhrZpFfwQA`*oG$+x6u7tgz?nVXv_V)I6 z$bU7j)YQ~KJn(@2WM^+5gN*_}U;q>Fc|ZX5QuW7%BS%R5nFA&$ttKU)5rD1 zk!NLV+_AVHcba#ReE9IguA9s`ZGOsa-9Pygjg-@S z{)}hY@a=~3C|_}?8XrZ=!EL?OCEb~e$6a6f3O~(UHcUO$TYcZJyY%~m?rG6A;i8G6 z@i}^rw#?VBnG;e{YLVNjzgFT}la-yVjiot`m&TFr*m2!eJPI&^8(ilORI@5tLxL(` zF&?NT-cZMZ)2KXES{O!c_@RbAnRr1FFY(#F3bLxw4>+q8eU4KDwv|)0C7Hv;=()5az+bK^|~% zL72I?gX71K*WtX>7o;|fjg2$kyx|apVn+O>hKkBZLODYh`$oL$09OH2G6e9l#H(wV zBQ#~9CmO(}u!NPOaCi^*J;@J%cHbaQ&>H#_yKh8d5_!;RHMF!o5wCXu3;>v+!-GNb z8rH$b#mV|iSX`NPbqaTI zP$?lb^)te9xDGHZ4EnN`Ab7 zklX+mwcOmqyK>B_7L;pGiNpb)pr8pr6Na8xHp#^BcYV);f z%F6vkXd;F_l~q-|xR0T+v04-`Vne`N0>rfMo<`jJE6igI=tNI(xIJVG*>mTzR##nf zp{kf5!Wt2b(|9edtwuObkL|l34iyG7KbKI!zQXN#R8PFWrLLV`p)ddqhg`evUhl&RjM*|{^7VEIUT9kcr@!Ba1uR)b*Rav}KoDMwMTLP8{je$wCLU~m5@H&-w&KK|#xz(X7>zsKW=9eotXL1E!} zpfmz0@!*^TgnDKiJ;41aX=)Pr8hqh(ef@i=mbaJO>Ya!Viq{zIIdH%W`Vr*eM<{qC zB#`~Pr5q89}xjDWJR742U}VZ`v7j@=|9I7o@6MOH-%R{`%1XqZ=lk9YKUPw`cA zPMLd>|9Phb#3e$<8yFm9UmMftLHy_zmK=erFiF_Ucxb#mL?Lh#?-=m}RDBKSf-wFd zo9=@2ZGH99d?}1ZD3)7D&_*_+h7&NhqN1WP%lKkaN=hG~Hi-vNyChgFcol)C&CFP# z<@-RjkA@+DvllAyGohiOXQib#lQd@NNhBpD-HhrD0|!@gNo#ItX~GGym<3$(Fc+5} zIQ5H}6TKNwSM^cRf$>QEs>{(ui8MNb0yr+bA9GO-=k!HL2y5$&Tm^moBW@5X5%rVO z)h#Bi_i#N0Ud3bG4Q=4GB)$~~aPZ=#Z5=^G0PoZ6;P1|NL<)+VpT7c*)~(@sMl7}; z2uVB64()OjJ%m!@2?fpC`YU=70sAD)8!S;TUnI6H?t>NhC@5$@$%ocfY7l;OfOA;j zF$oDQ{HDFoB2!;&KaDpj0<|?ZHhN$oyMbLpAJA|r3hSJs0WsDmI0-NBeJoLvhDcwW zuA{-YGQX2uaux3oQNaxZu1NzrAK~JnM6llu^~vRrv60bXLBahv;+Q^=x!(SN|521QXp-h=G&G;kOd2UgW+_sdlnf~{ z7by)INRveA6h#9?g)}LW4yltg$Q05bLlW*|pY!|Pb=SIo+$cWe4Tq1$80S9t{OOaymh-OLp0oBy%N}RC*;OT$ZPix(-CYd0GFF+q zvLl#V@vcdzeM6`P5m!e+P>7R7pLu}R z(lzs<1LCKjr5+3Q^!#%x&L$`KMP^Q)aP|3b2Q@V{ig;uVvt3*!a?s5;wMI@&0ARlm z7w76L2C96@LPn{oKB@Wf#08Nbk#3MHn*?bCDrle^Q;*XXQQTF1!Sl`B`e%W0BI27uTJ%OS`qi1LH_Wut7u zQLdl|ZO?1cqbvj$x*8wu-2i8^pP5y$y(JV-g!Z|o`Hc(WUk_`foApn#vKU{ 
z4`P4Z$3^sZ0y)>rBVBLd!r5E5-gw?J%hXf>=)HrIRm)~LxyG+_2kli_CK|V6(-u8-yjbscG69JYnRdEe8%x1Wa>scFsT`@!|95E@$do ze^SMLAh=uLjSL};+i};mYb*5YRmP0z%AATWK8I=s4mv*5p-(cP`o04i3}gR`acv^E zkl0K7n3I!k=l^^r%%Wu>KJl3GZI>9{S>D}!MO|1CzsqQJsvkaRg%-!aGx z2nxDSL`KLMpra&4W7KjImS4SW*5|>mrDc)c_S#C}Gdj}=-<#2lHh(XLz`bYB;+GCq zcyk6$$k5CzzSdnMsZyPuwAc1l6db*&-2`}cag0^NS%j|8-bHzS|Mcg4 z_qzOHqiPd4&h+mIKWu{IFvzyTG2_GAP8wG9$TQBIdy1F`v1PqA*x`&jb&bYJUkuwD6>kN`XY|hC^r+`a7z-$Sc zl5OKqkR zHf|LIaS_3g%Be|mFEW?M`@-S|XJxFDrE%es^-MLycsyZ0K|y^ty`*E%{}D)#m|vx% z-_`k2`14ik#f)DVA;tC3DiOB0gkZ9}L4reVf`Ub`c@6rzsH zk4Hy^aPZNAF`M>+gNVQE%D4f}x zm1T0&{NWAYUW33D=F^2U?vfkJ)9aeTj~<?KusCBxe0~MKJ*D{Ijd-QR(zR1R!i&Vl4swS z4amBdWgU_sldAaP-u2GMwmSA-oO1NQ-zMgU^H(}6+vVH+6*G5z#eO@peGfj*Nep)g zY2MTL*Md7e)-;~zI$=fS^~(3>eA~j)PfZ*=az^i|<@qMXbMK7Ya?(#j>(vLt2g->X z^LnO+dU{`b+N_m{`l$BIS8m`L`1Bkm50nw zdg1vEpv-okP=^{)U>0`p8*sXd*RMO&w|~VfRQ$3ew9__7O^eF#IP%);S+i!Xlik)- zr(hnlP+!02uep@bi$Ap=K#8bfO&=dOeFg7eZeh33hSk`mJ)kra#ipZ7Rbt{Kxbia` zCktl2>Duub4pj5BI=l-MB9JZj5#6a<|3#Pfz#ikF<l7i8iQhI4vLGyX=9#*wp}q~IMY;Vts!KvOy*)_$ z?oG8L8gHFBBY}CJPaSdjO|*t;df$GAzuJ2?{m9DARaEP2m{RkL#2;T{rI~l3%c@Ie zdpmuqtSl~`T|6!bv>~Bk8a{wtHKNlH(eU~6?z9rj`f29n<=L)Vx0`G~)w`8ms#Cvy z{hV-sKA_mI-yZ65XXBMXcqND>bkE+Jy7lvJ^kPnX6AL;{INjIlv$s=mlgz;2$n6y8G2h~fcR~?s`aPjK_}e;Jm$MT?R+}O`B%w?(Q92Vmb+_K z_uKelQ&rhiNt=<*ReE&y`YHv*pUcqF(RCaGGl~)B@S?3ko#>jL7)p=xh=dJYBE_s@ zX?eLF&*X7#ZYHie<7tMF%*g3fj(Z>`c;K~-tgZJEtob?@n_t-#^)_nDoF7w@vz-Ju zORq0LaOBoV*5#>;W`oAjh+uc`$M$q=JMk>;24A%6#S0Tg^UhE}F`nrM-b#Tb@pG`q zq&=KAZ2)t%v!KKPp_RMKe&e={Fb5DYOG`c zGkWxBfei^`&n)rj0;Mn+2|P$X0d5Bf)d%u&k@qEVC&u#Ckec|Bl|^e$yY+~f0THke zkGi;iEqDO00rB`Z5MkrauR=!*m;g|qV`bGJTv6~?q6#3P2`dSXT_#WMDyq{uUUKHR zj$#H-6e|F`D_0rUD+Z&9Q>KCA;}DATD+jHp=00Nd05wC(y<@mqo zFysF)FklIiD~7KUogB?(PY+X1-K@@Cy3B!eMNA$-k+6!H5=gGQDBeifAw(9s#$rBS z(N*ykgb-WgTw)z1wzL|;z=hA{TwAIqxDZa(PO=z~@n1;44^=FQAG5zte~W_63TtGZ z=~}N@ualqqM1KFn0cZ0gBhDlmb&@|ac&L+!(gTl=pWdcvmG{l{k4YF&xXCf7G3Bds zUj6Xl4@X2@H}2?d^2_#Qz2@j!*JSkvggF5-J+0E{{JUqd^4pTz`{%EjHF93{{yrB^ zO*}l&&F715)%LJ(j)pM#`I5>)XoATI{26flZ>VoeY^d@J3he0~KIVm8mNRhD>2Dz}-2Ws#|A7?9B;i^?o_f!LxvxP_P zGc_StOsJF51K)9a2e>*r_t0O3F9H1XaWK9&uy*QdYIgg=;%z5$ABKR41+2*%<{3Oi z?IzwZFD!ex>K4FZ16lXFkTCY^+xH6X&^7UA1w}>I^=O7HP;nmvqPTwJMttJX4}abW zw<$Y5;Ss&TF;I%ma&mUiaVt@h+LA>VnVW~BB5{dpIGi9(#_`zL4KeA6V||U%qP-gW zsxQ|w;}b87I5fd*eOLv%O_KyG0NFJwS7rzpo5qH5z`m;%xrbE{*Vb^U4i)4vQZ5sF zcHEt_dg|1vpUG8EGc$t();i4$)=ngd{8z8m3W9>OQ55@PpZBfS&De)5`yMgj(Ta4R z1ZMEf+&j%*CB>$ZQad1?y=wP^P2odlM>IVj8D z0>qvYIOXF3i~wP+Ne+mDA6fKJ>A6>sB0OHN!6727(uQEcUw&ZTY7p# zS$=Td`B$tb#xJD@k|>APAjC~fOgskl#&h_1!#j4&b`q`rCTSdzqf9qs7X}f1J{CT; z7ts@cn0w4~SmN{Y8u9gUt_TnoOE#0VLJui)QV_T(u&k(M3&Dm;NjcS|W!T2WB~ z0K34z;PTe1qY!D8I%xd-W}AGvJXswIqGgM4;B(Tq0kakzNo zN5?J?X5DF|+Z-t06OYM=mp{rL=cw$bnH!2AgMf99s8*RdNoc@MIh4d}Z0QkOXWG?c zyC|c=zWmz5e|R3~E#3;t%+C!WnRK)!Ej6G>He>-mR@SJU35!{W@$ssAm)E zW)>LaJ-^_1`0H{vx8Dn&SCnKL4XF?M=c|7=ZWv^^dY{qRNG}CC{*1mQE5buV4c`v8 z5tSqICZ~kTG`%z}g|F#0JHEWtJ{{b+;2$YtZOuhDokN)=1@ouKHjOT;K6|XMK_~sR z!DZFM-fBH*ybxcd@%FawqPn)lLg`q$>Fmpa|6O4DzoF37o3ur0Lp9u(Y`1RZBX@4b z$B+EgKiNcVm-Q;()IT8^=scBC;nexyFGLp)oAQ^_e_1YOkiNP*!0Y zA2o?F2S55t$r)>@oDXgam$I92qNv7YNgD!%!~`jwzy7?md0bzSOaK(C z9~mjnPySR@C2lDzY6&?wBnPBj%oAbl=&U~X=B77&0NjPaKMeZS4Fzhb5BB^=eEK3P zVdfd$_Npj9J5%8?(XtHMlZq(eW)g+^ ztdKC{#(7Fpr*eUTFzUxKkYSjcSv9^mkW#7-s1xQ+%b71~&qA%cGVz7TkpkQm7f}gx z^2~(`yP=7Na*`M=?(Vrt!`gOf5gkbeXMUxvtskO~>x+NHUmV=y->qWPT-839Qg0Uu zm&cVc=g!4;8My+zA0J&&AJSr)c+&(j|4WxPMugssrUVvaBI^nhE7w*y<6$N)cc507 zMUTRk6xTV3=|HlPn}wBdxN&c87z2w?HOXeF<>JMZ##D*Iu+T5Bty)whZ_Ea9GaDA2 
za!_gkbX3nZAN2)OZ3o@U-;}hq(|s17*#V3zthPM88DGLCuU30XLt_;oP#|kFy?)+J zeMMej`GmtGfV*h>D95m`H~`Qv2&2+pt55U%{1McN7Q(_}Hyv08)=D7a^O$R@Y7UL9 zqg2PP%I1@>Emu)hUCBfky~T#qx_eu;Zhb`mol*jH*M(Z8FSi0A%c}bAe$G*RY%I@( zzEA)-l$fi~ZdngvS}PM|koT~nWOpE+wLRRU;`Sv_;Z;>tf^9-}I@Tpc^lq$~k3#;H z7xpLr99uZypSWAvEKUhe#EA<5u_D?X+DhRPS>ywFEz#M&eJVBGE@~SgorLKM+OgxF zV))HFASJ?ch5{(^ZFg<_*nHYrwgF?VLVHC&oxwK6@XryXt|Q--kU!A|?0^O@Qo*qF zN0UZ}KYUhmj*cOCP#msi+^26l{dARZ%M_OpoQfS}7dxgmbUmg_o-7_DnQk^_HGww} zABGOxa`WAharw29D@!V<6Jd!Xlu$_^_J(O zy2~ml|2016^0-YuUd)}g>z^R*6`8ij^fXGALj)*)9f^df&n+v`BoFnL+g>{7)%5lC zMMn!yJ#gSap^fd`r;i<@FxDrC079UO>x7{4gc27F&Gfj9+j^=s47?BQ*wGFEK^Raa zJ)8E&AHC>h#qz+aN@M5^!*u^&CUht53o%lyrUGkJbSO>g^a|OKXs9#};9(x?xT=hW}bneJM0>K6_3+5xPae5OQ z=S$4mi68*BHzkosEpD=E{6#kf*LKtJb@mZTHooK4n`?s2WpY#+_E|_YVc*^BJ{>YV z!DIZ5vpwv&e?&yYbHi`QOzysWHyFjT7|J6^FE4xk{%a0_LFmqV44bD!6Aut1(cifP z;YVLC%;dTJg&zQI^;tk>!4m?C#6JCs>`k=2=NBE{LFT&;u}jZL&>R7Q(UEW==y{0~ zCO-s_AyQIOLOsapiNQ*G3701Csuif3c0ugFbggR)2cK=g*sGhanmdO?bh@c^&Y|={96$7YP=1rwzyVP)Tj0 z7nY!W94gbxKd(5?zJ9TuUMF5S14>bV0b+!0q51L%lz}@hUQ|WgC00fV{pdF=AQT{v zyU=v7_h0^;-jcW7M8T+VIxscKR16d|85MvQ#BYMxrfKJI)%_bFGiwc`duJnI759tD zNVuPFASxzFYh&3**T;_^+j36D1s?n`GX8|M&jv!?7SncFkGm3EKIWH+6+nz|S(MPq z22oPKQrj$nzL?984fDSxgdIS0VQGu@*fHwb7h5E(aQ)d+N*p!Do)m@981W` z^BI51WHvE?-g|5g%Ze0xKly4E^9>fnZnUKb7)BIk<>Id>@%9U_4CSckW=;F0X|OMf zik!#;fY$agLk1&X?cTFzJ23A%xIK5C`W649tm!~&-5yzlxti+h>4}?936HZljHqQ` zHO18*1GRkyVrZm8rU#qo{2|}1lSKf4e3#FBPb|nHq+IdPK1-~`M=IpFH|KO zzCIh&mOb=L)KR(%{3k$lXvI}H>VP)}JbraP3K^w8a$!;p&`o?IqK$LMN~2xye3 zs{qmjr$`_X$;JBCmWx?ipZ=NGOB84ahh%Fh7{Z-blWN2+3zEpP$U&LU1B=K4Z_L}o!xiFy^U z{J2Au46H(t+~0^>c7oze-S%Z4pVYfCF6pRzPo^qusqVO$^}{;^iGj?}h`cTud~!c`;{h5Q0;peS z!|zc~GB(uvRv&64F(ZM}$kx_YOd5Q#dR5odWb!&IG=0AI_BvVaT)N9(jk2oh9`>Fc z`og4JGrQ_>+DO?#om2aIGZna>pP#<3f@i#sKB)4Y)YR`3wvPIH3TLJxuA|-%uD>iJ z8hGzo4=cs`1tw*0ri-mZYMTHq!xj*tddTjgwXxBK=l{KA@O?spPq&9{y03q`@kWKD z#l)mHQxO4qyZ7udtx`5QnHYU8;b++bUESzM#fy?ga6KJIr5JZ{KZNvJtM!&GyS=m! z_15aOYgbWlh23Dz!Ktl}n~&DK6zf-YAtaxbu=v zCDc10gHh+snZ&txIWVJC|D*Z+<4=*iS_LPjbpe}AH?ob^S+eyIEG|LbkH{2(O$%^0ZLvWq}$>hSMAecJbyxVUqQ>HKYJs+1MYLT^eczV z*e;F56PK{r!Rwor_ZvL8o0vgxV|$pGnBYd+i4Y=5^7UyiN-Xn8hwZ0>Yd6v*?uqp} z8I)O4OuHbABpYYXVXp}pQ%zcu-Vy;H=%(59Hk1}#oaDf>Jn!z*Eu?r_Sfdi3B)#S- zmC&Cd^z~j47X+iQ8+)f__&ikF0WJl<<6Ql6Owld3&M2uexjb9W-29b(nT#KimZ89e z!r$pZdadcEqyph||BVjaJ?Z=-iKNQ_y*aZ?cwoaujyzv@da_1=HfVzwKP1Ar3#eC? 
z74Htp%ya6D#`jS6qmu8j5$1a*jI4W@{4wh@VH^4ME*b~ne?r45N&^y_xVW_#`(mKR zVC$os4PJL&z)2LWa?bI!T-%0UmvE==JeYxaZWQiEYRRONN@>Y9HZq!JXE&6B64K0g zeKTyh5>h4VU_mSGn;;+?|M+Wd=*096eYG!Z3zMQ4*7pOy_L-;jX_#f@T;5BF`2?5J1!KJp8eat0-;R#0YWs8hyYRN(FHAIbehyw<5W+1H_f5U^Nw_dou|;O4 zI$Q;MKbbH+>Z_QS6gTAh|L^#r>}TZp{3;YcCN>1pY=0(nl@=Jp$-+z z6JUm``pIycNtdDt*#&Ad{(+R5e7uckpRS%w&r87SKm^2}^$zb>R;HMuEO6w!amFH1 zrO}3gDX9K~nJ=TcWn46voorS(l9ZGvne{NRJB&B#PkE|-(DH87VWs8OY6(EcLhHNh z&QaC(*Y`~t&1w_2hf&xg<@={~NVrnFA=a0C_K6F!$QaVJVQhDd1i5x&AkvFVTa(2# zSt(OK`{mtIRaTaQ^a5k($(>+5`X4o?IfFxuuaK8Gu3LYc3MH^ubltgZn)$*lS3+MYT~LEu=tWMHA(2&Izq`}I3!LV;Y9!!+m_S#s)eq2;km($ zxz@lPUkPmVpt0Y74AXzDrdGY)%EvzcvO^YxXy zqT!;&i|2vx(VgN^9{@(K%T^InoRYiruAF5-RMV6lVz`W%6|>jXapx25UBkG8q#b!T zDQkdAS2f{PNL44O<<^!)8_b~w@}n{-gw8}qx5J`EzgaB2D42=FcPoB4GhZ98mCXUQ zb#T~AB``~=yNDAw&KDijq6$X4I&8*_K|Ez##)VExfGwb|)F5|Xziv9YOrg%R98Ed0 zYf)Y@-c@kES4=A!jA9Ou^_5s6umFU~<4IBvS;6}lh2T#i7EUO`Y1EjEk|hR-$*}r* zaoshavbdyVCnE+-{5pGJr6A9#mLFI^cyQ|9!FJ$EhuA3mh>x=P(YKtT$m zkK~*@#E2A3F(lX*zBCF*G;%s0v)kb#M}*!0r3Pq*QofgNxp1XKqzLbK7H_o3kzx{@MMf6 zw85z#dcy;QXULJ6h)j0GEpcNH#lNVgX;g=D%19lVA2N0pmzR&Mt#9sQX=P;xeHFId z@mF)RYwBSc-|necE?oNp(;~Vy4^4f?RKNv+eubNJs#|~T`+2v?p#CZnxV<7oq8yCbndAjeS5N= z*78Clt&C{zWgcm-3g3hmZm(|K)JaQgRB5_ac;qS9wk#%9U71!KvcmkL$u-}$R@(3~ za}c&h*T*5vX)MrSf51)3;h~O%g=b5dP_@7Kd#bBvda1J!4|2|;o(!L z!SNFm?p|Ym4!&%re%Z{S!F3ZGm(YDO$CKHS{`=>#O^y(h0L(_^toZ|GBL`3rnb5a< z*D75u#JvtT;q|LdH8nN05>p}2hqY}+H=lpM{`~LOP2i^~-pyN2th(``02S2|Ygjq1DSl)#eW;Hj?RQ)8cbB!U6ViNI zPHay-jUuN4S>g5k@8vG+WiPZbNM^wJmjEXuKRp|>yk^Z0GLZ-Kv3IHspV@L(;9h*;WN+(S}UY>cybg6zito7nIe57m3v-U)`eGYg-tnWKc+WIv|7UQKu*>EdBY+iVyp_z zZsCMwxa{@ zjbDMPy`}ig@8197Z7hnC6<^-_Jnx=ga>oB>-~M^?*LG2-oMg?-#CPk>T|6gV*LKH$ E1NHxxn*aa+ literal 28767 zcmce8byQaC`YpC7hyseJ2q@i+AfbXtOIau&Al;!fm`E!rAs|wsBA|4tl$1y)NQy{F zNjKcN&N=S$yW@^=|G8t_GsfO$DwTY?b3XH3zG|xH$afvuMM6SCt{^X~K|-=M z1pl3~b36WhM6|F_lQjDprq{Lg*o4IliO*759RM@<`JM<;!IBN7v98!IDj2Sa-! zBWnj!8^_5lWm5PM3-KWtdn0{EGaG9LEi)@45-ro4419tN=M5bh1o#Am82ClRg@nZg z#AXXG6p)ZGkSNHW)^dJ2*6pOFRo(f1YVz%Y_h&3$_%`Kl-@k2y&CgHg^^2w#Y*i8M zMW(v@-O784)D6cDx%rYC1gf|Cgtxm*?)*4W>Sa5>ewm$zu3<*3+Tqs(_q%rvll2}G z_1D66Gun>d-+N{!oN_oov{*fj4RLDQSdY{jC zYsi0p^S*)r)8p)0Du%~tr?esvMnB(x?hkNK08)lj1Q(LBfR{A@NUDOC? z*W7MlF)G{U%e$6hNfUoH&$@4LuxfQ_CbZZ}vDj@P=+B=&j*=<=`^7K4x%Q!{Y2V3{ zCtJ;hnH9sXuPx6xySQxnGe3THJNcpesi~~t92XQFKYa^4ENa@4LVon<(bhE0hoP+J zIpRy)mqI+(mis0qRF|d)!mlgzR|SgQ8at>{bSv4S?Ty_$EhXida&K}wC#O_=PHtta zGmp0=^?a(#!#%Ap)bB}oEbpbGqqFI_d+%PFUYXRy#6-45ThI=Qqa=KGKV=J@Cf~dk zHTitRb#^#m%xk-O!<$x2V0y;8rWU+o_oQ*MYBCf&8mZ^5nO z`}e8UC5M5@!G;*BBS*;If4H|JO*3^59#+V#`Tna*8TK3N9ukt0q!bj@-QBUwN)aT! 
z8xbBW^F~zxw2O<2Uy>B4NIG(@l&)XDUTpSHJjo7*jGYR3_;3d!BO{k?ab%3BP1TY+ z9{sYmHsz5c^+9weFPFIHP)7Qwy!zUlrpchIO*SMv@^IP+l-hAMoigiOEOw(c(6^d&=28rQChlXS~> z9HGS48P!Mf+s*SE)l$2=yPt5KWvh$e*1UU~UP-!Q>m6?j`p>xY7nie+X{IQvOO1Yi z$GWz%V6w5k7C`ABQ)bsRjjT;nuie0`Jy9ieL?O!onmJ2$P zww2Ai^~DEV&ShaDcDmrzD^8B^T{+q+_(Xy8l)+OE%kOU=AXTm#8g8kWZ7y&a+Jg;Z zviSB|IZEI`Q{uUnGA}PgNjZ(4`#uwTV`Jm4!khm1$e#w$-aq9O(ve|hdj(q?nwu$a zb6j}IW=2LqF@#Oc&(CM;9r*qAm0hm%&EM2|?o-=tj($6vtkL)DSH)!6Mv!*8h!{D^ zmA5zgdc8K7#l;Wt^71CPeSb)~!cG4BTe^Q>Ai0?RFOp~2&em6#cojzW{Vj8{Ri8pa z0{yAkKQ>BlkiEaRgJN;2ntti0Kkp0O5^;5Pbwo7VsNC7J>d$3;Ppss7ZAi7f(MdFJ zh^~G6=Z`JcuA#sB(WbELiW+0RW$}8ROAmZ}b~=jFiO23K(WRd(#0Kz)@DHV%a(Jd=3F0SFG#D@Z}IoSJ?k0Q7s=K<5+&=SQ7peg{k6x*={{e13QBe~- z-3=6jrZsAwYmsk_>Np&>R2m<=JBK2eyu)8LwY;8dDu^iD&z&ZF#<~hZpP%*L%vRzT z^4zxX|-bxwCoO<)T~M*b|SZH+NC9JtI>nvLD!`5+jm~gs9mS zR4Iazrk!W4a{W5Pp+N7c{_5&0snb>g_RlZ=st4f$mLRZ zvAu#$EqJ{4OnsTB9!&FBcl~{~pDj zk4Q^PpOKO2A7jmZ@q&$HdZ>}2LRmRh%_gZr$h_63Ax3ltfw=k4La$d#mf<-P__4$bCS(NPWg*Q1@=En{D zK0d(OpZF}!!^2bTJat#nZT?x?XaN=ueTPLrK;UzhQaDHD^4w_Ch~u?<+dHFe8C&>6 zW1Ys@)t5SvJG*!LJGWr-qaNe2-nlJI+!(m?PpfObgg)x&>4`0j{TzEzGuQHp=jzls zcMp%L`T6YzEH$2se;Co{o}>Q$s(bQi*M1HXeus_KrJ;DT!y*BwqkLZL?w(5nG}o?O zGyVE+E%GmYf-OhQ7pYVX$bj zv>ZEB{*a<2+e}mZ=I^ki$;+=V^Kag~dAO(KBpy0|?&L)m&CWl4J@^>LDi1qB5WhG-c5sOnsoGZ_*?S^xQkMlC;%?MX~ z5EVsBq-SmI@6dJb8^^5w2KX@tC3S`o%{|)gwI~_YdR}V}^72Gcfx{KgrfDRHF`C44 z$hd^u{s*)Li!r;IUUiRAxURr-?#!Lzq->;ruN9@__b}JvphU>^5PAH*k(RN+jz_bu zC0j%Pxjkhjbe49}4bn9|pClhTErG3%Z{)}PjiX?fq}n42zS}~WGMNknnHg^EWK)=A z`g{3f4hrgzk|?$)h{t{&o?Oh`w;@R%v!$lKo~x&k$(iZzciiA%KiZMfG7|Po!~H|@ z{amUIa{ArJD)$^r)8eSFuMhBkSGk7;cmKEBeY(s(5uAk2dy(RcotfUBctL&g@r|7f zw5}{3`Egqx|GltJJ>&N~OtGpDFTNUBoa#@DIyyyv2_e~PoVf-btU|Fc?noLdYEuf9_5-ya@rw(l+~K(c6vcKYWZ zt$%NA@3CA|*5`?d%#WEgbhNan&2zM)ME|+-y;0}hPPXP~r;}MxrSYIarC)rd+{5$V z`z6}=@UST9wi?TjP*$24LRlt<=VNtyn+uP9~YGJ+=kVhnEU zjCam|e-b$>PZ^~+cJ#mS{?oM(I{D_&Is13K8nI!V|7_KaqB?^@r1AXN_xs-7DbCW} zg*R8u*B@pw;4SlJzR?%HJl#P?LfB?S25=lFO7 zU~ZK7O^(B&*1tXlAE##5xDTN3F#Ltlb7RekL+iyeOKGbPA(Fwy_{OiVE-eB572Fy> zw4XzZ;n=ae0Ksn!DtFx)?>L)l`TdKj5Xn<92X7FJtec}3gXpDIky*|g>yle`>|v6S zcnX-CaPd`ER#wepcLfXF01MD2`sIE>!FP=*JHCx~=9x7;-$sI}_oLIMY@pJgW_G;u z2~c>J!;os}8u}FtPkpXc=jD7`#+$!e)pIA?GU#1P;%?t2K{c+vvt`E#uXPFhgc08c z_$vVLfAP&V1#A!d)!f(iU4XQFfVzO~=g=s!va^#ELRpGkX1sxjf|->L)P{4O13-QO z4uR@l4e0Rt%DcBtle*Eu77>gt5eEP*1jI@IgMX$JLXRr==3_lT46fr#ZZyWl9zPfK z1ps{TbF9)kYmu$x@A1po)yNXPGIaW0E{FTUthjc zP$1TlqGH&UZ+GN`?d^9ajhi!V`_8y7Of=voOuxOReMB$KqF3r(n?-h-duqgG>gP6> z>0kONkXkRbe8H9W?cMu+eQjl=BZpJQn{3lFG$CTA0jWrOO5Cb}r_;1^I6y+GK7S5k z*G%C9EGOB#WlL6O=5_SW*E&TPNgX%WM+rVed&Ku29juQk2mE43?ZwZxT<wjg57gAEONp z4kobk+ZzhZ2OXDZ&1%EgWw3PpW16WgcC*9k(V{j%DB@xcgGs10wSi^Y>4Tskra9Wc zFt)a~@~JkQ&C%iE`^gUpF(8A`Uba&l4Mr1R}-n^NbPiZpGAu!ibN!Mox!?$mrUwLPYzn~&PrNXYcDsW`80v3!~5+Cd<9lau!~{!?jXr{x)HWc&L1h)qWGH?*=Mza1YR zKV9JOznk;Yn_qxp*9skdd2|XZis(5xIe}rT&;x2hn62lZAYQ?`Hu290rC-jvia@zhAoRrfUnQC`Ww+ZXv}ofdx)Yuwu7}3?Xt&*y7uX z_@Tylz3=mA$Jth$5y+v(+*fm*7n}eO5LeYe^R|wT$|G&mf72~0IKhg=FDPic!RFH> zZ5eu^t|}!D9&8g45qYgw7Kd9s)cvA*!q>pqSnk2D{q6a7$3e*!CVK^u@_?T%EA!lt z8wSD2NpK&M9}as?nfd$qZDv4Hol@p~);_?F><{C-6olyeF)?8+q%3gDMdQ`i);Bsa zs6zX&l{aoMA-DRlCM0BZ;$g`4XDuUu{;c3>Q`?`Cy-K$R|1IqmN}Cl} zYGY4|0=yel`0P`T5w6pur+Z%7!U$yDSFJ>o$jtrzIs%LdOb3LxEk`>rG4VL|@kVtJ9f@Uo<{bz{O#r0; zd*NP}GIUi^ul*&8!Qaznvx-hLeSj80>;gf&#)UlI>J~eH0M3sTwb8v>A|?CNkDC1q z%5P?7CIdQyVq@&ab4U*T{r%-wou9G!I!roo5-$I<04v>kl+;*|CvUG)?7WYLhKBT5 z^5d}dgQ=>qAM5Ir?#lmVssiLumM6ns>y||0>6$b2$^umV7Ri+3rDCx-{2t4*`14B^SVdYOvujZ3m0Sh2UdnKSc?$4h{}(EG1YSo2&UE@*e;oA4N@W70M`oEQhu> 
zJdFFlupl@u*H>BT?mqxRcJP>Fh-?b}^*1Jb>N(HeVGz&QN)V|01q4k+T0caZ@4hlm=)HIzC#H+?DwsAGQb`B1dAYWZYot zMD{kB3W1hCK=N_cp#YoUr$8>B-?w@c3cS6BVvbJxP%g2L5T>q`y3+>A-Me>hi)+cz z+rSFXnoLc>q~8V!g5a_rC;hXq@JQ4Kj0xW-H>7R8IGRz0{|q?Df9%7}^|b{(2z;s# z1K2OUIieCRq!0B_ZZzApX>*`l;wtBbm(+;dek2%)a;*6NK)G_1VT0`0*jTzlfh-rw zG%dYnMhFV~7Q7et2}-6k*WVDy-=ApY20SNZ)AQ7RptcWS>4kdYP6~=02A@B)FTH3o z&CJV7wd%~hR^qx}URU>XytMRZPb*8y7dk~E$O)xnoA^vT0r51zT*dJVqiq!UcA%OT z3GP2-%cJG?y(Ml=_)21hULKt^9J)ftpS=U<*0s~%Y@Bt{m7~e&=~hqehlYoJ%F3hx zu}AFP4z)T;nQ)T&&4hM~>2!!LhLbtSx{XNwLRD1k{h~CMHi_ z7fS&dog4KI6Sw_AV%3rTp|WxtdR0fBO=C?cYj|VJZ`VkTc%^mLi!>jqN9T|zRc?Fm zE2Ow-@TL7jk=}(r%DsU7u0xkGo7a(+@HBqmFZECaVn#`LuPx9^?R2|UP&QY&8S|}f zi$xO)3;&Fa40XELi)2d>V4pfmXF%)=3JNld3u;B>=R`W+z4&TvxDb_^^?ckP01rp` zE58*Mjw;}4f`ztT=0s=q+s`Q=P8%N^z(!-96HTycw!j2bP!ioT&j$3YP5HKcoA){} zQpM*H)EM%{t;rr0$Ov|}whUZc0Vp2=c#;#NZZ0l;{rxh&l+1)cNl2r>7POD2>&*bj z$c{=x`qS_l*w{3prCtGlMZF#XDy?;eI`*Nu`k^=Z;SfLz)TlGi3d6jO?7w$({KN$j za(0f6zaZ8m+w_*Q_t+>FufdC%%EXYJ5|6#po!G; z@dB>1rXN3k)X44)Q*108cCc{pvc5@k3VAd&HI)Eii_owPtAqIUKi&^eJ%Hs)fc8!s z8^E*zIRkuNbY$6XyhFIFyPHpD;c*R}k|KsiYZ37QR%3YI2{CKsUz z3W&4E2L^D^#KpxyzkdXML9@W#2x)U31vcMpfoJG*Y=4YRsX9loVmL>#MThLNUqHZ( zPmdXky*A>IHktqxBuAy(bWU-lqt{37zErg9mnKved~5x+SiT zlRb1kE*&a=b&w(UNk{GCR56Ph999>KB2s0RR{h-qR~x=KA_fMyJhj4FY6 z1jWs2(2tF&9d!#G*$t}#)PDyDgh+d>JKyVz-z>w>Umnv4W*CkAY?H1Mw8d4~4D zfhvHqtlV5<)EsJVZR*=ptg2rMZ(11FN1jHl4VxqK^IE*?dW;55<-}TuT8ZupwS*g~ zgkB>z)C+<1Q>n*FKBVNkB|BSWT-(mZE5&p|k^z$E@YIHztI2CcXl@ zaB%1yNGdWy4n->~C#SNrGX_I~EsJ`2ZIIuaU#K4+8yh18r-XzA_l>oMb__?>7J6sU zEJ!|kz<3o`_b!WDp}?m;OB_`MK!3o!8j zYEgTZA=3I8YBI5nQ2l@Q_1%eyiBZ4QhHyoC-$Q9fxawm!YfYflqk*MGTZ@(TmAX- z9vd4QP!&UsXpLi8_WB2<-hml14CVz&YE^#;oZDER+#qHdh^2r-!hX^ltD#_E`lu$r zI6ud=7yCkUQL~KNAM}wDU4UR9&^Aeqh&%eEsKgkUn4AkdBn;KmyE%O2iJs@_#HK&! zbki74379oACH9vf4Iox)f;;V@6W{tCi1)>ncPzJV-O9G^)*kujiQ0f}f^4|aoTN~z zvVhGYJ^;*!QCya3({t)ytEr4x_D|ZyQoowhN_$FJTU!(50ML-tIRGnfj|IR$ER2Ej z6jV6k(La3n5E&E0n(u*{N%B*oTUl9dHG;lKgy(8583qN=@B>p*Q=4)o5H*0V+rSOA zQ8_1<%rk9zr6AyymX?AelKB+W92GPsqhe8h-_s-2GmaTaZJVAKITJUpNO`;UPELhS{c=fa*0LAED5$VYPS z+&O40!KU9;xJT^bDhG#$f!9oKZ%8)Xw6*0EQ&DKhOVTYO?_MJ>X9R~;Lgh63^6XBH zi}~=`>G8bY&HQsV=-8;>Nd}dE^|2=}T@i&^v+uavkDou!ARogEtw3Q|=y@6G_gE>J zyD=RP=ej~^VPTQHPzUI+^!rr?21kBx$I5ldM6J7?A+s@dA*PGl$qv3pGr4nr^jkY{ z5ipNFW;myQ4t!~93dWcw)3ACE=7Ig$!cOBI4=%n+MhMG^?m&G%a`G0}hrT0^i$@`v z7(o?qc6L^S;0&T9tETo?p|grLXJ*52oJqcW^}Myt)|yvsD*E|62j@5R()=}w8oHB} zXb=rM_8cK*mPaIM}vUK^L$`>ppSwl7UrvlCD>8v5O3JDHLeT z1e>w=15f>VyS*6$LCmz~_Z>VKnPfks-yMFr=XAY?Yoc5rG+|>DTq3~&U{h66Lj^Bm^wdG6VISNosC=HGiT~Pp(km&En#2f}Y^le$=!}uM*mk`yV57j|b zCpm~w^Xb#4F<5(q@NgJw=SR|Ruh25a;Nlmw7i4xcsBR0c3#1oj8m+;B((@9q6GM4# z?Q~;6F@oG_Br663nV_jrPgZIsHmT*?>JvBe?;!^&1%YZHxe*mM&!)Eul^3eV`qB_R zf&b7+|IE!XgYF^B??D=>L169pLMUK{@~%X2K!hcvX++8?i{Cu{KesvE~6)#?^8C ztXcL8Sq|19m|WV~J{hIUyG%@mwr+WUn^Kwk!ppF?mcY)7D=UJx#xAF*#ztRXXhnzR zBtIxq{>QL+I!~ycKe@^Dg=T6{lZmg+;f>`{Ub(2)Gog0&M_IR>JGI-_qLz)Kah-nPMkbh{T-vFeHf=BUOq(&bDs61R#2B3>nSj?~&k^|;5i4T-(2Xjg#y8m~w~dZ;6$oPgoLyZ*FmA&j#3*}`QO5fRW~H@l4;JN1dvc4!2QflH1*Zfn~k(>W6Vkj-X zK|zD@Uh92zyk!*~9UVzRw2{#^H3hd&We&bK4GJ3`9+v%=jE!4dU0WN0F#^RW8GVJA z;XrFA%mniCJCRcfU}Va15>W(}zwJxKN=Ub*OM{1@tA)jw-cx$Q17YYi26thH8{WS^ zW!+n9rQum!9X2m-tcDS5oV3?xD9t;`$t%$c-Q3&=kqC3S0zhx*kRQH%i$GvCV@OW8 zFfe<|*q`N3jVc-uQ5PORe)B!Dy@TT>judb$? 
zj}JngVQ8WD$m_tUq?wv zckkAGvm1P#nT^c{S!7^g5%aUq6uuF~$0JagU|r!kyKvI%3Qh8b7`Hz^K1g`39^Acq zx6u{z>;kHvAXpf9fHWzRjFpSID;H1*&nfPfALo|w;bHlv z=V!$*gKJj$jR80lu&5Rw>W4{T@K$UFE#Gw##0vBII0?llWv)pAx)$9U&v%{U=;`Ts zD&-NanW{=Veln+1Ii+e#vxdR=BGGcj$H^}jpwqY0Chxgp?nPjwn@0o%3nzZj3$x_eI0m!3YDiCg&^N^ zO&ogl59|xU4#A!b4GqgNM5Cyf6=^SUU?tR#%dro**jQXhD-b_+PQ-S~Myj0Mlw5_A)e`M6SfDR%0}R0WZt3 zW1FEdL95r-I!(A{u+4xmFD0z5y}dySpt%YoQ7w5b7CPMj)aR#SF6*meklP1wxo2>9 zoT?9f2LaN+$mlF!3Fa2~&Y7>5^!U+A5F0Nfj8?P?3JRw1SL|MsT?LxH`5Dj>KG=&w zg_dnB*w1;9Z4-rqg9D}zVKKQB1D}Go@Oe%<&yQWf80Z-y4(J+Y5H>Hb6UIf?3haRx zB4;ZmApD!nUv+hJ7H9mR?NvsxKZh5BJ+P#r_QI=+ z=_nD&_?_CNP7rY;z4P%>Mo`i*!6i&W05fPxH&6q7IRVg6oSUrz=Ihk4hPJZn14 z>cI|p*2g(|3&LK8mo3-^XNjPHS^ zs;lcMdL|KJMm#YUgXUjG=j7VRe8lK?Q?s2?Q`?KK^BhWfYx-3h^mI(9K7V|$dundZ z2)a8VtD=L2qKxC_D=`lbUqhrEg6R$votVTU+X?d(;Vlx%pfXT&V}fv8)8l2Hh8TkK zf07`uC?JbVOF;eS9UX;;zd$rVKRz6;Scx|141WL;!U$h>s{YRsa5s3_2HTL?Sm9UO z76;5jh*^4QC>3IPyE5lPhzUl}7EXwXRVui-->d_}3p=_EEfs6R5uZHky7;SxiUjdM zXi;Dra9?nJ(BGdT->@p`haAJ-)$BtTG)5fWQ= z?6iL4S)^0)ISm7sg~cgD>^kiUTLu*H0aUR3!a|PYr<TTA*6MfZ*(W0<)(b>uoxne}1(! zG?0@Z$!jCH>j+)0>hLM7S536=t0ucMXU?dlX@MD#VLXp185-`zLwmEj^X+8PS~)Jh zexXGM@Q(P)@>sbA#ImzOs0y(1N!aUJN8mgg>QiKU_FUYoK!1{kQhW#<{f(BfH=`Mr z`~08nZE9Cwf;&>8`$#d_0H~grZ{VTP{%B1OBV9@tf9{646MIYy0mBuOg)SSG-BkUT zQ%?QW*Hazq5zsmq(d|&^)MOY&4BO-mA3h9EWgTK>YIau0)z#AZrNu#M3VsG$pj|(RnQCa?NXKE=AMr~n>d*d#xvn3ibole*G`1uFdInrN6{v3b z`M&-8lUrN75AQ6uA?J5-SzWY4RZYG&YS@;(9rZ>gS;G!)0aWobVDKs&4>1b!6ZP4& z@#<>OQOTDQ2_o6tzgyGFvFZWe&X3G}ft?67nD{ElXAiW~Q*b%b`C|UF=7VLY-N%!% zKY3~Xe%@Y3^|FmzGFHkx=5^m>#qUI53--2(-84L3^6d;^KMHuWyC%(_!T5wN)VV*i zvo{w1^aapM>(UxMK4PXrlYBO&2)deGQ0O76iTf!3iY@ijQ7esHiIT!8hC}rOrNjXWm{ZvI8s-uRf;bf#t1C1DYdM>l8I`x1V+pR6% zByI$v?oYBBzVHN#;=H^($`4+vi~JZAsoL$Wd4|yzG2R43X0N+@_wHqUAEd71fWyWZ zR4D#oP9y8phb{qy;@Ma80+$(RYkL56%vb6|ch;X=5_8d9!o|yLelzf0V!G=LI z8tDK!4w%Obq;Nb-5LhSCM~IGzo?A;LSiUtIh3c96()9iN_y0iB>>nSG1qmdI$roDz ztwULEFOt2n7YQ0^?XbWd3fv9}+0x1i35kjpPFNH%eE;D^^9JFpo~lX*M<8L6K>H-B zRjpLNg<7^vZyfF=%dTGzIN}t%jBIMpNYG@UK*9n^keEkDC65u-19IIl(P!|T?uHZw z*{&Mskub?(VT_=*v)p{JO3i$6g7V}LS}0;^5Y-ZlJ_KY|R?iNE`U)gLzyl*NbFkL^ zKD4TD(GH#Sd?ndUEWJnTL3L`*ORsIM=#s_naRhb*EOJZVvU&>NFSwjO3O1B1S`}N< z;i-d^l+U@8xTFEFJ`?s$=s2c5#RUSq>hMG2!~DDCD&nrq%cT4osD??PxTJ(u#4=K# z_;{ARk!kVa!O2biqS@|ib8TdYgm&IG|N4@)A#E;AF=K{H1l1Qy_&wh)vH9R5I(fwg zan^#i3GhmytYUEg5p0(rzRFT5IXS`vpDT5bf+2^k*948db~N9nyYPW(YNA@g8AN&@ zL_c_Nz$jm-N8O7gbtMOme$3vJ5~}+;Jsox!{n2mfrP z{#gy5?iu0IJby9BF|V%(g49$2#zu+Xe5o+Nmn9l5Y}x)8we=kcBnZw!7{p14_<=pr zuJ)Q@#zSwhn;=x50Tvbqp6Ee1-2n`l1dK+SBFLW>wc(w6?Oz$bk<@zjv2BUMmZsYJtMTGW3Lzf^2|mexq!4TFL19!;$s*P8)XUb`S_)U;qw2%FM9Jk37+Obukca znIm+SHNwezM>kP}7D~PQ{I%1+nZ9r_E}*Z^ zbaK4)yVV!ZM62>G8cY(BBW6HD(12<+OWGZnqK=^>5r$2062cHv-_UR++jQTzZ{KQv zb+d<`bh5^=0DU3Sh(4yC919vu*c5L_rj6_%95-ca^M@g)YOy>%Y5!{{sE`JW7^gHJJ_+^uPgBo6#3c07OoS`^J!zQc6WX48Wu}IC*>ZAYxEBe%E4_C7Gt1!m@oc$MXe4}^v+_Vm`vLgXdRx6U8hh8HZkQt zCpg0Wf{&m32}?ejYdg{(1Ayb8#xOIpj948(bB0tAO=$5WBMO-9gn4K8k+aiIUG*I6 zFm>J~1`8}?^l~uB+mWk|j*d*&pd*s5VQ@0QEkkEQ=5C2eL{(ec6M}xB(!foce{(cq zIwG1PzL~iqtXSwl-)cZWz*eZm82Emus`3W7gi|VANH?^Wk5|spduh8s%&d5$)Gr7z zV908(zSdIdYc_|q{wtMXzyk}b?8vcDPt%%`T2c*2(Ai5%tAHUKF@XWI1{ZXI77jFC z>D23S4kZOEID|twzM}L@y*j3ahNE z^T(bS-WpdVc2EBcLQObYb5;h(%m9Z&KwBST^E`zb4OTK6!Y=m+<7^9hntb?}V>)8C&a%JIvA1i@DdJ8nc zR2W-cYvg^>OI@K-n|H zO~;WbX2|(u#c+e=*^yth;r^H{y@h7=67Mk8B4hle)WdCgVUiGb z3Aa54DA$1-NL9y%M@N;=v*G#P1YSaj;YcS@m1(AT@~^u*#-1J)Hvg9^0vT{b%>F)v z7G_s2jN%~CDaMN5B!+&(90SaaknliZIlGIb2mN_V2~Qk6`#WP*&*4xMR5{`>RD#S7 zhJMewiZ_9DCvPPIbHEI1Kl+V(fSb~7e(VF52RDc_X{w;$$n`$B*p+54W5jm@N0^8f 
zOpL$?@i0Z@@Wl*WEBggQ&FN-pFf&3^!x5}UC_GHsZvc{sK1Q6;f#wRM9AqJr=`1p` z8H0*-8aMm;dM3%nckN{5>KIGWr79;G$A)%U5K1=G8)&*ImA6dDpFs#-tFtZVVW2YJ!n^z5xA1dq!|(yee` zo%-4Wp~3*V0i@rzgxwVfkq{D>-`?<55Y@o+SFr%*J0@@mcAkegGEgg9h-Xn}k&P3v zidOHCt_mCN1MS4Dy$@kTIFZm>*=*bueb0++XeRP(j+As0fqRS5Ykl>p_)WjN+jciY z6ku8);k!lyB)=U0+FvSM#L0lnVHu8ZY!S`SOlvVkEaCtoHyHAy0a^DzSHX&|H7&mY zl16886z@LwFvsK5m(CF&3DcYo*_jDw%Qt}Hptd|EDA?H8Yi0isYRt1AX{s8Y&wBjb z{WV?h^Rv5&^ExoX!qMyTe4!Tu@gx)mA`%h&23A(EIg((X2k|&qlS3tZ4DY%qUd~1> z(NNYjV~|bAEr@lM(y9Gtb_Qo6eEAHk$WZ1^!K@%4E_RI0sw+R3uxg+oGtJ#qO2cr* z9~HO>$$P?qVHe|iOcD>Jf_k#zerQEFAQ)Lg^*MEW0nM7tr_2Pae|f#4z7zgnjNS1~ zqS5!%V0`@a8Wn|5Bl4jIAh;FV7(x~&)gJk*?V_2w;^XJ{5&eU39Q;<3V!XeY(LCY^ znX)jcj4Ushe`|NP*RD%}V z-n?l9!-5^lO;rnf-sN5fna=-OweBk<#s6~#;QyCZt0ttG4t`<=;pV^YG^~@x6=b>VH*?2rt4RGp{yZNt3Q0WLrhU{0Wu|jc@LPa zk^rp;11GZ49^PEX?-$+0F%y%;<7T~kmxy@9^%{m}0Ld|#lLc@gOk=$`+X=&8gJZQk zx9?YOptSPJ%7MgDqwU0?6jQtMpbg?|5^-$m>C@q_uV^retj)~!sV86$EQ+iaooHT3 z_U08s$IwvF-w$#7!@B|uMI3U4>W=x`euAx{(1hF!K&_hwSW7fQjW$5MpihTm&`BI# z!+<0J2Ylz9_9@ILYz;xaf#8Y?xYNgnf+-QMezM+&^3lMQ#L+LLt(fy42BMS_zHv!c zA)OXtiUSQosxS`Z1SLcDAvJR&4o@OB8#4Q0$%}^pE(C)K0-hnbn?~noBaQ~U0=f`q zW1?`(3u7HtRBpKMMH}+=#|OcyBWU`C0gBQmOwm4I2B3h|4R0j)G3esrbR92&si7Vw z!VN=!Ol*%yv|$p9=I8;jnWL4!;7!r!)Xd#80;tI{lMsfm*a{b%Oj9fo(#A6QiwOFWSrvIR{rIS-&pC z@XU@d;gs0-&cdRi(F}l<=l!WwQc|n{-Z7<+0ccApf`!gKI{Fpz#pq4(*thk)y?W9p zSY*lp6tGa*I`Esv%m6D=_cCBuMoL>}YQ&C@9)+5f5~iSqN;t@45lBXMwWog{tLw zLEMRZF>w@7ZU!1Cx%jH#v0(;WnY~z#fk~{=u$7h79JqcBzBxb?t}hRK#T@ui8$IT` zzs8Bt!(?Y(Mo&|dx*m*XLdpq?T2|eK7xbV@dB86bZiH^Tj3E9^FXe8UZSCxw-_y{b zQierlqBBJJE<0Y=eH_M;+C7bmdR#+t))f}1 z)I6evv z0FF;sQ{HcQD~Z9Lf~JN$dhxmpJ|Oe9H!4vAM#Rw(466K`=#Py<>hnSCdajWcX4Duj zoqFod=9(T8G&_O@K&OQvs&90(4kh6-l80!5Na^lEOLRdPl-SF6fyx5CrNWa!obUiL zXD>0w2aJTW8*C>q4JJ+?uP={gIJZ#m+jko67qpMC-9iszUvWo~Bt$BxYlQm;M@a_W z0R!+uH-*C|6V(udVvvd;{o#QZwE5gn-#wc8Hfc*q<0 zVHOU=z-4p?rcCe#&2Ju9Uudfz>$y^VW7WEeQ$j$~=a2^IH05=5buW1M$@YmJ1~_AZx49apl{I*o%DY20Ee%WR;>&T??EI3c9brrzqia1%9G!6v@pw;;O z?M-H30#oc5upEUt#BV;c8AH!|sAfQ1R@q&^*2n}_Gp6^TCxj6L?lZffru3_#IIh=Z zkIDeK`|g7WXDQ19`31vim$;^b8;{X1anJN${uHb`;AS`WIg5H+lzXo0D;1{$XRY=V zl{~|gr7yWC5X8ZWj%>4X%(MzHw1)1A@}3X`suz{B6GR+gNdCAv(C-Z)IO6im!WwarQG+ed^klrF3c;Q+!} zNXc?2OSgKcaRw_4Apm(-WL{t`-Dn@8xc;x}%0L0b6!YFBJ)2^AUudmoJ`-Fvre1s>32uK{l z0z-88SwU(PGcqy)bJ$}P^C>th>~P-)c&@!@$)dwbp-7!g=`Cqm8m{9XkN*)2@y5Q-Q` zJp2a6aD{`g$`0ik;bb_$3;{IWnKm8sT9==|fKMYaXnOIpJ za2+Vt=YSsIsWC>^BEuU4^7Bs^t)WWZLkW9}%OJ1hFqWQ{XD!+B;uH8r1kE8FVoYUbOw&5=%hAGb*@{)oRW=jkbhbxBH1 zCC-;ad);zMTl-K?PY+zBiZxlYwkLR~b?d-qst~Kfo*OrA+_3Gh!f`m$XUcUoHR`o^ z`9k40hqbeyg&a{AHKaCNr!F1hwz)sRrBrA=R$E!L@k?y&)<#~d?&ZV%HUU{zt*d)1 zHY6h3-K_;hlq}g!zT;Z+964uftv`_(-?*;azkfqwt6E~p^6GEd3MomGJ{uaZKaKL(}^dChbiKcqo zCij2ufzV%pcRE0EBlKS7Xdz$1@CWP}8X78NZ!Z8n8U4o=2LO%*rm>mBsfeH@a42+u zurxLwuUtf3%#1T%`)qwuUkIeUTNLH1We z7ec|(C~>_REn@ZZnghN)oTLkG9Ku75i81mFApuG3zFaKaH1dEU#BnUhsb7~S--GfX zifN+O^A8ItP%ie`V{V73Ix*0+w#M`{->!c<&K98W#bD-**OgpjB2CV{S?N`c1X_e~ zAII_f(AN-4yPzEk2nwRFss>w(qy3x0Lp#mO!m^#PXqK)oTNpRS5ob?{$v!MV?B4|9 z&YeEJiE!IjRBXL^^(tXSRaEp-wNk=SrIWYDe!#Ar0(rT3p~nrUbIw?Z73Ol|H~YZS z0**>XBNPa&h@kw$FE~vk{hKZ9h82-`h`hYEPgGw4)|6FL2*VJFU3qo&4k{`txSRO+ z`PDljpi_e4ZIdXm;ujP&gcupdef0p$Xi)5AOihP1AEhTGFhYgmny%RBvuk7F<(*9H z+&>w|sv~{h`6*!pg_RB8f+hIY$%*>2%6RyRp4Wi^|AG!T`_GB1hck(IZxNc3(g*&gz?B1HXobGz%S} zlwK)ENi_TZmKn<(CMsHN3_?@Y(h>@-j`%9zz^RX&?m{m&Oj~>LVhenViNQ zjqdyRw_>f6-QQf!dMuQx0cKqQWpY4VGsgwb zuEL*X1kDE;);BSEn3l$VZ#((sU2dhVFE99mUYTAqB&Md8mRuO+5z)vd@q~y*T;Y4R z85OL;H*njEjbDGV^`DjK$dz#2Bb{#LvS^%kE(5mIZyuInh1uNrSQ0YlkPMElOAw{P 
z*Vp%uktEj&*1dL!HHVv`vjLS+6O?SgY_X*y0@}7|b^)gNgLq`^(G4!Ey0YG9OjiD1 z2F=}+gm9U({_GqO6Xrsofu-w?`cts~qzhzdfhBvb@+ z)ok;xyRfW4!A&hKi2&h>v`b4%)YQ~!XxoZmYzgY#IMk)#T~xHPd$NCgM;?E?e$Wz` z1DCP(^`iO%TO_wg<#D>#l>erBHFB}+@aLOPv)If&v|lt-Pc)wVT9W;uKgam`}m{m?ubFlgQN7b@0;60Nav|Cd9I!i|JeG4dPw}UrKH~Cv*n?8 z(k+RnA07L^C~W?AW<%k!)Z`ZlZ;``_3b(RIsL(f)X^Ittk$Bn^YihkRjr9)tZ`my z>6g*u;MH_=DB=}f0Nb1`DD(XM{EW=Z+cY&bF*c}aXfOiqM@{vE*i>9~sI17|-hQ~Z zOd39ZxN*#WDZ1`^)ER^~x_~+nZX-a<>85k^`>zn?5(fw`L_LX!sDj$z$CU@BMoC2l zH)|r!QxLC?(T?YfiHaiFEXdzk%)sw_)OA2}EQWgqA^h<5Yc9-yNU67<$&n2A2ti%F zZf2HtEuJ{=0^KKU>HGKZhr~Kb;6F?0qOPp0RL4NbXzg75tkjXXSs~~Y#QZuSfNUVG zmT>A!O}V6a?6XNB?C@x)Z-H$=pc45rh;&qWm7bnnm9sz~ydbDy$6DYCa&d8i-IxTe z3*mZ+iJ}juz`}bhFl|BC*^IlXfqd;(67ZzY%*r3frtkwGFY|A&NuTX&bqRZ8r35A= zudJN(?AfUry=;Jd_ykatI6^M5OG)V(I(zc~iX~3P%m9d?iPqHBY2H=9k`w*6cGWA7 z8+hK$oDSoc_H47z=_SbFzy|Eq>OK$SZmsP-69XPiMNfYo@KHF$2AW|xka0=$zWnSnr#LqY89Y$TWze8h>ZS`6Uv3Ibxra(0?KIVHvZ z*Qb4ftFIdqpPQYv4qQ%c@Kx|jScvFZEqk$1ePl?if~EScw6*lt!AmWdCr?}Vtu?lm zz}0!=^peJauy=X4eAx%Li0I>QENeU2HYO4NUs*5qcYb`*FeF0NeEh!GRR?==-vz3u z>(Y1Jq84@rk`A;#Gvv)KYaC5pkayKGH|I!6Nr9?Q9HN8C8^?%i-NDKUx(+D0B`{9; zw!L8oUa3TQhKZL-ojJ1^K!TtX!y_a7Fw0`U&Y>4?-@g5Zk&&6u#|~@jUL%U?XSrrA z-cbE!aQG-BF77i35(qmX%>haihY^S)kk-~b+qP}1waq)1$EX_!15R*MlrQ8NlpZD~ zrpJ#T1Gjc4v1ec)BxKzciI`}?nUwTRsQ^3gZq!f`GfBWa;@xkaNzxp`!u#PwMu9o4 zp+U)c@ihY{r;^wc9OP_(9t_)?<5+tz=o@UibhyAnxWtK9PGLSk9G*bjhDJx%LiJx- zUzyCA4|5HHM2dGngg3$!wu&(irltO~vyP~i=X7=Bs@6Rhx}BI|s*&*{t!Y$h z2N!oyP_QyHKOkC|QQd*SxAPO-!~q}w)&xu!iTsBl4yldzsC>lbEi%D}h<66UBgTfq z3e!%}j~_qNoc>93Z5rkB`4eAB?~yk zk&(@fQ|;i1xM(?C0AjAQFWz>8!Vq7SJoGIQ&p^EM1+Ob)MUlkA*2roIU_teGW22%!tvW!Q28N@k8(amKFdvgIU-V${s)FKIoYw zx3Py%&#N)%2V@S3jQj*dvDCYi*^4&D3wfaiOX|;_r}IiBQXUlpFM#=oB}fYR3Eq8~Yr$3Fq@8!Wsr} zJy7F|1Be7s_s%IODk2mEya%G;OM=YOaN?ukmQ)HTF24o`2{ZV!KS{euNfY2O#;LU1 z*!XK^UmzUZ#)dfHY#uB~7#x|PR1wAiyeUr*18n$nR6!)tF1$KGuoVnMp*Rq}OGs;Y zuS8orbO#>YV)#s*guEFc=4iNt*46!hbZ==nW_V8+=QW`$N&g@Z96lE8Y1mv#6(dPG5tZE+D)62iY=U~mkg zhdy|-`}}v4#x8Ow>T8E;j@SMB070af0}EJpDp)*T4yS9#b`RtZhvW!7k>Em@E>Paa z7|+1kS`~^baRL+~!673LBpmGgDi)H&8^sX3i&((+T&u(Q=C6nDQ`tOOU3Eum753Jb zUR{`EWUVZC<6H#M2S8Sj548%fhEWBl$Hy;tZUrABY)_b6pT#f+Ep-Q+JH#+L&qgn{ z=jQ@Pt+VcVi+`Obfl(GWKx_$Mote&rf>j;pKB0r4gUIOV#bf4V2Q36(xCSJWcxMmP z^&}YRanK+FTnJG112lKcc54LZe!4`e%{ToTz{NqiPl2oxdJ@h}1so9zfrWC@1Jb4Q zvEmPKWCY#-jm^qxC+=J0r61Z3m!qPiabDmMaRvtPgpG~O*>d6x<9#-3uvg;jfcta} zE4<89P%L3>^#dRV6m1$dnk{~LF})VLNYI@-o1k9f%)>j^IdkH?1o)mG6AAC00p;4@ zkPDF^&mURQA6U!sdGSh}$%I*O;mftId)-Z!Mna!P1t^eGkzeTX7zCkuH zx`*72ijwMML?m~TQEJxWqe?~FUGJYKZL>1Z)^~2Vbrl^;&pDdnokC_RaQpW0O{?zn zCaxmGSH@Oea)vLHD;{%%M+TZJUbOQ^u>S%MQO^Y zPL&RC4nBso~K0)L9PS8`E$m#z@zpzyb@x~ z)MSWhv@*7DyVP6JsyZ@lsySdOJPW z+HIEb=*;AmQ45)#NGDS4z&jj|0{EjMJx0o?%MJlMKAs-l5aOjlukrdD=r0C%3!$s) z^5~nmy=Q##zJzuhz+>1PrLcH(>@?GD5#f%X(uf#h?z75H(bkts=7Y7-b1#ImwdN$$4LS&g+bS zpV#xp`7^!ze(wAJey{I!eYX3Xz?a`jjV@_a7ep|Lp=bZoc?(+GRAaO~As+ zsB>|3e+JRy_#pN_U-zbAS4AgdI8kxJ?c1lKg!~lSSK_#5C>3)ro{m%NA}%B32(JeZ>*Dx&-M zFzP9@=gu89t|TYN8_wR~^-ly>rK2EW{MqGG+vkQ}SwrcL8wAJFj8?-te>Hu2a{6wz zr(k5(NgwRON^WRPn4qAmZ3Hz}_v<%gi1_U>vQ>|YDuh}&($2$2-Q3tNfLM7_&%hh( zKMiy?JtM=Z_NtDK&a(Zpg~URx%o*6y(h`P$5fy1DoBi%eYeY04>yHWx_g3uyKNjPz znE24HN=^hY+QO0f11+X2qhVtfLzu`lrGV5)TGm$NJpDkN~;NNIyx=zxP8=$oBQ-fDE$14;W$s) zxy=c=Kws-IP%pA5(i3DGaiisk6zo`Zk|Oye_9R--e%lT>#y)*xcpPg8JtYlub$Hf8 zx+MOD^#OV)I>E$eEMD9fqTd$w{u{i?Jq+Fi7S$5fHf!zLD*K3CI2Gi?BG*guzhOGd zfeQiXqKY;*SqHnQ%%4ABq!j>^BEwRWL7jk>eoKGi@Pw~bAQVIa)x*}x;lQ1$yOq(@pQdyB2Qyhl7xw#2~eDXyx?djNw6OAGk zs}a}1(OVR2XBoXob`@z^N(XNjuwKTxd~mFZZ4Zu5U0xgEY`2_aLxLfc(5fm2(y=Vt 
zQlJ@%#RPO4awKBx8bU44paw1_xPZnIJUg!7RsM!=`C{6iGJZ;1lK$|a&e*ZNXz~DU z*V9x%$NlBYLsi{0r)luGP-|wQH;mn^r>onOC5f*Xj3S6{lPpz!D0R0kPC;>JAbybvYePu^BWn2KhyfS1J9po%q}0P--Vs2OQD)|ssQSg?2HdHz z%T2Lg1^y%Pl3*$##@qJnMuGj0|44(RU0xNK6>9hxpEdFqbhg-!?FcUU`CRPan-JHG`PElzVo$$S8-dOFT&9BxG7qQw(|6oFhYN%KyB@L-WI=RtQYx*}%4`l!hk z&fj74&YBxMV?l7F^j23F!MPwyh{se{F>h-O9N0{rX(N)%7(o>RD_PCF9b*+mnCB#> zl7LoXarUDxbVCN_%`nadGm8!#H%<`SXcBhXw{5f7envnWo-Y;2dd;t_>PN6Uk{Oef1WRS$^{erF4go<#HecbA z-Xd)V!ZGu)&CAQfaNrPK@6X7CaQ3S)W4zefqi)Anho2|GgnXadsj0zBYIVMxJay_Q zq8TIPl*Q_}Z5+c~(f_?cT>=pQm~$6VV*fGpP_-v;MVP7@ zL?ctoAtTouR3d0f8c8?e_FPAWb!>1^?C^ig-1gv~GqAc>ll5=Wg7%vAtgQJClbf^U*TzCMX1yNF|4$F)E>|daG+}TwFo=5nDUE zxe(=wl~+jn7v7YD&)k2EG{1*whV3OMK7ZhOtr}a3(n1x%1_e{qpQ)M1viH$jsI8+j z&c4b7PEbj`Rfl@<=-ua`AV-KpFO7Y!rKQ!0myrz3I&=E8w~tTbo%f$fTO(3M&>F?_ z1i$)Jlp08rO=?eBc;m~E!Q!nsO%1+lH;H(#}J<9*snityuvdc7wX zXGBn@C%7N!KVU$4Y3VJxunE2y+1a>2X>9|B7LQu+i${T#EeNG!x$X)+o;7xt7Bf97 zE2X%2x!);&|GD(U*CKyhw=V3_<@#qdn6wsa$w9%gvdYfx^8GHv-*X{QkXT2(*sJCm zsobA3G@8Qb%nv_z8WRD}75{ZbCV4EP`!)%fa+0@$fMqR^CDl0FN1=&ute`qFVm6q}lv0r$|UqbNmi z@YkE4-VEPn^V%D7mM%E6`iEV!C7%RfSLI-eM4Scr^r&^J9rm*&A0RRd6j^fd=;Bw@ zZ8v*=4D9D2;;9U&%*5?Xo$3!P3#%2|-44Se|6ZQah-_fL+jnKQGQYp-3K03q-fLs}fg9uY}Nf(vR#yKMjX2Psb-h0!JvAAUsT;B5tJKywIAZhF)= z$-vZ+*2xc9Y}Z=$t3lk6yfgpk zAINeS|0&C9wAFP({=?kSo{n#;w(-^I=ti3@Rp&!oF1(?1qe=XHyS+WH?MW)IJ=Z!I z$H3qmKUn`DrJ18bQVZF*MCMF^=(E`Ffz+lKx#&U>T+Wx7fg9pk zYD8t{e=v2Qf+7`RtdeLckaNLUD85-LuLM!I^RC!&2`>rtoq6I4kJ>g7Gk0$fy0eA9 zL}B>#_U>t0vbPRyfHG}^nw#UAkib2#e5ruCZ&A9Otjx{L#YFHuLxGxFihy(|9Vx36 zAW?eJ*wVS*qmyKwOwMsx9A>VJqE`Z30$(m$&lDit?PEnF-f#T6e{^eX#pz_w|fSw(4g?p&Dt2i@oc<|#>awkB3G0Lh;BqHZuN zFa>>%B~G+n5EJ24Jo8B#?=df+?L5w$$uaLo-eZ?SGj+}6a;vl_Py7(g_Nnn}8s4eX z{$VaZVERP5GF*A^@cUkBg9k_K2(v(eFHxWf^k~!aZtUEmT`EdrQ4aD4&7L#o4qis! zc*gdVuLzT!$^0NMvf&qG_?I|uT^e-4R|MNfyE(@!G=7C7%%O}Rd@U#alp1fZVuW$+ zeZc+BfqW$vWWki*{sHGGK{T)pj}q*Ev+hKSF%)lL$5sD+qNh0 zrEMoMDgb4}-J#fRGC7hzB-A?D1)BIE86KjB2j~$>f}tgZbBKdZcs4e!JCa^>OMF~Y zbV_ZY?ugcuL|*je4nES1bW&wnw1*64oCER#7LGQOb|j=^%0Ca0r-l{G{8$N{^n_z2 z=z%a2+)YnEObr?7PfI0i`7ls`s7+o@p(yDGZleNR(E#7iuVG<@6;S}BOg?Tf^ zBK&;yqJ>_LeSWos(P%0Pn(nm>&kiNEac-2CqtnO1sm>>yf1n(a#g@el6Gso-Res`c z)L&6fimC|ZqeAuO7Vm2PWZ~`M0M4Jb``v==45INOp?T)rw6yW7ZYllr;PWwSuT%Y#1kxCssWo$(oX1eT$S+Es?|oMj z8g*e#c^~P%06F%QTzUZiXb^QS4J@);#fi1vd2g7 z-!I$wSp4+opiSi^jXFIxsco4)IXTte$UW{;zVqSu2UA}c{5xD)aNOPhXYBL&!45Ie nPrK!6{VCr0zW|MG8`5XpWBoBP{S>_XPsw=JyqS^TuiE!739Z;b diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-0-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-0-linux.png new file mode 100644 index 0000000000000000000000000000000000000000..2d3aa180649fc0b9aa25c81623c76c0bffa0a051 GIT binary patch literal 17092 zcmdVCcRbdA-#4yZGP25wsI08WOq89G86wFDAtNKQQdubyk*utcB3lSWM#xHLS(%0G z&HX(3p67Ml*L7dNKknb-_s{S6<9i-S$MN}m-tX7z^<3{WQ2l}eDKR}U5fKsTc|}=G zBBC8Cc<-=xJN|opE{_cVv&~UcL7FJ@3*$Hu(GjBavgfp2V z)3AJ*4ry9)_nM2PYs)L{nDg3jj%Zr>ZBZI~P5xn{xhcMxi0G;rov(4{*K{ra68gVC z=nopO7agoL-1GNOe}>OJ{`-!9`~UgI^V5;0t}d}-$7K6?Nl72g&(CM+&MQ!8HW@cf z1{u7+?R@p>5&aP+Upryeir#3-YVt&ua~=zUc7u+ z!tO`ucw4HwtE-2(PH)(xwYsKevGcEQ9resaM4MAP_qZ7p*jy^G=~B2h|7So(AZmSe z!CHf|r?Yc?zDMNMsH zV{=(g&vmRhp{Ze_{z5@#r=GR-FTSS?Pokr(?d-A=5)v{qznBSZY;3eFzI@3xyQQh6 z6@B{FRPf=`uQD~){MRfHToe+1*)Q|V> zdY^ak^72+1?h2h>T2fY0BD=-2`2bf?Ibbh*qx%>;`_js#jZf?b*RPMix|qRu`0%aY zoh+Q3myC?!qoV3!uWoH_IEin3FS$iZN;>)_Ha9yvFd*QnK$OJF*Kgl4^|GZeUR=OK zY;A4rE1S}s=dh2hm-4-Tj)SlxO3=9mM_ySefvW&`^ZZOl* z?j{_q@NrZZ;X}=Q!_K$Sy?HiXgWJNeS3YW(rBC0Jw6eUj?bSTTu8+o 
zzkT)UL1(>5c0*(1)2B~WRaL2|s9LId3<`RS9FDTEY}??wqO0p~tY>H#>+kPhTwGjG zP=KYzS_G)9uC8w1zJ1K8_~lD;d;35A{YSDrr+>W1FKKhNfBC{IB^584oR>F08t<*6 zp<&EV>*M2tzaPqSv$f@oy-@Bn{^ar0d$+ll#l@>@OVcMwOmn-;1l;gcEIT$g)?#C0 zTXG(MraH)b*>!$QBk%XnkPKT`PfyQDl3(NF8ag_>oSZB7Pg|ZjbH&myA?iXb?bls{;hrS;yVSZ#=NmHOBop%`bU1odIkp3xP^ywf_PRsnU}jd zI|DS$uU$(|Pp3%v+TI>oXO^gDVq)Ut?0o&y^TNVgHa5RsU&<}Ya2jttuBD|V9=j=L zJ{v7!x4yA~r;2|uIXOAqS1zTkMkZ9ldh{p{H#ffZ(Zh##@7^sgDQR&Lj-H&Dc$JWF zE~#atF*-6jI;g-g{xmi~hHlok=4N#*ErUEujceCZ9`A7Ds+4lMc<~}Cs-(ny`14aU z0kPx9eMdR4m$9QwoHte%s>}|!9&}U_wy+pIxvLRe?Q;qN8kPKOH*%Nzm}yMpC50tbhofbjf_0>NM5Ffa$HW#$o5BR`imC_sHtUB z+wiTUP4Uchbak=)HO$`H+SkEh1-8kIdX*1L4+lBeex zhosTocaC_%b#--{o0~H;GZI@IrnC|9@yrJg9;Bvr`t|KiRh8_SGw%~7?AMoP?%%)P z+Nw$aXk=*Up76-Xh;3il_m-BEM~|8=Otf!pE|p_ZPLk{=C5=7fUXYPt?MZmOPfRpX zP^f85QBGBjc9|LcBxv#FA-#|l9w|rONB-aID|18DkCataQHs-1f)K86#zIZ`2iRF+!%vw6T6F2kmqx7Jjn3!0ezP`S=)mIMj zSl(oMnXW=RqbpZ_WSiFLaqC!ESX{V3mTjH(WuRK>faRK9`9^`mP_@+NvW$*S1g$Db zC6km_Z)fMT&`^i1O;5&}_ob!JqoYx?O>t+|7AFnJ$wPd6whho+P*Wq{zkh9^!$?p- z02?_eF|larTSte{``Zi62{PH)*&)nQo9j`fR_(9QS4?YyQ&LhI$HzZ-K>S=0TibFCO~In^x%14R9I64$ zxv-$%fmfMIjA(XJ(h#;KtFR+>1p0=YoLp5zX{iJ|d+Mj4Lwok@386pUpBYd=o|qsh zuBoY6YjzWb5U{X~=ngrWj%YkSjL)!`nnyui{!5HlZdMlg8@A)e8>0mMw;s>V-lPni zpJ+c}5)>8|cJJPf?BbilO5Wbyc`B)GS%xKkL(403qbPSwhnjengrcHnvaC-dBa_;n zKc}Y*{QBcZ@X#{$p@?m-Vlv6zy>D}Jr0b`qrm*kWT;ybBxv!Ky(?bhM(M-o8q{V&v z_N^*2EsX=4n6aksy}Lq+CRR77fSio%OSB02FdY+C3N_du`CasjjK<9#P@t;!4y^ z2XJ6QX7zhhlpY3=DLahcSo3S!~*z3-0jZcj+{RoBVS&nMY^PciAu z8$%-_wcg!>i(@TG7?)J!T3YGYTt8CP;uGILfBsxnNmaEGn@7a%hlJ;9USVNmt&xg~ zin4OX%a^+=-(oXjybpTcx^ZI?gLrlIcHV7N`{T!t+fvnjjg1AgDp%1IB_^_BnZ#qS z=;_sZuBf^W?<3irge5O6Eq(j;@rSFl^z@Wn#P4LeWKB&?foZVD`iF(A!e71O(m&WL zmUx8C@9y30C!brstgEekF^uKkN5!TRElm36LX4=x?9V#f&PT`0jEtjP4~=P?7V59J zy?n$h700nEXfd0l5OUky-J;>yITBT4V;28Pif8vK#4xchT=q;NUhh zxU1Oo*T@KF#n1l!1DIGF9@_`dn9y0*3-?O7_FDuWmZE4f8KfTBq)pGr&{9`_cIrB1 z_k{%YomXx%5})K992}Hd|Fh#HZ(;Jt8a&rI4SKLwS>h)bnjXU?7FT z$d_15Vw#q$UBp{s{3lPobDd#5DYcELe=d;vXJF7)yxI074xi-Yi5iq&8>B=UM8vZqu`}uzYy0WcFzz>cr0C%NkqW`$}0pwTZ{A2VUH~8OSp+F|oB= zgGN6@uEFJJ{46um7M&CermGu?m3y#)UMVCbq+!EjGF%gUSUIT$KhV%lO;7J?ZT)DT zTVG%A?CdP}nxv4NY+gNf?p#&XS^on`byPVd?R64VPu7?+|A;%5H?Xj=Ek+5t+1q16 z1zTKRoa|Zp&>L{`*RNk4z&NfmgA5|J0HSfHZ;@5_86K{vsQ6g%Sl$CB5;bLIs+kO`W}2dB&uDMlxB*;kXlTf) zPL(p-i?1anCT`BgxB^744Lusae)b39W0R9H$C@WlbSJpD0t{uG$;jh;4jlHCzHVp7 z&ifEsT1Z$}lY^S1^5^hyNl8f{Uyrq=WxwQuH2&MXo2Kgr%%jMUJahL_@>L3FJog$NJ!{$A5`M5qQE&$Ge z98>jkz&CrynPN`g$_xtHzqE%!E&tuCm>cyyN&W!h3O$Y1)h|bM* zyn3~wuI~Kh%e`%Fxp;cn*(ar>`Z_uyUEe*a8~~iN7maCXXaMSX_wHRnh7t%B#tyC% zTNqQ(JhyAifCbdasMHm>C>Twor_gR|vBz$4ak0;P>x{-pV&5k*F&-#N)-QO{iYhA9 z%j|T9J^&EsG-6-A905PUk_FV;eLT@bx+?%nJT|F~9T^@@J@}x%<_YkVz5QNN(#EDH zi`=el+qT*DmC2pVd;DeKgW!S0$jsDKh151r4-YI9^O@Uuz+mW8z()*~{9I4D`1rgh zzH-jfTvG^UWC_0M;6UgIL7%K_Y$Sk{0Vt&w1p>zeFR2k;O*J*_f`Z_A*{@&!?*DLa zd!GQ4RWK(Pm!$V5<9ipaix+8_nbnfX_wME3=C-+h9Z!)PsIi5o(8y~grLeH0r)Let z$4JJ`!!y~Eq>#wSvD!Jqf9h1J%Val(5zviVjOe*u@vWF#J#gdI`Eju6^jEKL13H7- zq9}Hebpe?%Ffd@F^>%e}uGio~ zLi?ZUDO5Xm?n6^kQ+4(3NjzCjK0XV9C{VnPEF)cQZL@4W78aHhx|tqJQ{O|RHZ9G} zPTiij4DZBu57$Qu3k#!uz)r@<`Bzz=018V|Pe<>lE}SxswNTT09`V4gX+ZzL1 zLCYbR*M=n#&lIu$%gV}tF^6hGM1+LCXPfR(^TYhBeCIfd5#ayh73lQl#!{c0nEHJ7 zP&Tk$aj`gCnAq+4F<{%K*wcq@5Vn_!3bv*dhz3*uv`Xx=u6K^96^>P3zdB9zz5~L_ zy7Im|*MfZJE)VAKAy3bZFm|=6?);3TBvU8~N=izo1JtwZT_A2#T48+t`7pNeiHX&< zwG$^!%pGFq`2>nFT5UvCm3HlAI%VF@%$F$Lnae!X- zx~4ySN^aRXI}6QxqVra`ckdoZVtpjvEobM$Q^f(FJ35vYesxrJ?73TBG%{jtYC43a zd6kw%a?6m2sDJuV;nY9-%`Ss;ZK*N}#o}9;scEUoNlO zvNHx6fUaXBY@n(6L_P^r)n%|Mu+lJ{y!P{FVsi3nR79xa9x}#fAgcBqd-u+*F8o3( 
zDa_BW`}`S?flBJgeq65Lhe>^ce0L|*uX4a6yU=}J-Vhx<@c*AT%|>Tx z+nSpd)zp}$^w)jYdBl?w6Yr|9-Ewgu?#)O}CI|s`S2(wkkdOeQNe-MHAf={=Ze$Cq zBhcL#g4lW|!K38JbbE#_<_4-CI#Ww~yR(BsU48vpzXpCHwK3l&lHRXSVn47wN(KM) z!Gj0l;^J{}ap0qshMdL_fpT(k%Fdm`e?dF3fYtom!ouIWyH^>gS9u&n4&ZB+t?3sU zm1bvV0494-J;gh5L72d+ACg?Ae{85)EsbXyl?M~FB?DR6`;cy2Tng^rud>&jP@b5c z1_W78lkr-gscCL*&eAy=+Ak^zh-CFuH)=pe=#MpZcSi@{&%!Q2>QC$>;<&By=XOrV zfxD-_#Gb}e05bgn>b5XH|1Hz7#C66buLJ7rks}o~H42z9T3UjlqOSJ#2POy0PN(C+ ztEg zf9uw*-S;4qme~({TwnKKQ3XfB>l8o#pZR$ROyDP$?q+7mAYGUlp`jY;>gu;{iTD{a zk|}6tWMlC}*KwJ6zNk`(u0c_+^=0%jN3N%@z#8rPB;$5M0s^k7vNAM0>@b~`np)G; zq^6*tASd^!vr|-DoW7DD^*J?Vi@OqtdYzPXFZgp~V}5R~OdT2(W}&jv*FY+EfP?n- zc7m!=|DOV4AVVAvaTzZ*P7=KfRuij+``mwcoN0phlsTZ`o>mZ1P++MrJY zA|kpvI>JIicnY!%M&(|rY+=lj9*W1>T3XKCGK9VhsX$(ScUT=1JswWZfx5Tp>2+OQ zmvFre4ct6DkK8-{PbDyVNDdePq0 zeu?|s^TN)M3p$vdmWJjJrEzI~e(yMkPcA@R1^=qN?kupE*|lqPv$G>j@lu$Rn>#I< z;z-HJBwha;@vn5adGk5H@!gV=)809ly-Z>^Wep8C0MKzq-+%zr)tA$rGoHD9iiO2T zKIwarLuGw^kP2HyM#fw+TDFNm6ov}Ga!012!MmHo=&|1GGYTwS1YqO)u<9b?`SW!U zDA#`(ZCTmc{>Zo11rAqNZ-yg*+PlQ90}nx|+9dlRkA9VR`!R0r-#{rqdL2+yfB*KF z9}6A3@h~`ebFN8hXlQ6_rO8`ZL?o$}P!iZeK(l#o-=3|*1MGNXFgiVrYUzWXKug;R z)M?$po414k4`K&W$RTl@pY|cRvOhU9LErf=0-b=M;LwFqv$`jT_^#lA{gJ&1px^h_ zxDwI{sJK6UO>b{65b@f|O5<~(j2ADiVosx3pt&&Hcu7ltL@7ZDaFw=TtO1ESdi1Cx zt3eWQ=L<@~$zq?p({kPHA73v=^LJeF?A6IFp!ke-emKYkHa43U-m zAUy?z1DN@pJ4D33v&%C>H+l*lF^GUX%u3-Qe13WYpX%|uGaI~cKsOz&umr1$sq?

    H5!~tE#FbQ-2lh*?@}2#Ke@L^QODduCKrUDul<1iaU>k zDMf#w$&s)NM?dm@l9zW1CF|%ohc_CneWh+%kSdf?x_f&3@86g3Sbp3&{PU-oot@h2 zI0-jRBO_*>w()uEj?9E>P)`^bl#c-qfg%j>VX$#QR?#yowEYf1639r#B%%15aB)Qr zLwl^tEL#$R4rc8vvh|Kp21+NWb5K(T%Cd&yciH*<|0VSdqd!BQ1@a#h-TYZcMH#4V zU{D`cR#IXBN_aIv*5AzByw&oQiTi)bN*--5RkvG9C$(T{1KRu-aQT6=X7AI}uV1-x<xnuI8h6al!+B+~FuFo8pT_TEui;J=zUkA#{Y;Ys)E*|Vk8`Lmy$YdfSg>T-F z8os~EPb+inSww^|Kfk)Z{&Vna*a|jHb#-Dle*%GiM^|Z%fC6{Yw3fl0E%cTsA?pb6!wRzyrZZd|9R2U!fRGA< zu#!~1L|50|ynXvNCKB!kI|5u1O3v7mA88s%u%8IJ!C~iMK9K($LRQy zv$E{c7z3mp2UL_BNL*JBm+vq0t=T%pn1j-T8G~QMMSQNU6%`XBeM(u$Kd)!$f1{7%6y5*GS68J-9 z_dN$deJtC^@G!IoI`BuaoA;&svB9u<=wkaDjA}EI;P<6oh<%lwj+p>URpw+o(D2Kb zF9XzmIkLl=V0iU@Y-DJU2&J06*V_?^_M1v5ZPt*uprsP1z{ zp85e-SX>-0KjY8LJAcXsZG9hyJ_ru>`qTdbq=D~BDKyC2;NRe4e67yg_sA(K1pe(` z9)j_kgT5glm}jg;YCITuCqS_Iodb-6N=j%QyjUa(3S;OY<_)yr zFVJlI$~>W9;2H4perar+3_D?NY3VxIoe#$mzDAs=Lt;Tegu9)-em65f1TbV;S{j68 zfT&GI-0XDkySBbQc;{>V{r!=AMh`SIp{6`JuKe-iM|CoQ8-=WsIu|c~s;O~I)z;K} zF6KyZq~Y-eGWD|^JvuZp;?iH?gI4Y+8iN4@u`KSCHrK@~S2h7gIIawoSy3K791DYf zz&=jW^LbPhTv2}8UQv=tHnq5z$jAoD=I2b|0ubK4@m~!>>sH9E60Kl zs1xMamET>tIXMDg+?!RHZnicy9>BmB7VH#H9+><;h-bWL{!pwImzDs)r@Ql&tEOMW zYt_=!TwN;P`rg;q2MwsluAE}e#5m!#vb>C;hn^Dg^yy&<*8{r<3Kn(>4-XGaeC(I) zTPFy&oeLB5s6udzgj){!XHL#6p6l4^RL-?}3=7`NMR$w|p^9L1RTEtjAx#*-6ciM& zFm0@@>zkTJ?M2gW8zuijY#{9EQyO}DeV`#Nt+2dNX zXft`axj_#fqV18Ale1HH_VmEk>4wYL)MPi_ni5a-lgFqG%120@8HPk&K>_ewr290S zaoq4Tl@Y?UpYCg#bE2T6rCpvKR)%j6I0E$rmIFnQi?j2aw6xlW25nv4zGCN$)YMem zTzUCccfPfoCQ!=K!a`S15Bbyftn$r?$;rbsG}nxc2gb(v>z&2nZdVKF0 z_m&VwyU*set zFOI0;Q*e2x$o?H}=y{H#Uz~5=L=no*JPP=itcA_S7Ul*aR!T~0aHAQX9Vm;uuI?>N zA~rTQ0&wG?dk`3ibrTX1ftQbJ#rB!s$AV zdcrZ%eXVy6iIt8--_Ezj_u}IT+60t25C~9~D3xk{8%PZ3CU}wTuKfPpuzzi6JE@Q+ zhCL4-ANCrwpoO_PgKQIDG_%MFaVn~?XV0F&>)Eycps1)Q)NBa7d&kof%>PzBYqfc9 zi`GD8EeH?>I&>B5L+BFdDi&5&tNHs^UArKF~AEqK2m0iY!ER$wF>&D!kAKdPpy z+W~hSGGlXOFCiaA1BGb#xO2;g=^=F8m<*o^+DUe$u2?HjS z(6O*Oq;fXza{AcAkA~jY4XJHoZ3#;@lOOayiV;R7%}p~m%q0ozzsPRBCm%W{WD0md z&@ia7XVA^IRy)cehE#p}1SVdts#>wPxwlt>pZ_X4Ev6`6oEt1TXxv+iz*eTTfJp}_ zC@Cr@TUxG?4B zdNZ-dK)tmzG)@W&|3FZ|&W_K|7~?-PGn3;(SFS~!*-d?Ydi@RfFd>X$AGH#*vmJoi zpqfRgd~0c8DL_)7?(5g^3#nO`az~)jSRM@?f&!Rp-l&O0kU$joLFKC_V3VTuk}x+C z5@zt-B6tZ2b#-@}(7uEW#})A$uFo|=U?kan6k-at4XxjCHnzmL zn3JZc)Dd3AT8)qHJ#m9J8tUEtwGU|E<>2yP=X- z{BNOUPb@cDm`tOG&nPv0GpsXcE=a#Yf7xdvO4xZP2po_KYYe0du4E~6^r#G^opBz|q z1Ce7hLN);Rm3_*`$HyM32(6h_^W|TO11kxg3LVnm(xn8QxzxOz9CTC`9Vi1ZA#@U{ zP#rDZ-TIdrinnV2Y$t@m$o=AtNT9n40ohnN4}~hNqbG>HWd zL39V0b6@;=;mAWG(Yg@UHj&N`^E=3GS(xiHl!*tKBJ-?kSt9(@`3OOmVhwD)FdXlM%rV#%-RTSjoNkbrAxC1k!lpM;0Y z=7lSzTq&~OcEfvVDi%+w;z(0-Ge0M1p!aECX)!2e*%QOV`3^(!j5SADSr77EQL&l+ zo7S;>%gD$W@PI=0`c-(_p`jEHs2v!~T_)*$rN@8&re^xNPfW@6@hBEbTUGtB0zuy( zCg>ZA*gI6up14zkEx|$sSP2?h#_=`ZdX?gS37~{&Ac*5u_R2{2S!Tl?{&2{^}k9eha$e~7NNd@lseK2Y*fB}#feFeZl43gxc`I)RU9b^+!? zG5FBfJ38(sCPuzRF2zw7EZB%-?dfo#t-4dif9FBmsSmYHYk zp>W|ANVt7#Z->?S1;6duB6F$ODIFd^`W40iG7}`Eq`TOtZe9p%{kt++ee)-@u`gf; zY(h%?C%X3yrm9pU1UKmA8(Y4c^k{I945<$u6rgHW#%)CTC}7&qzzVTpZvY<@K%kt%?dF{l{Tp^|iGK2X7)^LP1Y&b?w^1NFzg>DI{0~grLac?%*)N?=Dgv zsH4jdFQ@V&4o1yF@xcni{e)Y(eQM8UkupmJ$optxbd8`DsGo!!mF)E+6LaLY6dP#? 
zw;4hsA`ZsAfi{Y!mYXXWR@Vy`vzx<=8YuA}i2*(z>ea^1t|>mX;>!R_ayQBDAt?J} zQOaoQc6N3?Vtd6>Wv~C8Md@fM9v&XeOG?Yj%K@$g*EuiG38q`nCw+u2EB0bQU$d~- zFL7B{w-7;N$eWDfrN;i z*9)Bk-nSt_MMp;?GrQ5ICUss(Nk>%6Yui0sKGWWDoyzv;T5EX&asBxFVv{jkiNZ^txD7!jq!Lxs)D&bY1m}EXV9gWB zzr{0s{pL;X-)T>ar zCF-y%SXo(_Bt1I6eM8!}X~16HL-4-`PY@Sdo+_DE-}wjSgHdom*TfY|@jvJV2UDQA zT49f(M_^`IS#bpqt$%own278tx%$_elifm_$|2j=wjzzZ_K}k-EQJmY4k9SG^CeQ4 z|0Qg}BZtX_>{SS~UwJt>l;#WFxY(E&0BuwxG!1$NhPk;pXfrJ*-UBbgktHT2eS^7_ zo(_iOy0-X+gA}dT1HwOAo0YXSJnRiHcJy{{BpW{VaQsAlteN5;@?OdzSRMD0UL+@< zavW)ZpaCf|gig>;>IB3`sHu?sVx+v|DhI+NBjF0pBqwO5V{sv}NiS`@GddO$9Q>uB z0d^e3Pc%Vob@j7-oR$B%!gd=$hk#H3k8oFm!3;%ow6y=7#3E#Ht^~>jvVp?TXfRDl zDw~sZrI~8y78ZCoIL?N6?8|ERBV~Z#@gyt^$^vK%vc#@Td@?DJ!_WbvpFdw+S$U(E z{qe(xZIy4Q%hqf$*uceM|G~w9=Lxgm4_E@W3{VhamP!&gs>3KN-vT^CIXxMr0>1g} zo6aCZLQM#>H`4X)?rZpcG&yE*r+_VoOqvrq{In!&Vb{&f!sFu5=y;`;O=9%#$p=Y#7MMZ&H@Pc#C=R-zeXI2ZtP9|&{^ zwy3D8`j@bJGaxsk1b>uZn?JzsEl+JbzCJ13Y@Hc4pjpu`0--IZ=*`@P-yx#C-tWRgcvDfk^BmLPH+%oD-pn5&oy9eR z$t}p`An9r*u!4m&I##G=zx-za9wn*nc>K$MoBL$9DF?MZ6~8)+@DU>=<*Zmf{URqRfrSqktPjc|7Kv70VOYLCTaT3d zBI(2*9uy-%4cic2Kjbj*aTl^0B$cqfpke?SpmxF6BR_8itqq>x=Y}Y{^t+1+A|*`7 zqel^$6GRq;oGB$Mi|@pVHC3*?cnW*xcUmWh9H|^b9QxgL9|7rgD<_j+(=XNW|8&G79^c=@2wx5ovu*1Y5gwg>p!?jy{?I; zeWyIY&Y?sHxOl-^HAYlJMTLY%SyAzy^Bm633%>ggAzZy*LV}f5T0tQ-F>!n4Tc9k6 zx7h9YQaiipgekq@(NV!D6&jkS_;r9HhiQ{+J@~`eQeeV?fq|Aops?`D`^vr5diVFd zb7b$%M={`}!Qhvxwzip2_F^xLHqn9M;}#a@=b>DnfU6dPC!(G|2Qon@6MdHWMwNqv zu&@i%K+sqODL$F(8%M7syucH&8Oi+HaFWJY4=HmfZy+GA6B7k2zAz@r{bj5a&h$4C zOMuL}x#<-LQsc>R;J}O2RR5cbs;a~1A3uGX^75tpYhW#kCE_j{9OTGAxcHGgNYE8r zX9d%NR+Zz&Q~r0#>JWM2^NS-;Tc9{1M>k9t4w`ZL^l6}5gb0vrqqyb#G(4P;>>3=D zPHjW8%)Ijcb6*dh3xGbYU*ofr_twh;dh#LZfTEqt)dj>lb&9&iAunjn<%WkxDT?x` zQhQ6wrCf8;&=nv+d;7P@rZ#0ui!0TJ@5UzCi30@7B2J){Tu&cFK)XX)Y6(U*2o|(g zuo!}AZAeW#41p653Q8@cP?(3J!Ds>)Thm>+so2WJ#kX;^2Z{|LIUxt2jszbfRk_HH zoj3vb79h(CNZc>#HB?OspOu*z+Z$QG>({QKoAM2bZbFmAe;iM zyLTrV8XFtqI1vm|SA5m&+u}I21%Bi0_c!aFFbsYPqu{Y(a&5e}Eo(9vCjsf-gU z5fSpK@H^TxlyC~?#fu}XRA3~4JhMN+8Z80zY;EJBqw5FkaRezUG}Pz6*sVpfNQVW`PrFkv{EtoVo(hwjGP{btR%8$ehMh@50)!$u$o3_LR?RuLk)xJ0N_GkDc{Si+PSn3ysCi0p>JS-z?xc|#Cb;L)Hdis_!E#VP?HMd zX>EmlQLWe;cg_-c+P%{M-BND}86=x^<<`#&Se8*tP|wND1>a~g695WrsSXGTz=0L2 zaS$LJ*|>h4Q%w>}fJ*)=#qjb5F9*SLZpHUga_-o^9pBcR6B}!&t&Q^s$nKJmlc#E? zqc;Euf)Ek>=fOY(7Q@S4zVyQeDlD86xi3`<@an#?Dy07?BqUzYf-HClAP2VyBF?7i z_Ya|#lA->o0lw-h4ZeD;6*+~Si^l&@XZ;JRso8OxlthaF#m`tN?;o|bV-U|Iy2RmIiHaV`fZZTlLp;vG zqD463dLW?D++-tylq~9$ssym^5Vzjh*ZALfDQ~742k7em zoa^G_%K~|WxrP&eIOc_z2Lg;O<4%JI&>i!Ru(RU?CCUKsL$b5=pA!#EJSR9f02yDb z#o_7W$W;FYF@%Q`1kNifGtkmKJhgjs5~>17h{_GN^M;oj`-Xht(eQK?C*cU!882j9G=9P zd%nS{YHEj0R!bAIScq!ixUJJ*)xP-vV~|)`*`2W$5O4MX|A6_S#CQBS;pol5Ab3B7 zFb#CT+0pY7q-;AW432OxD&Pr04t9j1hpU6G@((YK8T~^ptfjAiI}JmN8mA5N?(9DF z6ix`D3{^(VK949W*>H{5%*+5R@bX2Y_xCp{^Pu~*D(kBe8%VtWKeppon^EbX znVEBHNo&i?9{Oa+72UG8f1#N!AV4^WjIrG74l;cU-Xx~9QHcxb4Gk?VT+t6qz55o3 zKvr#-t)ip?kgb8kDE&GjJRDh!#M(O_t|FI=M`&qn4bT9M60wAXG&B$vJZ(zAd*$|R za!`MUZ0dOPrlhbp4h9Ymn#@PYAYhK^M-Z!+iu&>@_!~Za7yxgE5e)?!itPHO7R$Wg zV6p-7SyXxVUU0fE~saJR5 zGa$Ox181Tt0LIeN6ctTa4j(sDLJEaQW@Y|J6HbF+KA%3l);86i$Z!RR2gawT(Q!TD z)?$pgQR0Z`WMBDKhmkiHyN18ve?CT_^^|}Sv1w4gfZMQKQNENV1mcPFo%iqW6pr>J zEbr)-H~RU+lk}%uxF5+A4hL+z{~w0~a4J%&z!1;&OMN}ghmYD(hyd!rqd>^vC+0Us zG|W^2A?z5v4+9jZ5zp)C(N0t%c!o2g0AqyAQ*FrqK;ux*^Kr9SQFs&x-ytmH){10- zo7;+3>IGw3Wb1DJtfer~JsW$}_8Sq=c5}l2FTmNe=vsgeP<5az!04a$QL`PfAo~n! 
z1FaPU0279Ro*r8VZqjk31f$QmSO8Z5CCm==bO%O=;?k}EvHfU2+iwoa!8^c{#oa)b z#l(khx6+1~s=uF~ZFe5$nKPcq#KVTs(Q(0}!yW>`2@ee&*z)Q|&axW#xvB~r3>yJ2 zlSrK40s45e#4Km(Z=9knnumHFM?(Sh^lXJ_ssvxs_t1+|oG#5(&gDrvk=?VWMu zZe;Nh9K$W*L;<{Wit@ouSSM(lf`X2p9?+mG*O}4N(SZs$iX6-?EJP$ax3KWgvyf-c z^Z>;&Gp7)WL_0%k!_jlsR%jElva&F)&M7O`<6L-S<0E`v`|z({*0#1~CZC7}qEtMb z_p1Hfs21ul&cO2GVTPO4QDPK~jg4_?y$sMBJ9H8;Rlr@4b%g0oi;1n^i0pck>@4l8 zr28t@v3IX>m>{@aTe;akJ%ztRk&S6`;O9h}$~ekWK4#Z(w+X4mf0);?Wl{nz>@RP& zFEiX<#^vq3g}Y@7dj|`7Ad}#`5tRuQ{Wai-_**Of9p~P67lCm=28zA^AHA`pUL6P; h$Ru0~XyB7g<~`>v)ESlD(2B5s{H76+%cvMrKwTl~leUWDj=Wa3sU15wcc>{TTyRUCO}Qm|q3`!vc0gWsv}XKy zs|J&VD_tMtw}?_ip=;5MA)<#jL%j-@SjY z#-pvG(ztf7@aD{yp`oFr*qt1@y1K^3#sw|I_Tl@EJ_c-~+rcQ+ zKY#uh=Xf{inrH~|h>F^^JUIN~#fxJuv&`J34J0fqiP>5Yy@MY#)~`9RUewX?-dJ1G zJDio78FT->q)w58cyWgJna%VVi7lSCy1J_?-&(r4t1GOmtSaM721On{dc;GQ`|_m| zy+vd4`D{%VX6CVP-;7?{U)kJPkBf^l&hEDh$bUAsq-}v;L1rev#h*4 zky!P5e*T~jA2gJe>(kTs6Zlq}7OL`mY>qitlZ(sU=r3b1-XS#8l^=2M9zh?1gC|OL zZf=fS3T`wtdooG$r+12eE6c+Zm zt4mi$XW-4PpLch2v{M|Ckia#^#>c;Z|L*VSM|9jdC?dkR$ckYt*5%KyoAw1SU&hSP zrr)_k;$dfe>eRQALGPC(CAt?bga!o-Jt-(J?`v#SIeq%HkFH?_5|W<_gN+4@EH_J*Y*iycWV#U&;2@$saM>vGSQ78g~u zv?9X7h_Brl=oA$eE_(4o`|Mf0DsL}y^8@-u%zFvjNTXw7&YU^JKu^ECvO+wi6zS2| z+xxY@Klc88OG(x*K7<4W1rgMvBO^L_hUCTWSMPJar0)#Gan3U+(Ns`SxOB<=(_>uV zjNzZ7PTxKh+xAuenCmOSP1W7qT{HjqOp?guGM!sm2vV=1V3N`p)>@Lf$JNfwwjS(EgpWEuQ zN!W~j4?lldRP?#ISQ87##NV$uI-J~7If>KiU{ZKVBVvkZalSm}${UkAv!u5(L6IfjfCFc-`~>Gf)%bg zU?y=%OKaitQ(ZRdgW}>*is`Rjz2YR(*49?!p_BJqtJd`%c6Sx898tWYm1D(ZqjQDGr%a>iM{ z>({TF{9a#~P~xJs!S8GkJn8h z!xkPE*7D@yQwa%)ZX1_t*RDA_O89Z+8&~nt1uqP|IZ4BF;6R}Iql|d5L=CQ}?yp}% zIgwLZkWQ?vt@rNTi*$mL*sR+((f06&q@;$H7NXK;V@*6o(`R+EBk09`vXP;ow45Bb zZy#6~Lt>MYHL^eD8EHihJWIlVgOCj>rD@knkg`0kNzUNnMa>X`CS`Gi6_e8!NDtj^@^yk`!rF{p}TeBhYlg7`K)|9bNza5SQu5Hq5>&> z2tHL+@@15!evclRpE`BQ-M#eX%R>TpBBe;I3JMFa+t{$NvQE#;?8(|wIX!MKjqH!Y zAtWFmfE0o66c=Yz+DA5_XD^cB7$Eao{qf`7yLaEl$I0`#M6Nr0;5~f!qNe6N z;@L*(7%wkL)^Zvho2`lP+nO2$9i1PoDaQaLu${Fkb~f}@c6LbCIMvUdJbCxwLuW^a zjQ580;IC2v7Uk;UB96SHzCZu`9^AV2vIlYZ@#9C!W8Q;)mCl+EnZ4#|AMyk48TU#r2<7*J({K7)^ zSw7VFw7rt7&r3>}l56>c{bOSdC?mhx-0TK;YnP1rxZ`qS){pU zvt=ayYLP^ZO3!r(kGL9>+qvU0(G?LK^4fg&`%3KmFH_%rd98(YR)c(UW+wA2-_QB} zllj`Zx}-nuCncRuKYPNH)MlW9Ws-C-^}>Y#I=g!fiN5lE{Cio&dwD$)Ek=feX7T6;E_U)r8?*IA~m7?+1diu$`Kj!=2-%8ft z-CBMxI+>T0w6|TLwE!zwR$98GbAYee`ug?#G0i4QB&4J)yG|JzvMn6&o0Sn1Jgu+K z%EzJ*IX!K~6~*Tn9iNgy@5VzI=IKH*NtZ z?Tnfli+FQg1E0b*{v1jhsU%e%9bMg(X;R`1ZDLwA#D#>!$EKzr>j$*j%7!0DlEUKS znLNI>wAj+Cu}g_jaJ(rz$0sRyK}&0Q3o*xW4<+81V@Hp=%}kim@4Wlo+YNUMyf&Ga znaN#Dejlgy$&)93ws|~Sk?upx@{Cu66wWrfpJcSW-*PSg(#vbePnK?t*EFSnHTtE@ zBBQGD(0R-tH(xkQR80^qi)29&sK^@=tTa-1wXvFiLs}>_2h}%iamn3|YymKhis%}-5{7h{DeC@D(^SaS38 zbuV5#K(k!{P{#K>&f-M6a&&b3`t_@$lT%7cigAToR7?!_{{7_uF`b>AK(FhJ=g*%fEFT#e0lGFR2ePZGGDf;LFj7~y zdi(fMR#rQMtlltg-KfINS8kEuRZ)?!{wuOu0nKK4m&+OZFY4-6-TEnVmR1g}hi3#p z&}nt`jIuu$itPsHCnkQPLLNVUe9xXemEZ9jSHFKpj<~Yhbj$i7XU`73-{&q~q#`Fr z96va09ryhCTz|Q%>-^WAj*h6fIG69!7YD58W@ilz4K*}00NH_ta-Tj8NsW$=S1AE_ z!ul=VEke$i?8sEi8S~M5Z9jaEL2zVr6geH0#lyp+fbec${_(EPP9ekBH2b_H5@nsf zX$%{vaO8^`zK%S>S>g3t2bWRZOPJ>Gmx;`5E|G{Lj@m+cfJsQNzRgzeBRQ%Y;NF-~NSZh5!JtR9#&G08r z1QykYFTQ0sN1PkfY@A-4$ z8mh*fjj}^K3i4y|&2sL`L^j!v9$m)ofBg9I)2EbdQ*(1Ea$ZNf#@Eh;+aaWm#EfZ@Z;O zDnzr~q+?!09$s8dj9H$fNfj=IgoV1GhjvsRlw&+o3)QOx(kK@tCauxP%;X&cXSL3<8-7R zzZI95m>3(2Cw`otA0hI(MxVYc?9j7Ju|wF+-Me?&*)6QCxj8wlAcrgoH+1p1CH>~v z**?BfEmloKW_Pt)p{664^ZT+sw{-1Iwnio5n~d3%D3bX`Z%|P+R8`5y$x9tZ@1U*F zWf2yBj@I!*%CYzwlMe~v{+ihn-@cjp7*XVNo}$A~ zeKwlIX9C%W2N!l)FOw1ZB`1IN+4?=0mX-$O9?pDsNx7casa}JaBD5)ZO@u46fN9h`Gl-k83=z8)JXzV2~q9=xi z#N3z1E|L`Q!F5;Qd2 
z03AiQ>APlJbgNYlZ3(x*rbcCWn%i-XIDj@%OaJukB#lawU_6$ zwY~p*??RKCnLRr@OGihCl`GJCTcC96)Z?ry@SDGW{knhuejp`N#_r=Ba-M4oK3nCL zm2zBMXu+9=Y>y>h>3?&}6|ID-s_MadDAKYWa5SWMGX6Y7!pA z(%Rbp#OmVk$yu);aor9q@*X-mWWw{t#vEX1sAzElLKd$uhP-+6XV7O0+**s7#KYsa zetgoB@9FMFx^;c~tpT-_UDho!Awds6#=!7>W8G=U3oQ)}JfYpJj*d>l+qY@s@gkRd z+uIirkjO!so8DJ480EP22{apt9hS$`C@Cp#-MTd(+C}kEzSRu%`2aV!)34Pj^p=~O zn-9Hzkw3kHET9s>P9B|w+=|i-f~ie@YvX&DG1eC-Rm6;~;Ru7+r9;fS5Fn8`>HIr6 z`T3{D-^Y7Cs{J=7=ODOE4i0&X)?_@!gM@@+S$D_YSJy$({>h3)p7^=4vS*bNaQc>; z8*^w5G7*+LA%S_Cp_a1RM2z>$1EO8TyQK7m*E(F5M(WYCpcn7!?(XjCS$ZB2((}>+ z*jkc@N9p9r(Y2*f?$qOMi)n!G0C$0b$_RZ8jStZWPP;w*zPK0}AAbbBznGXwp&lM> zXbsSEo7F`fUs$lSv6-EpH`3KDaa$Up9j6Zn4{t@J3|6>bMf$jY9jCV0HSl42`Y_hR z6G`a&`A<1Hd>~|ShJ221Lj~{=VpASFt639>X_;KCm`r~6`N+1-JoF4);x5_F1t15Hg$IILhf z`g(d=62wQ}#R{I#RaaLBHHRy7wYR_a_%85^g|aei`7mgL1o5k<)YO7QLf!)igDXu= zPPVbNb$_~zPQy-}WO{SENTvHX&DA`vJ$qufRl+yW`&oZ^=Dxl>j`iz!8qh_;I9w;x zb+NFp&?Kb?U1DfhScjd-@S$%zN$uAbhXVqLu>P-KOLzXRwvH}$nIoVqMgsPaGcGi( z2d|1e)cN`ILVx+eziUYKlr%MfGSVF?$nd)rqV4)YNDERcxW*(QF%fErX?)_hiHVY; zB67M>tn5?W{NjQFvR%6*rKG5-sE8W_gX`8F6YyHEwoTH zVhC82k&!X;{UVagwbo=cuBZn|Nj{tF7OY_;hh)i!mRr!;WM^lS9+JJnT*4FaQ{f=d zG8Gy(J2SJ@y4~_bq@+GcTax_Z;vIc`NvAU1-QC0T?zFaQ`uJ=C0m?Xi+fIz4)Z4;S zNpMsYt)3SpOO(ScL(0E}DS~xH-0!)~1^sQo>#y(HrLp{vxW&sU*6jbcu$6ebnrHXl zGME_wK@9F}`!A8?WdHt~L!F|T_U`?iyLE8d+7TIrYOmDd@$Zig48C^YZmkOZMxdgJ zdbMb0I8B_}ZpNf{@T3r?D-5TOgEsD7tL-fQE>Loqbbki?ma( z8NC8N@~0FXoW4(g24Fn%Kyqs3siTVvJ7sV~80cYf@xrAz(+@+3tL#es5ir!`*nzH#j&rP^%GJSFesjo3dvhBO~iByh>WOmx-yw zaa_IN&+61CWN3aqK0ZM~yWUsZ{cvGn$H$L!viJ8#sRI8I9-2h?s%i5;u0XG3zaI1g zDY3M?9Kg22@)hcql%%9d8`!J!Dk>^a9a>tbyqp8Q3Pno@b|=Z#fg+8++Wew@1bMwcN^1uAxY z`9e)Wp_o56Ice|Yl&PHydKG8ypf3mLGt}pcVB>UkMGD3JPLzyuALPaRMH&_eAYLieqPI2W;Y0quRauh`9K%^udlD5a0LzF1%dCVi9>Gc>hFM4r<}{68+{?&%^^2CJ$=tePF5Bg ziPv23VFF+K>CRk0A0V7h$V{k*5J6j7gk@wdq4xE0X?BiSGXRAG5+iiEqEta(4GAa| zLla*CnC2xY>`oFvYN(gIvwQJWNb5d>RtS(>J+i+$|2(aAvgRj1wrBb4+g$Yr)k*U#>PhHxqfU^RL!-QhYlTziHT8Y zy<%lW@ow$+@AB8L)8fBYxPNMIhf0JK;qJbklaqt`jLthCAmH(1ULpmobdb`>wkCSI z^`ShLF#+_2vGEodhNCH)lO5bw%nYi$-#`oiVuDZrS}!=b8CUe<1d%Jr+S1dr-jZ|# z!GQYPgX8s@La;Q=h4?$lRPWcX$+~m5QEP#MosV#H`=83BqNe5|a~!JPdHC>QJ3Bif z1(4DB>dNx+?PTYp{q^;aP?V1x>4IL_)x~7EIT|7VYi;fJ93&C6*8P=UWzK=DD%eP9&r2+A$f+SA3uK%96em|t?_fsj~^~KZ+^dh`!?Dge9-z|($hn$K=EP& zBA-O!?A+XOF|qc~pT~!WZa*;h@Hm>N5gr^2Y9uTq1bbiZvn6-*9Z(}Unh|^H;Naj% zJ}2qqBI~Z+?rzkCDv$5)K7Nc=Ob7ntB3u6T-5ScoeE+eHbkEgE+CW7tr6v*rzW;t~ ztnb8fMs_w6E9*VZrlBDd9i2EozuG8nmFisZx2PsqVw60fH&95!!&itDpn&mnbAK8b z2zQB8Oy2-U2Q>!g6~xe3AL1p4c0@^7K}X?{Hu}5v)Aly88oVIqq2D=3vza`tzI|>_ zAj%e)x=4WYXI}~VaZReVrRJpmiBdt&+z}==bTUuBge2w*M z={6v+|i}LmOp%0K9BlNV4y81+5WkIsX~52^0KmqAivJ=qeS^Xc$AXT z?@LJ99*qM6!;RT8id)C|T_l8|DDP6iN(AeCew<*0mAKBGjc#=1gW3|#As_yw!lI2m znI{4rNV6GTuoA2l7X@3;s;^c${5%&JUt0|s*XgqNWcz*_KH#%R2VZRSrP6;#=LeRGq9VWl=?3u;v4)-En?DRI0D`U) z&qgY|n(I6C%#tR!rW!Px&BD@^Y37ELn_z|Cu7XcT}%gakdfLv#V zbr_;g(R?UjP7%|rL^)b=tjq`;7;Sl{S3@Yv1%bmyj_4{XW|WuTLN&Z{FBaHrdZmgYCQMqfEIl1x-wiutSKR+wt!2%&t@C5$-{O2Dd zZbEZ*-iB)19T^!3kZLzr34??XlFZTwao5#PK)lK!j7!uy`ue>fCGb2rS=CX@^oEiW%;szf;M zW9zVf?Gex~jEn<&Oq8l2!;_pGvKOBG{JAWLuHCjp1B>Kt0)o#86q)4#v~R8p1Ijc+ z&lZ$O2||gP&HoqW&X(O_DILs` zE)X@M;-mWB3kiY1MMDIv2Bxjr6|Ee&iE`v^P!V9M&?@;c5coZyVZZW>U{~Z~u{tNs zW6hLbgIk+FSL(NerA=vQ*(~qkmWk`~*p?j?M*>+PnFp;_LePiLTzBgfB2-SDszd{c z3JCp(hi446-Lkmm;wGe8zi5Hvcp`UW2b$H7gmxkUp*jE!Sw;;k;d2(INMGl;&$-$x2SEx9^ zFn*jsT}b>$`49nUXlbJ$F8zLerS1-lT8uc63=G101v@s*UMh2*y3c43U zeyv{p!5)DjY#b-~o$dS^*cQSBF$t~wqy{Og2{}IirQ&iS92lYjtw*_@RA7o{mP0+jo_|qr1s7hBV;5c zBb&_%3;;4vdJ6ua+eXB$b_UH+Oc#MSc~HBYk(SmD6%nK-dj~WxK0de24Ny+JP3tHp 
zKrGdHi~Y$*Zwj=&Ct6blPY@i;z{KR=pPrtMie%IB@QFL_ajbdI6?J$hz)7|Z40z+@ z#~joG{ypq*?{o~DCYhO;X=%6ZwvqUMp~F4S(>y9D0EHc=n68l|WovI=@Z^bl@@%>Z%6Bke<%vUKoCV5{Qz?Pb>PMh{*ix zEE^piaEQ+N^P>w3ZKj3Q$8OV8m}3Qw-1xd9X6K;s8&9-31o>1zLFiyW7(gPmj|K0; zFDy*_QW>Dh)Rarw2nHrb#_ha>#YjnMx%RTgv)HnO3(*B<1yaOKS68hhsg}>5zkhsm z8Xh$ix2HKdK_MZ(=Sl_xqQW$@Ib_`mkiwrmn}r7es|J>hzye?~f=xynWzNtLkjB=` zEWk_WG$)OHFGJ@xGmKlR@cvVlnMs&(3%0%s;245iE->$E01Q!AzgJu=)wIqo=PCWS zs+_gyn~cIE55*O7Qjd*`np!gusr;0qfk7XV;_R#qV+ag(pZfc$9`|)z_a3bD>gnl0 ztAj%UYzF=e6y1n{w!Z!^Tmbd2n%UIUwEeu{15_J)j=C-4*Q$f(c5qk{t=b@1?nMKA z|31A`(q&`ghoFJ&hielLSu9Jfb{YEs;DYW+k#V{+<1O$GY!syFlZuKsaQL7Bgwt|6 zh}hWJc(qb1K(YZWqJ_g|utdS2h$ai55)K&oKdXDX=E`W%{A;SJ!qo^&|Jq@+0S?o^jb9O&nOD&U z;F37IaEPYhz~1mDCL*%$_kQ{E8dc!-vN` zYH#l;BDz$M9e?@a1sFYFDl)sLL(@SJsfmk=!=Hdg6!;MbV<*S) znvAS0=Z!T7SJ$6#`y4)81Z$Z z<{4M9GcsPYwH4;#g13&inb2sozsQ&|sZ02jcdSz-opV$3H+(J9)AWyGOpB&&vpB0%#vZX1;#W zG?W`W3vv*c&E9fX(XP1@e>2#i$RO%uk^;wXAe4fFg3!f+GQtT!=T%T3mKQNLyxmz0 zPfbltOt>O#qUwUV)OIsKFyKsGzJHv2iv&ScjaZz;E4RAwIAr*H)isy0t_7vQ!ona zqfY^zT|INaeTdKzCmr++HVo(x7?i^WYrW3sJlke2|#16bhJUq5wQZNO3wR8iQeKAeMiYty%`Bvo>3)Du`>SjaA1 zxDV%KrPnQh<56Myu_R7(;*yfikRVifQh{Q2y)69o%Y&Oclf!#$zqIsP--cGOUYyXS z+V8^q!Jx9J42}N7yG90Mj^Nu?l{FUhFKTtm@_#!#@XvSbfyd(iJZbR%#y7899CZJS zd(5q>@_8lz3oAOKM~{fBnD5rfK!mw>k0MwJw#a?TCy{aRHRRs6(NH7+7g?0|>>*61 z#d+E$l9`JSe`5#__XkaR%*CcN9zb2*d&5oRz^^Bsl}=vf0YArmM7EqWe@U%tXl+?E zFRsUY*L9NY>{aO+DG__7w|3AS*%BnEkkGE<1gCP|yxDhr8Al~-?-7ut0u)$-6g@!_8);o7j{M&OV9gd}SFlaVbFc>9hd_(a=WWfSM zwQCp3NLP1v(}CS#rwoup;myBzk-=jajt#KbWqn`2g2}S6u;}{yx&HIp?xS7tCSV4) zZE*Ad!Fq2VHm?>iymYCjxA)hI&qJKLf6sPj{LOZ^<&!SAnSLOgosibQew{y9X&Sib zr28hn2CcIq>=zd105htO~C-@o(ub9nqOyxBDD zwkd{M9UA&U*K*sw4CE^)Nwn^nD(_PX#swt|o;FUq62So&S5neWAR1jbJ&}t7IS^<3DE|tWssmD)ee?s)=>FREeKY-v4 z3=D*u*zt3nnFPuj(hf3N)7}>xjes&oIHu_nZ zoDs^X#B)04)*yUPG#L$Fb!~u5M02W#yGl_MaFErEi6w1cN zFxZf5`~bFqW}qAbYvZ2ax5!1aMV_DGj)%hlHTO|kT7hZ(Jp^pVxI>om)WQPHh0ZQ6 z-|VHkM^S@__ad{Q#^Qn~!i|mD`T4uqKnjlnwa_y#eD3T-Ur)^@Z4b4|{$AbiZE=|P z5%p*ah#N7|15O#zkXwD)rzwPwB289y_C$O7NjxkXkcF z7clC3lFEK@CZ41%)3Zl ztDj9qUyF2(UM{`E62QywvWva_;?j}~#y8-O0B`3IDs|}4HmL$C2JALW$(V0oV;Z{+ zj-h+X2FW&uaQWW-#KcnXKOPi;fNd|le!G|q_-)(|Ip=n?UWgsM?qv?u^aRmzti=!#v(@m_b9;BvHClj0Fo}^z&ed#R}9wFneP_L5v z%XW@wx7F3D^O(l#?bXlY5#{3xQh)U0=g+I6@C9I#QCJ$-H?nv{#l>l#o;4ym;10Y3 zUI`}_)YLZFe&N=w^0G3I6DLlf?}PLVzz=7)+e9qdIn14)`oP%s=<#E}NFtKm*>})k zA$6t4qD{d+;!7at(K_LT!@5YBIJTdRZr?szn2K_`4AoNMaf*=N)YI0EPEF0r${L%U zMN&^i?}jc79K2Y}PV)cE;d^>{)ht01ILCMYnCm48W5V!G zjmgG;o-{zZ=d4j~*`W}LVJ!oF{bi%pr!EqS&oVP{-IODC1CYnh^WBe&WAwaUh;HOI zUxOM<|pKC{dfk;2s&)^XqKVUKARQk=~ob_;POWs zBRVFzHKKoFpbQ35lsl9S#Pr9hRS4An5)g6l%zyW0Fz+fJJ&p0MDAfv=IaXapK~=0Q z9amIBRFp5tAMkpgEMGnJ*-R`Up?~J&Af1fljRyd|6dM=Ui)p@ugxkgv)jz=-!(#$R zZaewjH|-4#*Kptw3mDq~K_?@VSTZWj9g=yRiHXU_$LEzWob8&Lt!-`j@XMi$A#iG6 zvoj{U5c5~yUr?q|AmrrOlNsovh)Peny223n0Sy5$)xB+gSu0yR7s}A&0l$%5_9sB- zfsLoWi)O#&0FH`3m_qz6zt~MDW&*{&C)OTYN5`w~F^j;IpPH26pP)qgqKlzs@Ev=;FhW3Yp$c!R*em(& zn|1@Rgxl-bv17!+f_%;ITU$xWp|mM+6$1Y$L>3ej03K!>`2G7gKRv|)Rl;@~rdLR? 
z?=p>`!iU;iU%}?Ok9~bGb=KtH6O9_<7ZAu8Zu|75B*^$aYBu?C0VxVCQkT@& zSmJO@9$Q&jey#M{J7^1Y6BLB@^Y94l^uEhY$OM?hnM~m6>Dg|n#v6mqGBnjxjM-8# zBG4}Sr%cRYyJ%7&mNy5ftUf_{^x|wt8oN^SD<};UDUr2Loeg1{;}$BG_E^oq=#hg% zVdQR~gRqU5vg93h2fMm+FBE-z6r}+@z*SFbx_M zSzW}$GZ;Y>@zm5*j8tz*sEhnhp@p({dSO9VTe~(6lvOPUtq~GQyDf^@L9Nsshx7_f zl|4NxFfW6-Igojfr~~ga7Crh$7_vJ_bMn}~ya0oh_3@09%pMBxY9OU&yibDN32tK&qaC9D-?`Qv)!u4Iqpmz^L` z+76%b0=xnZ33~^S6MWv#t1u_t(cV7PXP!o5qX8Q0JBQB()U7kwpB$V6Yt1B3mEazu zcKOfKA_U=0@7^uHDX6G0L}kt2?Rb;T!pz~-U4K;7GRM=3iUbKX%x4tx8Uq*SNQjA% zAzt*%9YfUJhO}_NXlD73WL4gG^%X3t!|2cl*z(#ZP z_!U-RQBir1RX$x`+CTKMwwEry=>Z;|P1Mzq_tnHP z&2R6VL8vSevAjN{JDw92(KMm)r6SD~?%%W`+ri5IwzdV>VN8nYBx!x7p9!lrfv=^= z{kUWrz@v5UTt{s!7a!jwTziuWi`{|NQ!@ZjA@y|h^Mx7@<-BZ+ADpb;@uP9rzIqU& z+;C-wy2-c*3%eLJu~v)T0yqHYLrlpCW(qDBoHReBod%9>Zfl>8HB!0IuEI?E+QqjJ zaC&~=4F<=t!-rAph57gjii-BJverg?W20_tYik1({&@j50}zDx;N~qYCs7eQrigBT zI#1%igh3BD5RhMv965s84s8?4IXQVS<_9XH$yjNJ3yX^2W?lU8BRnDk-!l=-NNVGh zBe(QOd8hRN_-En+k1OPT+qpfQaj!jIzPK&DVVy8B&ysge<#`RroPUO-HCBRZm%^N6BaOfZi2EBgn(^R4xW5x5N zNSLeNym@%RKw|C9-i}Z1*44p>qzT{MVr;ZTVl;l^5ex7d8A(G&=Q!Fxg>uGbP`kUc z`_wx1NRanEbLUj*j3e+|?Z8BWEeKb)z_(GpOvwgPexaU9a!xP4`Pp$G@%)dO8rHp) z5sS)^kwbe49?DyI$(-8M8~|Vnxpxo#`wnin9pUrAp{irVk(_}53j#hqY9$7^AK7y3 z&j~^%C{yr0P0quLB3E<=dkwuxSGNl`@}PV7bS_>@FUFWc1$vCt_a_jlp4K!B4D}vE z6A-KMh5tIgFv$;L{f+2vpath7^+$fku2kUsJ~u2~ejB_8wG8+Fp`~S9;P&7rm?iZt zT?&Mq=k8r}L9f9gt3QGf5WVJNao{1Y3Uq7feV_o$Bv@?FU^=2t1!0a=l#-J2%x5r~ zlm2K5PZ_!|Ilb?~gPn3TZ9fOkUdGiL+vhK^HshbJciQPYu{lQYTZT`>GQHF9nw4cH0J(c3I! z2f4@{!*F_8Sy(Ptd8f?m-2>JANW8eulkAnFteVN|Jd0s*|7p;K#Pgv-B%za1jlkmHt90$5<-{tbWz3=iy z4Xgw2V01$K!(9zcMP-ku^XihJ1}hLLNvE0TBLc4qX{0NI^xzn*=-q z{zNO~C~>9X3Mwn6J&THq<=hrwj2UQ?99oq`%!4;+xdy&FJ1YybiT2+FWGGhe#8RFsvE`)qo`k~g#lJsP$<;2G5HcP3(MMuHW+ z4q5eFA|mW;Y{^MU))p2&F&l{<8cPiIe4n&E-gX1{3%kW2R#Ss_1%y9@uNYwGucc0i z&oa}Y=tpJr3*DrIK(?_2J2@=yZc2$4wa0@VE_6k&qZ_f-kooc0Z zuB==ObMv|G!cnlChy!F9{Ghs~X2Zbm>o;!XfejKB-CAjrM-qU_9D_D4v+dMQbg-zo z==bqVeULhr%F`Y_QqKN_cjllm)z;B*8LIY$<7O`lOIWuntf|NX(6f-x(UD4e4adoAHU_CkoQw$t2Ab+<#bs({ioDXm| z*l#{m3%UzEI9`ijUZ{taDccs1;DS>lwO5PWDrF-f8IL;-IK`0wh|uf1LvM!~~E zlL9}54cyk4ohNDXbN-DEDF1R{Xf72u>1#&`%LnNkD4k0Xj8Vez5|#AyVT45c?w*;+ zNy94d<3d6odV5(n8i)svB?C_&3K`khWW~fza7Bq2zP^1qQruJvqM+i*lNjDWq@u3` zty=xn1*Rr+Y3SJe1L=W>!D3beghF3ThSUam6v{UwA5&dX3w0KnJ*9zWB~S^(#O%RR9lG`bZQ+UEedV*|EtU-s525S4k&!o;3csyIl*z9@Zf=F%F*xJ zLD||k0m%cgR-@es6BhtJ*b(xD?93mlFl3=GebHlsL)89i@rGc)ivbFD4a3-1;I7}C$S8}lK@gJ9LyT;Fe|S9q>}tf|=#2c%H>ofDP(4ISUptT+5o z=SvC;v43mtwzP>Q#7$oIj4tv!HA!P0Y7u!Klq~iai4Z+Pm}%+EkyfZ4Fnz*EEi5E7 zBuTPEYKiwlbq{(a$(_;+iA%Lq<#=XKVbluod`Tp zDK|Rr(SDdSa?7^5vFKl#AC*)R)x!T(jd1Mr0rzbW>hHGWRSw(L)XS^Ufa0HFkhVad z=!xNBwC}1s@B=>safU22oS-9EONFg+b9&_OVHDrc;{+UCni!}+bp-qc@qyX&4a{fY zg-`GcZbJtN4c-_fE+`wPjEzC@x&lV<(5c~^3S+wOKV|~vp=fV#{FpyZ^JLYBZ7tm~ z*Vek@OYx&?AH?Qeo1d%F8|3U#k2-y%pFZ|Xkc(h%#?wbRjJf<=fnUp_6on4Ba(56P zP#l%2l22Wj*!XtnT0?-9{Et7sW(-qTk|$&*WF{((x5*F+gW^H0AAN)xnm|6;|7>h=`~Uk>Bu|k?nWP>3ic0A#s6o!q?8Dm+$FeiFhcsi;{|JTO2}j2x;ZG ztgKO$cZ&`)!X6Gwm~|vE$d(uv2kAZdiXQ08IAH_W8Ug0^m zSwSa(Ux)(_NfXer9IxX7e+fdl=Lb4(@JZe6?e-WrLcs^P|7S#k9M1*oIVA-J1XN(@ zj#^+8+TgciV`iHI9NUjSbee^y2^B(+kMA{nZCFw9T`!A^^RQU_{54|Fz$VJcrK&$# z?*4(f7r;I&H4qA)&{Z*JFaqck@urD%2$K;JZRjkqpAmL2mgDstJb2sP_3N0Ne~Zqt z*k+r|LGJ<|zvL}U#XtfQftRqZ08mUuTDrTp7xrwtmaML^aRO)y=Ya1K*y~tz20FS2 zVpmYcfZR9IGBXd|oW4L2_yTX0DSjjCu{sHJ1MUYW3sWW#G9G`L0!!@XhcbF5<>*bk zbq|Ub_8gv3<~v|9kfAhEWfK>Ek#0Zz8rN$Q_ibr1rWr64{T8Is$-oLQAdu~L-~5Ij z1WPV-=|zA6HHsqUzquY@_?VT#A8cKXi5RBiIvgP|!*Gv@7*9;}@O$dx{T#OS4Ie*B zfJerwDe4N)SHYZ1PDy!{`3H1a^uaV+b*-)8H6{oo659iKH_1eSNsS+X8D{OaA-uTw 
z5&)r1Ieb*nk&y|VZzJW zMj7Dcupgj2Elb0~&cKQ1Uz65h2@%DO`A9pwT#MIn-@Gx+N8dk)zNvF|vFhI2GKSWs zCQH0|0n8Wh5&UhI&lu|pFxP{Y;=$JTz;JpIAB_^y5}?esBMH!%l$aQOAVA{FxxnX< zHm6~ysw<;d5GDF+eGd+O9}dKvP?RcKNb}3vHWV%{h%R0et^2+d_Tq_`_DpUMs!&pn z`WI;MzJ!}ngV#qV9qACJMwIB7=xDGu5*Dp&uMCZhJ1`BeQe+HY#J=HpRyfa(Q}%TtF#kFxGMe?_9x7d}wXG z0dCCJcD%3@um$SN*|TRsz1_T7gjZOlXmZrTsYs}8n?ZE%Ly#bE{vWgQQSMRxq^Ir27m@+mERIhYTFs`Ijv zG6=c(8c-@mq(J$mh+U~g8`N{hk`(XAgPwx?_h*8gdV3YCo9H0Y!C3X{nJrSGjh)@Y zl$7IuTcSE*+h5-yri5<9aBGxU1-BU%7KTi9=*W?~WV^S&IK(lta>sgfbQE8JhKEkS zQZH)z3p=Xqsmxz9{`JR=7?_%=#_K5m`rG1eQU&(EzJrhG|G_`3E>JBmudKj3;9s-$ zg?ODKGb1B9IeEkO{m}e;CB`8n!JtmVpJivT{ggu-lQYEbKw59N;vPPfANz`T%-oZR zf-3pt%RsTM!PTqW3ui#N!!&rOqOna|PY+Wh*%-kU6tuRqWTvCTdm0{Hq+<_KXR^r3@~Ad6v+qtY{&1n>4pj*F9e;f~j$iu3VV zqKcqCV!jJEgOoj-5Up}r8m}~+8Bfc|sI_%PE`<{UV-HSn%~mgTv%MJ10N_%u=YbI(5!4VNW!=7z{fj<#FRhclaOr z(VLErWynk2g~s12JQ*P_dPj>Ve!x@&EEHbrbW3=t;ivBIob2qzGFZ@meE&|B9St`y zL}hCX>tJ#Z7Y4Squ$Vw5_T_gkA)*L$gn2;n__Ytr;lE8K5^-~rv$L{j9C=nSNNlKA2B@K!w6rm((lHE>;3{fh(3{7`sIIpjs-#O27e$REC z>zx12{l~tlZMg66XIN{!*ZcimstHRkEgP!i-bjf<6#Y#&nPc>c+?@U^;frK&trezA zw$OE5>z8%vCu*@qd*-_ODix?HS z*(e!0l!}T`?5%6%KqCbO1$lXf7_AVqoW@`>r_{Hxl)4%;>FLvC9t%zO>W5fAkd2T4 zG>{3~;$}s&eYR3xc9LMtxNaTy7JMVo=;Jv#?I@7gp)3k@A(g1a3~t9CK7qMwC!&_; zn*@g~`^p?|Q>DH1+Tr!e#25g)hJjLbFaMsS0g^|e1Ly{YYw zK-$Qi)E|Gtj~^eboml(#8#c3;PHIZ{|l?iA#oc;Ui|bZ232aNW{YgBBH9`1QvRw;em|Uw?r2TRQcR zKDIpQ7V^JY!_kxYyOL@4X$m{E=GJg-rRY=b)4WuZZrt#ow5NwoWcC5^9W!tqrU#( zgfj44s6f%{|)a>n5 z=9Q55pE={~ev+k1Lp>E1p zDxR*g9elS#Lz(Hb6fi$xhO#%;lapHL&zfHGqEv>>DP`6 zu!Ck;ofZcYL8Xj$1i3ujdN)^B`a_$jkS5FZGBu1VrnErzuUXOywKFLdhtDi(HIM*{ zRxt&cq=<9QA9xeN$zB=yS@JA`fkzp&I&j~DCE7+@2jY56;_7t#n+rz`xRU0T=KttfR z83lJiSP%R9+D1T06bVM?q@Pd+yZ1g{rmU(e@mP2FuVXWnxG8#Bs+Bz1&x&%-Y#Vcy z#t6lBUrF#4dF>X862zu%pI&_fAL6NSYm56N!-BfD18o#_Q`V+$lrQ9U?(YE9>k?Fh z9|N`vK-oE%-Uk({z#)L64sIL}_@lPz{KyWYJ2)A>=kOxmyjdfH%$2&G)~-WZY^?bB z0qIgCF+@5l(&A=rRY2OXv1s8Vo=-Ct7|mB*Ai)MCncWA;%<*wHKDCC zbJ3C|CclD_&Pe5vW5V2PY*Xmf>oDDJbYqioSA48(i1=J3wC?dK!88KF!{43l)~wm9 zrOsU+s1T*ImArbUG^=5FdhXt^9t~aZYT#se=$n!2Q~^ctxw8I4zbD7ZKTS(!#?2;Fv`y?mJ#?QXUmWT`nLK(yaP zU8WX>()NQ-c0Ol4xMz9AfEx4p+qGKRfB-GBO;X|3^ZL&R6pQ0Qp(W)l!i_iERMCWT?zT*GJvkc zQTHX91z|!M#w>#>l8hVoQ9HY8_WhUpe#iIhnYnA1_D?;ioQIQ+9zSj*&7+0@McsW@ z;}ISdGiQqJ-eV9~%t5s{brm8g-4(8w=dWIUeEZga;KvUieu_$UH`M%#3m}r$^(5bi zxQF1un(v;{jr!o44cC}d9y!K2P6d$h`s$wufQS8ku&8PGO0M_4zb=j!ffua8nbD~_ z*K}m#$i2E#dFWlbcBMCNG5Y}*$Bx?jA(#sBf~1T=N8|lY-d@qCVea*@#BNZca;?%= zc`h>=D^lINuOKDNoeZ2woFjje4cPtTwHtdGVrN!xgsFS-zGSZoMCNTIm_#$7>?b*_ zs8Hx7ANLnJ!MpkSkXy)(T1akmcHF)k28(CAt80^;gUse+728urdDpWpopyB$af%F? zB1w*yTQtYgG6F}8P4h|y=EwCHIU1l8M9V0+d8>Dke%HbGB>ku1Z^c z&O~2dqZw3{k3MhC9Fvn8!$ywms=l_@j1__Nni}lLCxM-y1n=KJ7ly(nvwe0ic{hZ9 zvTbb?Is|2ExR_DpNKJ0vj)9_0WkphfsgAzlvM4Rr91haKe0D)ubPCF420yosL!GKb7 z@Q@+U=4gCcT3MwUcQ5xUSi5%q6f-xsX>wcNgxSJ2CyFjKF>&0qY0bg;-PAYYh<0XD z{Uu8P00Ft}(~0cx&80Is9zcY2`Dm%4IU#?y_=1gTe6a(TZ{gZ*&>lwwTW`A*|Mlzk zl4;cjlb|nZHj@*);IuTv!jXqR>!8j)_ScDQPnm z;O2Q__680`1nN~63nHTNRjH`FoZOlhm5As4@I(YcFd;zB&`6S9b9nG$7IIn?D~Fa zUi_QgQ-yFzidJzuu{&Y6l9L?%)i$;>59IwZ(nzX7=wkH3m)X;tNZ@4sm5Agu;T{iNpnObGj?M zfMLz%0q>AXO@16V@}F1N>nR`kB?JsIS9s>UbqA9EfFIv6oE7FTl%^&Ct&$}Yr#aN4 zZoV0XQkh&CW-h^$elTlb^ssPk3aKz(=Kb^SARRT(9MbaLEH$?vTzuFeh&FWP6l7&Z zs7%n_lSo_N+6e@Fs^qU!vv!BC|IT##VFpacs z=J5cz^(3~s%YrT>4f8FT5iJ6DQ+yyQIt zjZ#Nu(EQ?Q{vdIpoU!sw8A`q=xBo$Ew5Cs|b=>)KGvxo~X3RZ^bgeYNEo{yhuJJxNP#yDBa-yLY zRr;GoPnxutA8ugqCUxT>x9c2hLL0<}1(BM?FpvfuN(T=7@>%+=b@$Gv%vf?SFI~C6 zE?>Fg#q@vxI~tL>)3{-vZx~lT@}e8=Ff2Gsi35gL8|McLT|h-7rqvCFQ>a4+4vgLK zb=wvi$H1-xEo-)I2#{NKQ-@@kuQ|8CYrQ?y@wju%qzTqzvX+Gi);YED$Ru? 
z@+l4o*9z|^zXq0EEcsX52h7{#r*HL}7q6hTT{173Bxs_nR7qu#&_oxSrh$2)N;TfD zY{1YNJHa82&UPY&%?TGfy|Au$Orze#xH!Yvv+KCac(lJt*8RNre)~AgG!LmF6)_vE_3i|!lg*|K8TXlptL8Hk)Xy-Bo<%`t`0!i-jiLZRKXidou9^8tYk}|Np*1Pxj;C1%) zCD2WfB=z5aH|A}l)9=Q<0LB8a?Nnx7>X&uv7M%u@1q)UboFLyjXRNn!DM zNW0Y}-n5ulUteL9`m6SPoJflo*Zr`wIQUC0Z1mMKw#Ec&4udxw;&$W{OFZexwc&6;Ggu5to zhSNboz5NXcv!DOSjGC^O%dMyW0Z)c8sRRpVcA2t&S^k~OCpVmyT3cgDL$NbE?Ffrd z)U>pHQz)r0Fi2A=m;IL;cJR=T9C@1i7PubdoCJ@KOI`pb;@!EMl$Te;0Ee;7xPh<* zP#-;dI?RL6Lg>4{bB~es!*~S;`T_(aaRpA!I5oAMdK!N`Q8fKb%b<0HA zLs+9^+_(Wp@6enH7F#kc++R#@m@g$JW;{vj^ph4fVhsH>iE*$Zv1IIu?-{?TsRXEhpn%ym*p^MlyoVr)$6$6x}5gaT=JT)RzoG z;!2O+y`f39MQ7#`rUW9?0`xPdPM!pI1F?d@<2eZA1K3QY!tWAY?124L{|lrQV3q%7 zK1n@59F_0WvFSZn5H1Di99)kb{oi7T$qZ*fgFxkh42<~^5)u*+o~e9z)mUwL*ho4{ zlL)>K{G2{LdI;)y_GPhJ|NVH@rrYIp@|M+q1{0-~Y(a(#8 zKLOE$-?07M`|VoTFu|oiGf_=#-r~in$aB;KsggqHoUN!>RpPHBmjFM)Yj_4BB?zLo zzX8x9S0X}8J~aI-=r!&USQjV1&8u0huUYal_p({)>`#5!1&Ugy+YVP2ZgO^BmAOMb zWDW(V&eqyFC@bpsZFF=L^e%i3&`*y#WxNS?_G7o;N6@$>o*a=QVQFDO9YjCzvUO$} zC+XZdW0K*KnwzVH8AFuoQ$3p3vcg{TATh;VxWEXRAw=p9EjZ5L91$KK?ZpV9y2i)- zr-2WWlExP-<*zdR`su$O0;t2YgV99DYNMIz#Zo0`f~=o@llv48j+i6aGB z8T5peDdN&aeOnllyieJ;9O5h$rdmvosU_W znZN1RlgyD`iSFQ(LbDrWF5~7+)Qa268>vIslJ2VMIKa`dNdGaA#1^IaDMEf|=ojD_ z!rU6!smPaEP=v)ggAwzlIhN9rv`9w-Vlr}?!ONi|mZ}(omg2+a1i5ESK!a5b6`yHD}22FxV0pw{fXldBV zz>>wYoVZ9l zSrHUz+?foETq@$p&#osY%ObFa7l*lJ@SrOGKuQC%g)Nun{QALq^(I3=WEzk1%Z1jo zwe`@haeyJDW|g#Qx&EtaYwwyq0UceUw$#|z9*7x61A0jEa7yT9`OQ~yDavSw>InR7 zG9(86u60qvriU>XA^k(wcEj41T! zd-U*s^Wg(^`o7@jNNE`<70I1O9#56AMD6&R_s}FTNb)HMMpt!u2DN>lu24d-)DkUa$bGH80A$2@!TM<~Lh)zufo zm@#l9=lAE#>h;iCYe1PR?{67bw28L zp>fqrDZy(wW5(hMQkj5gw0V&mF=K= z-?k$eqBG)f12!S}CzvU#4@Jb!-=CD_k-GF$G6y1KZTrj_J4T(m-EILh0%Vo~>dwEx zQdSxN8!VN-esDKAi?if;!0*G>-|wgAO5?a)!RA}uK@L9ix2~$oYp(PiKltn7#~=;Z zevqq_#Dv4M9XfsMS^DqE$%193dxPX`k=dYE8z_jP<5&NFrYJy6$?V8K5Yylb)7}J{ z!ddX$RbrPyN&f=w`HWpVN)lI45>Scps2!fKUrJy!nLi)B(NH}voox9&ug%2MR!m;` z@b{tQ9Whs36Rtj?BComu$q0NU_1sz~$GjvzsIf#yd{_bOzK-iOo7;g0EgL}f%kV#8 z%peiRaf6DnNYp z-E5zr1feM8pZ?G!1r?cszjv6A(Ve2MLlh2&PSdTI7ZYon5l|6YhkbXqRZYuF1E55J zIlTGN^zZ1<_)5Ut|AGz$h%)GNJr8ufj8yyjqx`!q1}7*nAa-tv9FQKl&XF_ zM>|Mz`)3)Xa`1$X?tueH#)#?V2Ldf8Mg`{C+lD|~nx6d;XR@H+e^RF`R0vpB2exyw z*Nppc3m`4>LZqU&RRsdWPRI(t&FoeZ88`ev)sAbuAM$K3#0I=Aah0+FA<5nhMTKOE z4NvVcw86ne6yF0Z8`?LJ)We4yPq61}q#8XWg&Zq>QB#nRMuS2E2utOWWPvL%s&3D+ z;tyZ>SN@7@ZtTFKv@x( zd+`B?NMHtBejZE49yYFdIiv8hIGR{sHv&Vh_k!jpX0v9krMtvkhP0Zr9`JEDfOhna z*E!xc=&B0Fi#(T&jFzI}23MB*NFz`1jz-IAZe?|F;mX;wCFWsk6WIRr5f6xffhXw6 z(9l%Uca9Y2FJ>vQEzL1w)&zBTXeegHfs7Bd%(tv#ad7n`@{vy9%0z39CQW&7fKvy0 zCUm8Enlz&sv2teSi@S;9Be0;ybMo}UL4ErY`0fFVae?ez zMll0nkREAlDSd!96T;9)2H3$2U`Wp_)157bJ~m!IyXGbH;Dnh5yGKuVcq)uJp^e3T zg)D;D`1bAf4bN1r&0DgB;Q`FCYlphUdV{D@R?NHvcj#7DmXO;S842)M$%(pd%N#qL zorkp#zzQCF)Vsefi$%Fd_G5ow_CXqt$BLY+?8%6T-VtTAG)nJnEEU##%U@L4GH*%Z ziNNxC$pq@EQ$#6TzvIRa$!>4kb!{`p6Ii8y4f%ABBOI)@83&N-^MZ; z2iTDiLf)D>Wy*kl{eB8im-fmV)?>5kBMU}$6BCO&1y(a#uRtpTbQ!mFd0kKU;#i}~ zn`7H$@I+v8Mjz=w`;!AQ8A1RVkHFmLhFG2;DYf3muRf=qN|`zl>LK0#O^)=p^M@QSZtW1 zwwl#)*sN4)G%?7ma$Hw{nH)M>Jh1kbd()l!_utn=dH0r*l7FI?Ans^BI z9JO~bPJH(XD7y3Jp<^6fZ=uS&Ft^>_`vo0VF|n3YkfC<2G@Yfie-a~^}xnni8eVEFJAnxI^lciUx<;hhUp({ zsNbW;O?V-!yyj4xJ}y5U&ka}*Y-9ma>O-Xkm38tAzdG@Dx1-gS+XB3;Myach>=?K? 
zWZ?wk5=d?oD`3$F5B{86AlXn0=lxbln9=6j1!P$1s2u3PVOgX(w zjsX@tJ28ru3LyBLw^Kcp7lqS7glYp^vf>1WuHZ1cy13mYIll(c!#Q5u_I}tnYb}~f z9iONEwBL`entr>U?3?zDO)DTu78dSZzQ~^=-x)%K4DKtZh~tWdw5b0spHuf{u8~*( z!ts<{pOn;&uh;J5!*_R62`zo`V%m^|sUn$4V=6$S{Qkc>Wiy+jXZP;CR3*roQHzF*DcLFCtoS=x}cWsmkybwtWn?FI|u(b{CTp4 zF@AGKpy7^5xK2gFElGlV>Cz?~b>>O;?OXNcjjxMf)8XK7f6I%t6Q>U!eh1PId|+-8 z{E^gWTbF_kbFp)h0bhZvbacw88$f3M{`;75T+A^}ERW2*oi`Zc!+SyKHx(rF8&EN$I#E@|#>8xajYQ4F zbOPQZ51jL`b&P6&9XK&Ba5gArthS*9M0DOPe%!lx8$gW&wAi~)c-phjra_;wXR*p>mDQb!f z4i08jZ))|ImiDlj$ouInDT#KPy+@;gwS@22IrXjkq4&$Y5gSv#6Dq*@XeGZnwK+3< zQurf?Ot5H}+sjG`4Gi^yz)dTndJ4jha_z$CK}uyaRPn(tE;fgR1Y!$^P2T+<`BPfy zW#Yk|*XL`kJa}=!y&jgnVq)@OVU;h=JCH?S=VkQGn$o7VzC-+z_X#Sb3@EE*q zn;701B&(BKamN++X{5CCDa1)8+r4SE=XY3e>B-5q6T?Z^I1BWPXeRPQP(08qq_@dT zXJ&6AkfYc(eppCooT0#Ag`BNnMob)|BWBUEg8=Pu3>k1~Wu>BN zB_k#PLTP5vIiM{-V$(vG%i%bW31E*_7+r>d&oQ*FFo(eHTic`gEc23t%a3s^lX2)i z$CXKQ1SFVYF5xEc=p4f+G>0clYwFmsvx$jxRu3(@VtM@mt?8n>n6ZM=be?2GJA$@d z1T3miCH&-VZf;SN)(4$FZB4z4QxxbUM;~scN5kLPxGrBlO*_eSXU;qI};?m{IGh9C&rp| z&v!D>4YmU{ev)$g@aQrt#V zr29kJ!5-?5kdeI!)GPY+tGA_f`NG?Pv5*JBTEYF%syMNEvKAsN2Vw=!ffc|+%x}!L za878OzeM~J`Uu<+(Qk0~aducoAb`f~wOG53mU%!bO0q*;38SlVX`-fr@=Yz|cijIN zB_mA8||#muBVQ_ zfd_+iau>Xe(Bcx~68QE{>7)*Ew{M;Ox^>jrdoelTdGW>=zQh)wRY7^t((TEo!oMmj zb1>=S{Byktc0(5s)cBr}^9+Np$KYo8i<4Wa)?3aLDNB?@NJ&bb1h9rjh3JY3?a9uD zx8|WOdWo~gXr0zgR1#*nkLcfhh5_%84UUl=+YneC8$P^e=Y3~D1S!$iMw3XQ_onGs9 zTivtSU4G~gXZ?!fW0rsY`V~0kZp6tSKAQn0m?6b&(Ctdf)vLhC%4G%$`U(Y$dS%6R zm5^YFXZ49DJh|qK_>1WEwAbq=PkNhfu(icJ)^hUe%F5t|eZ6`#3)7Hhv1z)9+d=e{ z?}MhbuU;%%XKw9odvC9u2!)xc!hqg9eCG`6afh2RF`RA&C&rf2{%Z+;;FLhr0x|;e zaqz&x1@>og_qXlkTcnyvQJ(%o-=%vOETf*~_F~w;J@tR=4TpyS6f~9!pT1`uQCFsi z{}hX>si^=FllAl~!0+kOHI;T49{DPWfpGDXpU)(1$kGt)grU2*uIq5FS0 zRddk+QwWsj09c_}mkt(;)+7-_Zq%4l0&D)@i~aDA;oJnrwZ$IKRZ~?BoT;=qt1Nl- zt%V%0727ZvX0H2>+yx!`72SO?&eFlSfinRy_>>3217dN3o>)d#4|b^MvLk~O6oQhN zPnQM6%XfA2kay{+b7nP@;5>*w4$nFMJU6b6h*mn%nrAkZsFIK4Xer~hi*rNE?Y;ez z^&akBJao3^Zc2e}VGy(O*3JR(KfitmrF*5Puh8D7zWBhxgg$Kx75WgYIi(7RjTbLj z^60?>ZXb)3wcX{EaQ6|^QuBmymhgeG%z#MkxvY)+U34una^=R2YPzr$538`3A#lFT zmJH{i=a$p+5WL>gf_FkaCwC99JINuYa?y*IYbzFVA+kgA5mJ*E&z>=$9<-~veI_{p z$tst(nS;Nb#hNv7oE@tajyB=r%Zd@s@p2~$|GCy`t?E8S^IKQQp4xZsN?ic9U7`VP*00{yI~O% zxq9U#zk6qo?Mug96U`EkR($>T%}(~*g$sr20}>l{!}WoW9`~I=z>Pg-0ki)sY$QN~ z_%S$Is{Rh&5dHDac;l(u4jg=1AXFZZKUZYAPq_F!raEDFkVx2#`ZUPB%}q^A5^vlX zykOgfQIXU<`tQ@8-+m7v`d#zYNFq35t^cBj$Um+$~Q77=w(}Yog)> zvs&+0$-X*Ld3eggA_M-KA6HClQdtx2@NjYtc%+0HJSU28V|69IPyMIHOc$rU?x}TM6lWy08i@p6je2Pg_ zP@zBmG^;B6sZ)5_$lMyOe2qh&da4D5f6=@LcBHccg5#O)1*fLhm)-M}dv)5+yIiij zUROiu#PqHerV|tz2l{VabJ}S77xxsYv6D6}RVTG)ubj$j3HvtJ7fxZWMm7;DsW+whZ3&t z5XUXBe1%a++=F0v`%@`dHOjfiX`5Lj7ELtT!O3aejG>+9`}dCeHi7oN&>JKmx?)S6VYY*_vdCZNF|LFskIFm zX0@)lk=qS=Hz2f{+A9qBcq85_Waf!Kqwfh$Bmm|NF$#)bz3Xd=x-j?SwT~gULKXxn zTErBF^lP?TWA6HA>O^y&8?PS~AK&L4e>b7%R%oO5 zfG-Bgh&9Sg1G zg47o-o1~N!f<_hHNC@GV@oi-0DmKl)6CkE+?J|(UJdL44PlG~yJ^QGGMpzf(v1rj> z1SqZni0dCnp+Q4;*vdi5BYT6)`;ol=kG3f|D9E^VrX#5xsV?%mBT-RQk5M4$n_i47 z>GLK-7|20?A(YXJfBn_mR5Kgwz!)CTe+IFW7-;hl41Tb*H0vPf-$*bJ_$c)i>Gb7F zW`=i+?0JTqC@JmvG~TG3SjPyfp)xW{D1Gsr8$1}>t=UU1LT{(>02U7-RO7vV+t>j} zTJ0Z=g2+SiM3zIqg1PpMgH85EEF>y5K(v9M7O3bYdrj`=WTsAo7f1K`hv3rI_S;2f zR3(CqPniZdObAQ;?xj z)>APFf`QdZL~r}`>zM%*(~1ww6a<4Hgop!J2jCu9t%}!F$f}u&ArK6h%RmdfM41?z?u z3z=Ab#E5I#KP*z1wvD?Jp&seHFrgN(g}wv-wD)uCHiDB)zNegfr>&D}6JAEU>#TV9 z_y4qfMi!L6{Es~^-d+Or7 z(FL-Q^4_;QdIMvj=P}xYrV~~2`6H~f3rb^hjH4sxV{y?aI%IW0e&|r{F3=@Zgitm8 z{AMAlA}QxfNORG%X3^NvuWw9Egt&vN=oiG)Nks7VvGV~{btF4Dc1M{CA zl2LD;?%B*6H=dW2Kq^3U3DS?y=PB_F6OR1Be-z!rcIx-&+6hFI+MBtQ 
ze42cAyvQTm<&Ix>HVmg4YYQeCBEU+%dR2eI79u&77A*nD>=bmG&QF%9Ie*#~J>G5h z8|n0qZ?^Pc+?5=7%fNx8a3A35zxv26D?nGax5Z(94G-DZt}`2oR{f?2HZlE$0FHbd zVLcrta49`Ky~$o$?_IS86!xS5^0X z9nxlwpoHoUG&f_UY}ey^bbG#So$+nX&j7ch?b87as4>W!D{m^`aA~$AjPhH`|gsQ zg#N~Nr`-r`DI!e}ea>v4IRMOx8`HVy=-gjrgQ}t&E%L6@HKh*YCxJaFFCQV+t&WLB z&{X5%QZZ0ZN_$4fiT4dZaJ*F-_K8sby-K`|#cf{Y)CIA%@BVU8w`kp^W;e?-RJ*V} zlH)>z!Yo1<8$*fkuH<{J-VuS$+%9peqe+yjX9F%Q>FM5KfFRbkLFs;nON6Zd|Jxth z`strJa9|$IvfQ^!O`S7>QE3}B!x1%TVW^-vJ(;mOm0UT|g@B|CCPD4^>CKyo*M?rU z$vN{)(Y523H#R8tt5vtYxt$29e01sDx!ptNpja6aQXY_3R79IoZ`!mUG&S;y7U=6& zbK5AJjTJski?-);rC`L?-@m7fqjz&p<56X$4WV;WSnU{a?q?+>_{Y#qY)K#2VI9!s zswv??`Agq9a?*N8w@ev-R9L9rej01wI-B- zp-vt>iVGCg@rkG?#$IkDTya*o(dqc5Rlfq=B9kN{cNofSEm?$3KJ!wEzqsVWOYJ3p zk0yP*zkk;UN9K$miVaY2K%L1n2%4VM(q?ARac4aYH?e1wB-}uXFxr3+04#e( zi1rGGATB?5+UG*U_%AL$kJBW-dxF$_6O(IcY4ARX2cn6AH2Z{rCR~hYWZd1|fkLyhbvyh`gylPY*qV~OKJ5=DPqB&{M0L}Pvr|FJeOB<9?Y|f>CuvhqRhY`@j6Y;wk_y)u~*g- z*!?j7Q@7@7FjEJ^keN`1s2ybk07>3lLNJL{LU0l7o~_#BWEOXj2O9Sp9#Ms8@&3K6 zj0}=ub8Blxx|$|DD_);08?Ycs*P13mSy|?-TdyBIl2T{9EH83NQwC;mp{S{8VV?X; z#O_3bvh+}>Z*lD)UZ^G+|zzm+6?3u_fM`pNHA zP8k+9@|0Xap;EWw-3MN3zn$ei509_U|Fx0`qRvDO^#C+~t-X zl`-hdjE29)|Km{!m8?3Zhp!Ujx}I-*>o@&j3IeYBMQX=W#@2i4RywEOwoQKRtoP4U zxx-KFQ=fg}8F|ov<2rqSdSYlU;tq zIj5;)&VzC)8+4l7b#ARPH9xj6A+&I<+VKa@=}6y$cS!dRT<~o2g2ggM)-PPGy{&ur zqw5_dD;4%|oss#gPiaMUUv}PVQ0pS@W1m|3X@~UwS(l#wYH@wO@wvvK>dhw~+xPUx z;mhMkQB>Tjq=K5J?ON&&Q=G~!UevXWS)#T-7L%wEb8DJJpRy0+(`}?y4trbE^j<2n z7BS@?7$cG-@fz`7u)0Va823wD0|dHVvTyQ8h*6jy>8E+-NzYV zM6;)7KY?ema-SgfsWR}>;V*-R3zG1RzQeHc$@Q1?;Gf3L5)z8d!~UCZ(Tg18m~*oi nU!)US=au8_Di{E{r`VL(^gru(-03lY6Ps(aaMl$ATi^cyc{kX8 literal 0 HcmV?d00001 diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-3-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-3-linux.png new file mode 100644 index 0000000000000000000000000000000000000000..bd55aa7784a429a7cdb660b8d4e299e3eeb26f3c GIT binary patch literal 7692 zcmb7}bySq^xArl#VcR{K2qvc%J9J@4ff+xvmLPQjofNjqDl@4$e&(X$ch^oJ+y*y9VAx z_<4OUlosADIH*X8;pBHxU~q6Kn`9(J)m#%+CtUPYHP3N3B02nMuDv38MVj`Jj*R<) zD7})&0~#6|(Y!lLDgQ_XmWJ}`HjZd~E9vegV!KD7bhnX{?jvidWJ$?bPVW;4J03$;7Dm($sJG#_;C+sgcI5xC0|6t$! 
zeHrdy-|yl_^hbaE^vT}I=_wD7u-B2JVRM?+Sy1@q_Vx>T`PEl*4hx-jyb1aJtdAb; z@9Y>CXdHTnD^cUpov2xqcPoLZypZ|Kb`SBM0+GOq63sJb}4$?b5 zBVV_iot{4Z`}ePey}iA~Zrk&Vi{^}xBSUtg)D=GGXn%i6Ln-($&P2Cuq#Atk(M5Y!#>dB}r_cIdC&r^iAP}F4*osR^9Gsj&LPMi#50?)%XV=!& zSc%$4Mh>gj(UDIhl9JB$#%vX1IB02TQZh39n8(px9>&HDyOk*^KL!WOwiT|@R#sMW znRUX(S#x`wpPi~1x%gtDMn^{pgiqjECPzlxW}8C9Qp1?n-wt1;b#rr5MJbdP7ay&Z zbTQlx%1vmV{pa%KLakhjzRW~npCo2$OUt8!*-#s6YexAviBsI(%9a*s{6JN0?ZXkX zWIJAp5zDy{avm`;F)h}p>gsAw4!iq0@eSN$# zvSEgom-kn5^Bpl+Sy|JLxW-pS6ciM6baYh0-g0<{%LR46J35kuykzwcrhob>r$3*l z^AUdf6wxsAwsJf-mlZz{elvC$n!&3vk2^U%6%-V_N?T&qHUF0P)~#EQC@HWd$GBRj zqob247hCMH|AvJnXKTw|+s)JSWTM(dS6BDNix+IHte-x8%23HFFXxGFpWAfJ%*=FI z8y`V?9gmh;nMBJa2|VNCD*1fMu2uM>sVTs+42?ePNtf#A=!j&LAFXk7c=4kB^mzXn zg@TSwoEkH1(Gwn?uGZFYS!8KxY5#qdtevJ1@^q=t&i3{@K@vehL4$*X7vGC8Bbu6; z*bUJM*$y-H|B#c%EASy?k#lo%L_|cSq@;A=iwR$v;^N|{_?_jL5tB7;ef9Npf`a<2M8tRRYH4e$ ztEovNd-HV5p&$jkj&^1m0##vG1l;V2eFZ%Ba#sqGNRvOmf;{$CZe8=gfB*jL<}bCb zn`tY#NaS2FDHoN1D-A-Hg@r{|M~BbCZ3^x6x+(bC+v#^|%m`@)*s#>h`5jDOSF*4z zl*(dyXnlSCw>D8xQCtG5458-6M*o0-w$Y%FkW=`-zq;N%c#fR%bm1S5R#)3%;GiB< zcvnZr;wh`EhXe(Ulv+jC)ty7>o6OD3%)mZ9Aq#=>(8^`e%vYL&6Jlg!Y-?+SpYVTB zTY_t&6B84ionHt!-%g7N3f3khBpe+b4diI%WM{)fKp(GFqZAU;&csvtFy*#W`8hd# zy}fDdsm|S_V`J476%SZg0{#87zJ1$ptyy!H#XH2Hl51*eGUeklb8~Z{J!fad@i1>V zCg{SYIk~v#>FBsJa&N=gcf;^JaF+Su4wDAK{6o}QthXtod+O$&>M$z7ozKPo9J7nPMcySOMSDn=J@ zm~~QIx^zjbWn-p6U0t2uHl)=UO7I<_7^+WRP7aS&K|vuYDe1-)ad~+*1_p_}>qHb3 z2@w%uUP8_*T1rYvOdV+R%kCy|*nyjDZ-~3m6~}9r77@|=w>5I^y3Y{?_4n^zD4;C0 zwuNmb=xRhC#?i#Y1kPUUV?;!R7VDZ_-Sg+qw*ukzAt50!A&~g~Vzlb-f4*Fqsym10 zs&-!O9U3yMv_}R8;?lx(nX0O))J(`inv6NCBOaGyzAkBczYyvAfQf<|6=|_vH_+77 zBytOEINQOfGACy?8SYhPXD|I}{9KGfMo!Mg#x~YZ&26pa(vYsEsYyV}DIqB-N%XfN zkYJ95gq+-^wiG|m3&v*jBSWGf3A*raNTfgW{^q83o%iYP@=)WeP3SXY)$UZ{5Rn48 zJD@^;mKS6!J^cOsb<5wJZZ=al2a}59UA=l0)27bM%$%2(r@t<#eQ0~h%G{jSdYHrV zer`z69VUeYUS+y)1_p*)y~<)xgf`hn`c)F@VY0~L1a*0N`6#~fDVC8;)$A#c{k76Z zD;gY$=NUH7Ow)s{5$_2nhc(u~YFW1XCwircL$EsaIp-P~+Ozh}w2Z0(vL zs$XbomS<)@XSl8Gmdz?nx~ zoL79FN{#NcE32DPK(GBt(k!iA>1kMnSU zI$9Y+*$W5?Lg$c^k--v5y1K`pfOkGOB9Tb5t|a7#6*gFGNnxRq zswzxY85x-^)CXT*=xjK@TO=e84D(Sam*qjW5pmPRU=gOijEsz5#+>IMWCkbL9Xo0x zkIry+b*-qXf@#qi%f)p6KJs*813yozC@mwyc)Y?cah=}t*3+k@zkdA!>EPkv(bXL* zwHo5);khHGqpN%8_U&QoveHr~S=qL=iR!E)sK3cFo3SdVWov6|6&01e^(kTVZW?A+ zE30J?Q#R<}s>89$t9KZGHDX_|Zctws8#^>HYhQjJ?Jh(0z|>z^U+*$4X=rHRC*Aw$ ziwgs^@z<}*SFR-S+I?tym33>l+GV|b%G=7!Y++?3AuLP;@kqN^LQ&E7Xa{3tWCS22 z7;{~$<=ur#d#hv7k&&;htg2r%e(DlFAMfcYGLv_Dz{nWTKGzxd)Cx#MLqlT=wX(YU z^yyPve2U?DV(ex3`1qg`9G#sr_o(a4FY_g5Dhi+Ov_GmdE*UAF(}m_5x|`RWxkP^5ryW)nLAU zPC>!cP!V?c^Np|J!aES+*eUis9^+I`j^ z%x^mNds6|A8R!dZ%Sf7T^I1Z3SXxFr`YC|^GTKFtgYzS2nMsD3Gk!=dKBz$eoylQ` zgVViyBol#UXPnck5o*|XC0=P2EY^w4>_noD&*h>1UDhW}N=6*{R8EEhL}(69_ouS* zB4JBG@-WLE;j<}R(kj(9pt6cd0!81y>(#pNf><5%Q7ry`e+d^R*;U%1e0>&H){JXT zj*h0kV?^;%d(6GPyfH+Kz|nK zRc@`8PqoQ{K0)n9#m0_IWV%lX$7Kk49uz`PN}5?biAzmQ1yJ%dg7K%Pr>CIM35Wh2 z)~>G~&90xFmzNk71ykPrU}GkKzjI-`y{&D0VxpSdSxrst&71t+Re214E&E_$7#S>XKkt+^pBBjfziKu)svQ5FKB{`&PS z46*r+_~tqLj^Dql#x$LrcA+sm!ZOfBOG`_WUi%ZoiC*iq`x<#V_YHyIIepGf$|v2e zU_<~gz^P7oZg;?R@i{wOpxQCe(uzq=p7Lct5OSJzCIHQbORFg>hlhm`l99#5#C%Ky z7qc<{BVD+57CHzhBu66;7MJRIIM*J{#l_{aIy$$w*z%Gi0H3nTxHSTXLYq(EUoatm zHO<7u#QvrASy@>ZE?j_fho8+~C_w|__+8kOy9n^`zGY|M4wB$^S=-s(-rn0Y01*|T zq4CQ=BH0Yl0(^Y#dn+UQ!8K`V^{uU#Kx%KTDo0GNb_x1q>qU{)xW}b*LN#@DsSq+r zJW_sV){y7ena|C=RyZFjeD)w*+IeO8vHOlWEL&Bzk4?9%W`EK{ELBNkVX-G2%v6TZ z^~)DwB*DA}8ZAH{Swh3ZbHX75fr99Q$hNn(#tQog(f(~iuz(@k-N(L!9p-O1RhF0U z13&^%$#%GUfR37)nl29&Pz!r|>giFa$0Q_BkdW{?%>OPdEc{ITNM#j-=$0687>t<6 
zh=^}-z=Z--UCqr*4<9~cX0}`#uiT#R$jQ%N8|VS2cpyb1vnsExtqmUovJ6N3U04XE zSV9l-ObacjUMl8_uU5C}UyHidZ~6-5*-y#w{n$iT4SDxiIB!1e|?cW~R? zXjW}b;}$|-USZ)ne5?7nx!Tjcu`O~O*myuan3eqNX@!Mh&9g`l79>(qLL%XrLlam% zusj_dtKF&Mp#0QcJIY#GT9%eeJX4+}MhyYyXZv+(vmjy3q15{u8;dokv0P@L&hH8B zH~%yq930r959dMZPp5qeG;+0Q3Go1C!6D(%f;dBuffDqi{j;-~!N1(Qhxqg7&+zcD zpPwHz!}(VGBSA7rn43QLBKi#mhlg7hwoL*rzJJ$hyq7`=#lXgv2eJ&%LP|oiGE!pp zNX>)UPXXi4d2N^x);G5Xl*(X-NxR2`V>nC=Ysy^(Dm!ToNLCNshr#a#wa_^10a$|mOf46eeQvI?R8ik6 z14Bc1#5$7%moXTNmoG)|?Ck6Sgex5}x8pl9Bl@9OIuo94gI@yI)QfykR#tZD%8isB zb0A&V0zX1dkkXQpNW={)feN!{?*VU9#RH+z>Khuoyu4IZ zR7TYZ@h*d_f$0Y?gBsVOswzGNg_VUxscp#f^isDLeE=}+>yl+E-%?XWS1DY%EG#T` zty}lar|l39&HFZ1VLM-epgrspIo|cklKO479qh$Q}b>^}k>^@I;^7hk=`) zm1XDXSYto;NrDav+uqUf$>YbBZri4Ic8_6!*%s#JO4N&ve7nV3u!esEaZ&EgAkp)` zW4UZ61)y3E?U|TRUdMa-)y_6RSH<<>f%qY$T+gFKlv-a|S+VS{;(*0MwS@!+59Vr% z0G(&G+UrA9B zoUsbvBojdhi0uR3+>E^sGLej@P_ZwfSS=z`Z$ol02q2iAmInF9KNo-Yp%m-Rj;z?D zSb7SC;OMH>cR;Q9ksap)orl3?VE zXoFh!(yS~@LEYJ>FJI2KyM*WVtrsyE2q2Q(YgT}}(yu|G^Vp0s5PgQOk_x4guN{Gj z6K0UnwPUz&s(^9+zaIV0xZW7^|p<)3~2(Yy_RafV_`O1as9$Xp%F~4lMMi4 zO?7n(OH1&uObiSy!o0}T)FSJV;(II_Fv5P<*S|VB*i^}K?Ej|h?CcD)T_sCRQqly* z3=C|5d~j>fSU{54K?#%&@eLlWc)$&dZDu6*m78RDsj2JCx~P17d{ULnO-;+3mNgCw zW98=g*K4*{M!2A@lWI9-!b`t@fAZvs()#r~*Zg5B_YWzR^1Pk?0cFtG+$<#}H8e1A zlajB$ug{@9ifzmG^;>)U;<7St(CVGtUGVJy7i!E-E-sIlnYVU#)s>Y=VHSbZU!?{9 zJb}^$OR*Xp91NL_?G*YnrvRPSV-Cmzti2{M4`fd5bE3?Qu(Y(~lu6IWNjH(|K3sT9*#8mfv6x_a- zrzebzXhZK2&ByS$cOS$fuAMs^Y#8AO((|7{$S4&|5*{5bMG0mtl$2{8*zZx5XHS|W zB(Z*BKr~>5(cebFwm}U7t_mIgjid^6hnAV1ehIM}c(jA#W3Yrljen@h2i<-=qPrag z(c9nod2WbUe0Rt6nf zw`l+9Q31r5wcn#5B*<5bycHy2BMYEbcY^7CZd_bi@HnMWn=)&(35qTfX1cD`Ko`tOC=FO1xy7S4t zzCO5HV8a5c5HwTj6?^6FzSbwHBo>FH~0YS2eZEQds}b>-ldAovaN_rFOkl<@NuO7ifV0o`iE%= zz}rxNzXTrWEdBFfUdP35NFEs#lR#b|K(jG6&dADo_GY|duN9^u&JIb0+5tItyu1lg zv%tWFGZsby;9^!z4!s``BR@a?9kCd>c`#(ylr<*Cb^bRgsL9la@bDg(F1tHBAT;nA z%dN>oq2gm=a`mgZ`T55nLQhF~xw|v~*24nW(b3Tn;tg$WQuS@nX`tHHmX?E)?XEdp zpgfpX7U!NMAuV0Nm`1APJb*cvkF{2#O1^D-JQ}PEj?XTU!M&j z>k)-IIzHY$Jduy*o~ZSR?lAQ^*=z>UvRP8o&=7Ll`qj|j%LmE0Bi||LC*_r>_;@QL zBii#Hq}c7_1G->Y-OgJZB;~e_fyIFOSy+Ixrbm=*xe8E)E-Ws71823c4FL-=o6gSh zdfmeOJiBgL?&-d$qoCIl4vv%veOXCK|M%}j7eXK(y5?WMv$MlJXH)-@g9LKQ;)c;t zoh4u3O$aX)6L?|Jpz`vt31j(sdw2I+u)*5r>wwsR>wu$_SFoHyO`XhsRs&DiEh(*X zeD8{{-PYe$1?B_+w}_uF9Vb72{@e}}59#jr($cH6!1~z6Q&dE8XTZn;7XCuY6`+6` z8XR;!P*Ks>pMX#mkZ9rJrAzPxKW8@?ZnsuCEP(U;@Zker9Tpf`Bko%b=E}&*W@l$x zT3fR|e5hS)9JGSG{1v{PnW%F5-1ZI|hk;?su1QIm23`SEDlRS#bR#2^<@E#b0C;n% z*26_y+*gg+Yh(H++`-_kV(5?~oPWw4?AZ46_qVGY2WoXXXz~VYe8qROcU{8C&ThuO zlFRgW3O>fM%GXz1Lxaf4@GP%Hu#}}g7vQtRbdU2 zwH{e3W#XNko$)-jUxI?%U^5|Y2FGn;V{_U|%?0lGZ%a6QlcHN@o!W0W{0GFoGI9U_ literal 0 HcmV?d00001 diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-4-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-4-linux.png new file mode 100644 index 0000000000000000000000000000000000000000..bfa203a2b8bf39d7fa1157c0f00f9687d2f09aa6 GIT binary patch literal 3512 zcmZu!XF!ul*9CNyr3lhQ6e$8C2^eY8dk0Y$x%b?2PGavH>oPO&FwoG@Fzf4SnbOdlNdSFL zdOGkKev?cDU$nlax*9YUgsWe`B&5EUIy^9Yb1oQWHN{7>)6(*{xPI~3w2hbZD2?U% zEAbL7_803%MZ$%{l|<^X21OWHL58D%i9%ebS0LN*2lc(RlKl@5piUI=AzPe1Pj&bvtkp(4aTIuk-%brIG65 z=lpL_5vKRY@7~}ACew61C|65Uleb)US6A0M?;zXi`nsaLybpdZAt3=%5*Ha6IXyky z@xu4yaC0&AWK)WT9|qfb_ndL0sxu|!;)Uq(nHjvDahd@q8{4~8B;IO^O>BTb;EL6f zm6hGy-8GEk0DBz;SXM`T3J<5d5be7)Cy&ZZPTtvS*;eV^Gc+`0WMIhEHwg<13knJv zd0o=s>EF^ zSGSUw>h8X-s*34CW@cq|cX#vh^4{^7MUg%VYz$?79W1_=7rNa>{P)O z-bhkHf>vyA65plx9%g3d{{DW4YI7)85*C|p_~5|Iz8DXyY)6>(@kN2TmnB&!SlBtPF 
zN_u*FPR@KHui{GsVS}==GH`DpuXS&^yLaz$N8w(-Zrvv+j*gCkt!!*;Sol=5E>n+) zNy*8qmX@FY`RAYZ_Vz#cyc`?~%%KZa_7Q7H)wkcj&D4Hw^t#0Gd=b6R>?Z)GST?wK z!Jts65tIu-U=Bc`^l+vSNNH77Rah9UWZ%w?w@ThEwpd9?Nv5`@-S4qIkf%hfm>*(ird4Nxp{fpo0}d8L`%^A8v6}n zBO{|^_Rb_eRh^*lh=|(S+M9y;Bq?78CMF|2J+&n5{Xd5;8-W~pdsnIP$Hc_QAy;DB z+LUK9*1sk{s<7JK*#V&?5D1x!m+bBBPsrQkg9By;2FfXU@uryez~JENj~`7|6>9wV zML7%+A|US&hH4G#W|Y1t$3n9o1O`jyr^j1Kr1sN?bMx_mIOK&K&b)sgY5Hca$%oh$ zOcouu%)`S*e>M-j*WdTM;SAIJ3;`b=FrIdL&DYQ40yv;YC) zO=gvVlskB;R0{!C3xb(g)O%=!W@<`)p) z<>|S#xtXRXu3c$1YxV_lt=M@m|naEe}0aq)WTOa2%N z<*A|J;PUb^$l0&b{&`9^boq^wA%k4~9@xP|Py%7g3I3r|nTg_6U=!=qiDooc?mQdk}0ZXkt1F^t)v9DD_N1zc%iVUeGo zZzNLt_H7)6OIKG{N=k~Udm)gB)lIr)&pkOlUI-VvE+|-6Uw@2lFD?DbCa7)>oow*< z3}f;|B6+S|qe8X;OsA%%=H}))J3EJmHyam2yJ3Jt++AEmMMQMK-9CMimXkZ0tw(SO z&$R}P1MZuh&FpbVsa@iZvTpJgP*R#38#6UDgeWPs*4DBQ-yZ2*TwEj&68jz7f9wrf zHhOxvxR}p+)tLj52C-l|7csJ4cthXXlZU%QdTAsi3i`mb6v5& zcQ2{B`nE#RsEdw{j>|I*`-6=Mk{S@U@`r^QFea(nx7EKOrYdkY2}MKYo1!8{h0Tbt!Sh555$`yF2-`Lx?{Go7RK+rdaEh1oG>+vrad6Awf= zY|*G${ROMm~ewI!nKEV z7%)WH%gD$8Po>00M| zo}Ln!Zq^(C^qHAFC?nRh@=iJ1cQV7eyM>av) zzR1kX>S`YhMpfbA8+hhwpQEJod<%cQMVr@Rdy)!;1dv=b{smZuvdT)`@_t9j_2752 zv$MYK$9GyrpabZxp(wX}QnE%q&;aARcb`0YqOGetH91)j#jkpDKqi+?xPg1c82}Zv zw6Jh`**fh63}4l|vYpjXY3vG-Xl7-VV2rn_Xm6+Deb>FOR+g8G-@Lhdcv#c~P{-T5 z`|{;W0Hu11kmJ24doRMmx-w-DTMKQa8JPKIKl1isFU;NBduOuT(vgw|dGs_m_%@TK zZ}y6`KxV*?JYNU;~EbL^zQM7=)djoi2=qxl&h0=c8Mwy}f{+KvYx|pbL_6F@az_e8-*Y%5<)~uP+9T zi4WHkdglW$ZC(D5D7WzOBdqYEbF!Wg4u>NN!-g*v1MMBjmXG-R=;Y)C*tNduHTS;& zFo?v4h6a>ETVo@6W1>_vN2kz7?&|m5rGLAlrDT~}sg$nmX@G;ew29sq@M2W?E#LhtCMDAWV{fqY4QcIdwhJn*IZK=vB?0} z9C}I#ABMx>LR|50$d7)xyGNGCzuev7w;4@p*3{^zJcf#X{5YenEd&n#1Oy@{J6m+i zy6vf-u`vKkYH4Yy8b2Uofm@?4(m>PG2g}RLL6}Smj6@I!#QyGXEEdbj@Z522V89?& z%fZ?C(&fv?|3>o_Y(8BP2WSSb0an7rXYSoQL2>a-V2#i!i!(Fw5J)`blDn(xOiMr| z@LwR0&o%P%@)p^0Up#-VZE8BUOY8*}5pHf?a$nR2y{j#~l@J#<+vsI$ZqALr8~(01 zFZ3yS%NiKdSFgCwpFh7bl#`Pqn4%}d26oMn^U)AuDqAC=H_M+ucA&{dgsi{W0BO&*?Qnz?)$d;1Ka@T6MccVrn(xu5%^7x zT0D>sA0Hpd45*OM3Td=3^W+6s{C7siWo~Zs`}bjgv8bu3J&TR~GjF7^p$%`7`Qa&n;^JbUxo7EUlX@Xgs8R?;Iw&5DY2+{M1{&Z1H>Bwln!w$hoznq9hCdge>;)??{5t4uU|WB_W2M|>qcZ8we;7szP7Peg@#@D{{Y5G Bzcv5> literal 0 HcmV?d00001 diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-5-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-5-linux.png new file mode 100644 index 0000000000000000000000000000000000000000..92781875d7266212ec05c3417aa0e72ce60e60f9 GIT binary patch literal 10040 zcmc(FRa{k3_bsRhg0zB^NOyNjcXu}^-3>>jq?OKtw6t`0#{uaE>F(~k^t<=|{tx%z zd%OFAz1e5&wbz*h46$sJ-@PT`GC7%XxtW!RbGC*Dhf!=p^Z9cOAqQ*Mej8D9-b0>Cn~j?s z0+#}ZJebt4kpI3N4(`);W_g!?S8#AIpG(vJyENkdmi?cfgP#6Zw>nry;KM6!Zf>HZ zu>vrXBxzkhG^!PC%?I8fv|dy>Qm z>CKa%PyhS$m<5j=B9b^N>#ePDW8H_Z{~EPAypu-wclFZpggpQ0zXO}-Keyf>#!yKYmzI_m6eMp^ z|EE27BENLO*RNl*v)9BV;s4$1ZUZlBd)AMJa+)Hz5EDY9rFbY?%2&eo5gc*6*F-%#Usxy?w?AAIKt-)ae=Fz0ZA6M$ zW5e@4qlg>g*GTK*eS1Oj7J6J@>(h0xRBC5GOhGTF|KVu2w6D2&pM%4&avM?T-o^I% zL^x0#4{r(v^A!_|JUW#u(mBsD1-4?^*OgOYL2y6#W{kS)0S13rT{D1B;4GKc-~IFY z8Qf}NQ!XVHzh1|PqtdmSw)P8BmeZB4Qeu_1bcl@A$KnR>DOT2UTC&xLi+w|(=WuXu zB$w=>X>S+Y@hUgG%SA*L`};i0D$=0>xBJDqS@{K9ZT|lB^ybpio9yh3(=+#$LpFs) zVn`e~ul04C?=aA#N&`9ta1$0IBU{jaD48rQM9E5fIy$6ACnySeTF|; zpMohJ2jf$MrnyDX)-LhNG7s%Lhpy?`rt0{I5L5Tz`&@ZZq%;+!BXW$RxqD!~?}eIw zTd|t=6IWL+FfeF&>b!s7TBPlPY8e}+s*u_`%+fY?y3*(we0{~ZwZ-1#9ejAPce&&n z5E!M9mb|qukW44C6udb+JecI1m~h#h%;B4srnMZl+A~~G@OEY6A^pc6TfT;&ZixVl zOr^VA`2i&4_9*$Y3&Fz7FTMx1_EC;*7nnGL?ZXhTh8xjXH~WhVe$(-JDE~mtCDt$y9TV@|JWA6d({@Q*JK)yUJ)R&b{N$$wW9C((lB49)0b-`Kkln 
zDd`glnboq)Pfe^#zgrWRFks_2-&i@p#9<5%$-sV{_Exa71AF(tPe*ziN$7FCD-1Es z=plmOZgf;c%^#1;dc8l@dTQ(PdC<_e_eeRZ+{;X4tFW%uV&w6iUB-bU1YE%X^B*1p zSy;4c_^-C9Y+^EkgCClGqjhx3mwfY>L*ZWl_&b=BSCJNaNOU}ye{V?aYP>#>;Ld6D zd$HNqS|Rdewn|w;BO)*`s~}Ruv)ujg#Ch|Aos$v$rJ05K{_ak_iOrypY4=8Wc-6>A z-1c^q+d=0AEG*o|b7QdJ7xUE0%4=caqWO6(PkTwptI*f&GBUabEu6=fiZ@G-5)D^Q zQoBeQJDQ~>$;mPV_;y2sdd{0@>#MhQR?g0)DJjDXwIe)C!)t4O0se@mq@=&(A=A@H zw6wgN8@!ZMbR~K`Kb?cHlr;q3XVf-BqA+GPv^+kE%p5sRbP`GS#G?}vVqw`3^Z)d@ zc8ZPRoiR2pEHzkA*Buz@xpY|)EIIYEmDMtJM`XFXUNe z&@-Y4aq(xAx# zi$TxCL{+WF!a~CnCnv4Cg5rI6vM4I zrVFlPViV{ZA!QWNX#%@U15;2?dsucn-;vK5hDu|Eut!CYj}{CJ?EWkzF7x9RB^6*k z3M@S=SItXxcIM!;X>CDB(yw*+Br@XW8pp-aRd2rCQ|eheo0^(>|;JK_sL*VSyd#4*Tz1BwN2wyFQzdCg){?01q$x3H0-7(bz9pI-D%oKY^*NeGmmVR-;ryyGFrdDAgPuKK9|EOgxZ{-{HNCq@>-p z%C-5xkw!S$+tl;>OR!KY$;tIXk9v1om>MNEw)cXJXtLbNp<#uFEl^YQKnyEt>fW$$ zJ>UK0?D+T}U%qT_kD($U0FH5)jT;@oj7{j-oOUYFSFNe&5X?h-zEfM@pOTDBN5g>r z(uR?S2H>;v(D#jRKm6ljRP7H9p0J$`2BPH2rAR!AVThE9hyZgHWoD8~kpr_dHF;B1 zRd(Z8S$kf;H%!k=!Yj?rzM8K)bw7N2i~$@ctK6+PIuLTxg07Bg6V@+&s@$rP<^nKXWQwI&ZdLhCEXo?vjiGqf?0c4lFJE zP0AnZczJwdO^=m*{QlZPz>kkEp1rxQJSK{jG z1Yw~P+61lwNEyj{dqFJ^CuTQOQ*ci5s8NZD(ScnkLM^Ju$gE*F@>5fq)zuaz=3P4J zv~&agU4i@bxw-Mq=R4CnJ${-asWSO}LnQ;l!`V7J?|%OL>igTtvCh^$-E}=QHgbqi zRyJd%LUQKjT|`$G63O?dny#+#aq9bf*x{0?s+fyhk|5D-YxltIC8~P>epu-E{(jjo z0AZWPM$<6l;B=`jZ%$@o}E?AmG}1Eot&3XImyiI zWM_Z&`qeKxu~+?$G&DQODT^%$Tm2zZz=@;XyiW+?#?8Ah_Ywc+TAl-{`150mB!+&BgG3}_FyqdeWJSh8+3vc zAsd?`fsc#~Z+4Ck7D6p7P%aLMVMB0mt-a0Lk4{eK*Cw^>R0JkZ*r1ix@|x?vXg|eN&S^+8P$tj*V5O zHdXwJb5yLA>DJ4T2pSsGD?}|rrFZsLH~D`x?$aAL`%AI_t-892{k@;|`6#`86K0mH zLX&2eig9s7y6dF0v~hTNkNor;+1uJ18?wOh7!B@r5A^qNsz0UbnJQ!YNx5p^Eb<}> zLC}-JeeJG~jAP+rDan5`LN07-vjR2;N3OT2ZUdePi+73W;9Yz$8bCgV77g{2GP4+~ zDveW#Y=e~)k$~snxg#kU>h5k!f)5)ZH z?(>E-!n-@948^9i4!jO({K&7NGU^f%K_FD;10^pvFc<4u4v^V`C>z zP6`I)e6UgYZhx=Wo%Jvi5xKg;C@990sPEr)Gs)O0povI*+If6#f{))(JYNihnQyLk z;7*N7I!igcKyeNW9p#b4al80T;>p&{PcAkxXu~cK(iQ*P+!(dJham)r z=XG(3(4Rjs5|c1RDm05tme(3LS5zJ^Z2i>6cV!d4rf``COMR09D&T2T)7s9CW?Ng< z3!~2s-p8vwikJI^kkj0oi9&BiMy)!ay1Js?3f!+J)Zg9hOqIq&5y=X=y=?IHgxp?~ zXJ=Q3hJwK+FE(WvTpvka<;usfv3Opc4g=_4r=cl3IOw;YG>Xy15;M^?H~%Q|BvVO6 z5!xB^l$Sj%^@Nm+00o8S>C@rH#@{Ve*Y!F&K=a<-^5m6aeM>kz+@_IL=stGXQ?-L7rO0-EiLsPW%;$W^fcRTib74^cZv17 z1*~klOU*#P;>UJUU0?Y~$q|~5{X@EQa&;wP;n`lKM`ab=xxMy4HOS+bGoY=edR~5? 
z*xlA3;0Ah+YcBv)yf$5RPEMwwQuxC|Ou%$FFqY@f&f)*H^I%eApjWCWH7><_KaJ1T z)AO^H)n&Eq*h1M;2+FDmQ)x*&4zcn`??j>cWC@Wx9Pn;V7M4R()2AHl?Q(KA_4U7; zl+`C(Sy(8JkA;(y6L1+o>ZK$nkD3A_)znfjT=I7{uzz-F4pDx;8iWbAh2i z29hq^ipYo`U)oPvTT?hWwVDfWPlx^85>3t36B2?IvmSgS+uA_B;NH`=Tp2y;tEnj{ z-M5TGqmp87H`L$7(K8yIlt#ofzECE*FmzDebkBnt5qWb{<$hp9M~EaRXMxMu*zD3L zDMsEkY#*7kzL~o5Mq@2&I3xxKDA_01IgkKlr)!ge7WcWixv^IYCZ8ff{1iqa?Br&!wSIoLHS%eu zBCDvVcVa@--0Y~hpt^cACivXas- zyM_Aj?@*cNF>;gZ1MXb*ZoZe1mWh(Zc=F!zFu?G)S63fTHx*szKS4$L`3u|Iw|vwG z&`@OgT&+y4XkCppK78;p`mq{@n`Ms_^b8K}_nEv9Hv;A7=vrf7dH$ak+w~UvcGmKf*jNl- zUtxs6iL6^^u}>qINTN$Sp)HKI>_$?c=0X&+=?1Z~+DYcHUTlE~jeP!Bhxs(j;rTXo zWpfi310(T&)@%M#97|;Xzoj0;|9ADLv9U2NEiGol{6FQNrSiU0WLo6Fa*yMc*K`_N z+uI}oy=JWF=)=DkbaZujczCq6wS(<;4h}5xZYl!E;>Tvjr>CV()t~zM`o2P5kI@PK zKh?2+MY4ye=W^s&A2nFa$s0kVOEyumY^AHlE1%yU|_c) z%b(?Cl9)cxfti_^zg4Vi?s<|9{g#IZKjc7>3Re=6jGG&;|5){|9`eJq?ImKRFu=QKWm>TXJqi}Op`s-^IW#V zv~T!Pk?S*0_R-#+ikB;8n4-a?OtgNiX#l`8WiXtLks;bVfx_~pxN( zGjs^APYfUQ%rR0!FhUXrwXXdy%B&5k!+-pECm_)DaV$5Y)>2h<>|mjRgM%Y3Hg*sO zLqSIFW_d|RM~6qN^`+MD{-(3DlPQ@VxbM>VxXZ=f%&qc?;l}Ju%65m4|8&M?$LN5FD)(2`gpN5LdJXB)71qMuGP~y4rb=9!^0N$ z!^KU_zXokD^I5;}N+#$((6_{WbuR-uKhtaS;^5}?=oGlUxgjJZ^zihwpRXk$Cy$Me z9vvHFYi_eysCT`+*q^q1@|n3gZtQGpBrA>2-OJMxkAQ%Sot>P5!Z%PtNZQ;yCnhE) zI5;>z|D(0FwU3XFq2ZDk;`s|+K0f=U<|YdmezGoD5WlNsL`_XiUER5p6Q=+r04Tsw zfW3}gsB5Us_Z|FY+}lO@N*K)v#j^ zB{w%WIr-Yraz{Zy0azo|Jcawz*$@3d?pauL3s#huM|$70gXa+t5t($W_jcJ4;o$xZ zT+dpoJl4DD-FSOjpbaS>7!vw%`aX8Ea3w&Yg3Qc6GBZV->5}9kzkdByS-E$37!?-g zeYz1*T6#E`#OCSg84(dtmUVM`dw+L#c6P?j!NFy}sHdvxPo%4%VPR^TRZw7MYr8N# z{dH<)aq(b#J3lW^lJvXV;i5cc%(1+joZKF9Sy|c0iw0n*v0Qm_ENOlHh4yw*Ha6@f zmxnt~Z*OlQAt5p{G6sg?*4FpD5m0{5(f)ob;3fboM@L6FIXPYMv3_95*C4%;gjn11x#(^6f8{OUANEyU_S8X7|2I3jWE*w@7bC2M9}QDyq%tGE<3v2f%2gjKM)U5KLe# zz|a_UtN$dJ)*2NR6&*>-Lm&`KOUpfHJG&CleNfQz?^1WiJ;d8CD83dJ78w~C>|Dlp zJ79mp*4Fm89n=cZXN4#za;mE*VKCeNSQ;=^upv@ow-6l{2bzGR$@4TfJbZAj#-0wu zfm{+PIeEYWp`x5z=WyEWcQVPr!9fXZPX~vi#U}5F@bG7#-l3)joDoyx?c29tb^zkj z=P@gPm6a_&g@%N@AbEv{SLd>=qOLxHfPw?uJmN~gp>`lPHTCZPa;dPekV(IuUGTvW zCG_n4oQ;ibcXzjE{S^RoxgVv;*^{}QXlEBl)&vm`-=MiSkp<|cb}U?6ng#}bzbh#M zML_U5Iy;Yzjd|akZ3m$eBqk++f6UFz#l^&)PdMD)oa@%u={LAB(9nd0g$=-9sB}wZ zropkXYF=J9CHf5}CMHMsVBZG?1?6zQ4x5;GX>!u->S&oIU}0{qu(b3MczJmEOE4}( z#NfzCG#_gn9eiS9Lp?n`8JYgk9NA#&%BH4!SPEBteZ48tCml=6P0%ga^)YDi^95TL z^78UUcfkHp#6l8C`gKkejG%^;Ai4J$KopeMm6nBtg`Yn)ElpoqI={FWHZVZP$JbC( zgS!Ol=pbWcR1g>Mg25OV{~nPsG1%$uataE57khYM&&rP{2*rXM3Wb7KCnY5TQ*^-m zBcF$enV6f)D=55GdzdnW{Ds=-X(h{_NExoXlM7AWHO0lKOfC@2uX>F?*hPtKP(=HhlG@smnRGQ z@`8m63zJn*QE_&5*4NhugPJVXodGx@mihTJe6=%(Mi-Y#pgI7dadFjDRIs@|cpa~Z z!=mgK8>OjiFfqRa5Bg@KI0Q7?4R#mwwC=5+2gocFV1Bfjs2nGIOXb7>1Gny_n+%IKR$|S&MWzdk= z&GmJJg)2=wMNfD4Vx#Bj!~O00`Z_QwQigVg1rfKyJ06~yzCIcflD5`XqhY835O`zW z2qXcORaIpp&1QXAMP}za6INDM0EIF$GXcsG9G{Fd%3)$+K7an4l9Cdj1Xu?h9UVm| ze}-UaLPAYWj=2pBNcN2g4ejNvM&eYlP&6IBO8HdbV2Y^b%4I;SM34ZIfHxQqhaH`s zuC9ZQuR5s5t*ouZ#UBlX!(bJYlanU9lwF;jznp-(>{+;oizA5if~8Ms$WfvO@7&+t zpP!$fW}nEFPdRGc3RoB$Tix9yz{L$#=VNAmpB5>XF6jFkZ^TATO6qH1U=E-HIe|-b zM#hrU&EepHfK+Zr5ji=aU~G?9x;pmdS`aZr*enL$xO@!_4NXrczL(Ttd*1>XyhH8^ z01mjq$jFE(pRdiz%2a)S{$*s~Hk>{Cv7xDD~@Cq~@#A>S`Riw&?MV4GVfjTPv$}$_{dJ zatx8>+XR3rC~WzwC()6>&{>+@xYpaLmSD4-TbYU*jAi!6r-cV_7CGzEhcvcTR# zet!NHKMse6#w3X49V|CDx4VoS;iM*jicqKlV8;(1ex{_%0kHk`K$zU{`|xu{o2sg^ zvUZ~f%f%=Dp!e@yCU?(OStm3$-Zgn&0%a2&6H|s~?z5TUisr5Ys2)(-bb&bl2~kl| zu;sq|WOZdFv-v$xtAGQpqQFvXXlNub8RiO#>FK3{clGpusIqALf-oM&7#kM{hzKg^ z8+s2Ex-Q_rnVArZ$gZv~Emm48s>`)LY`|3@y(uXvX>V^Ao*G$Pd;pZ=aub6FVhNb9 
zvFAv^&dx3}BqSs<((m!%9%xapZ~yw$t8OJB2{pCY!1eUU4i{Hf9&Tq^5W?&1doiJd7XJ3AZnP*cMNlPD}G00|vpPBB>Lhp^_weZRETDbx& z)y&P#_V@Rf=-+M%7&v)FY&_(gkQZWo1l}uU;G?Ajmp`(fRoJ05t-%2TXRn z7L=5v4XTP>_e*&f?}3esjQ#|p69fGVdU?z5@gp}k$o@4pcGi$~8NJnHkv7ofAiDi= z%+XMv%fhpKSaCYv{@LBm)Dm`rpCQM)Ya7iS>og4Kir)v?i4?H z^5n~xFH8oFKNzz;ljs>3OifI{$1Fe^0&TIfx|)yM!4G-R10kTLs(g@5sq3ttMz07#Ki2?&a`U zPnB@6vFSFr?E~uRH9ILY?fGR2^wGmg7mBm<=|Y2hj1+m}kZ3R(kZh30`%7TIi;D|? z19Gt#F!#j7H{YeyG&H&`ZaM82=YRh;0!r7?G8nctQfgc)0W@hOArIE8S7M~!HA)TS ziNXPo0G$qy0?2S18+s8bDX9-YwxFS*3Edq%JxB2elqDi63jB+MogHxg_Ti!8!WmP` zozq$`CRiRHmD3n6BOsPb4Vub-{Zdj_cXe|!C^JDgU;7yoqw3{VUtN6xrt&Ke9e@rX zW&j94n-JcN24%Ey^YFwaB%~xKgZx-iQ4xKKjfSSLvr|l7e!qtp0uq_n`1o$F^M7X* zCaY8-$U`I%A8-MX-~i$!z`$VMs%ijJ2nzCFLJXg&bEaZqI&Aj6{fUx5ul=R;HJCbh z#>dNxH$;pV)agrG9tw(!BYeYtC{crqL>%d(l~s9J8LIEp;m!GOegg8lfSsM4=M4OB zhCnX>ctOUC!>C71Ng2c-1b7#uimaTRxls77xcGR31~=3J{2n7fY>4P@fn)90y*$eI_H(! zj>pGA8BgZ}3>2=?qH_z9l_xiRkXr9zT9VwF;=rq0Pc&nEu1+CIZ*P#LZO74RN}LKp z;94C3xdPQqjEybYM4pfV2A#^|Y;gvG`}_SD*{uqw^%zBKmzi_{(}B5g|ErN*QNEA< z`#Br<90$#RvHIU Date: Mon, 4 Nov 2024 16:02:10 +0000 Subject: [PATCH 407/496] Bump rich from 12.6.0 to 13.9.4 Bumps [rich](https://github.com/Textualize/rich) from 12.6.0 to 13.9.4. - [Release notes](https://github.com/Textualize/rich/releases) - [Changelog](https://github.com/Textualize/rich/blob/master/CHANGELOG.md) - [Commits](https://github.com/Textualize/rich/commits) --- updated-dependencies: - dependency-name: rich dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- poetry.lock | 66 +++++++++++++++++++++++++++++++++----------------- pyproject.toml | 2 +- 2 files changed, 45 insertions(+), 23 deletions(-) diff --git a/poetry.lock b/poetry.lock index d5318d08..ef86eafa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -682,20 +682,6 @@ traitlets = ">=4" [package.extras] test = ["pytest"] -[[package]] -name = "commonmark" -version = "0.9.1" -description = "Python parser for the CommonMark Markdown spec" -optional = false -python-versions = "*" -files = [ - {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, - {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, -] - -[package.extras] -test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] - [[package]] name = "coverage" version = "7.6.4" @@ -1787,6 +1773,30 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.5" @@ -1870,6 +1880,17 @@ files = [ [package.dependencies] traitlets = "*" +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "memray" version = "1.10.0" @@ -3322,21 +3343,22 @@ files = [ [[package]] name = "rich" -version = "12.6.0" +version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.6.3,<4.0.0" +python-versions = ">=3.8.0" files = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] [package.dependencies] -commonmark = ">=0.9.0,<0.10.0" -pygments = ">=2.6.0,<3.0.0" +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] -jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] +jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" @@ -4190,4 +4212,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "77ed4d7230a35aead2842b16def52ec47ffaadba1374aaf72ee258a7fed3ec35" +content-hash = "c1aae8c255de1b3e2e8bcd45f6d4eddb81a395397fd46359500b94af18c8d37c" diff --git a/pyproject.toml b/pyproject.toml index cb03f609..c86799b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,7 +22,7 @@ keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] [tool.poetry.dependencies] python = "^3.9" openshift-client = "1.0.18" -rich = "^12.5" +rich = ">=12.5,<14.0" ray = {version = "2.35.0", extras = ["data", "default"]} kubernetes = ">= 25.3.0, < 27" cryptography = "43.0.3" From ac1a1dcfd72498b99349b430ddc37082a6f055de Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 22 Oct 2024 12:37:01 +0100 Subject: [PATCH 408/496] docs: enhance common module code documentation --- 
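The hunks below only add docstrings, so runtime behaviour is unchanged. For orientation, here is a minimal sketch of how the documented auth and Kueue helpers are typically called. Module paths and signatures are taken from the hunks in this patch; the namespace, queue name, and flavor values are illustrative, and a reachable cluster with a valid kubeconfig is assumed.

    from codeflare_sdk.common.kubernetes_cluster.auth import config_check
    from codeflare_sdk.common.kueue.kueue import (
        get_default_kueue_name,
        list_local_queues,
        local_queue_exists,
    )

    # Load ~/.kube/config (or in-cluster config) before talking to the API server
    config_check()

    namespace = "default"  # illustrative namespace
    print(get_default_kueue_name(namespace))                         # default LocalQueue name, or None
    print(list_local_queues(namespace, flavors=["default-flavor"]))  # queues filtered by flavor
    print(local_queue_exists(namespace, "team-a-queue"))             # True if the named queue exists
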
.../common/kubernetes_cluster/auth.py | 27 ++++++- src/codeflare_sdk/common/kueue/kueue.py | 65 +++++++++++++++-- src/codeflare_sdk/common/utils/demos.py | 10 ++- .../common/utils/generate_cert.py | 73 +++++++++++++++++-- 4 files changed, 156 insertions(+), 19 deletions(-) diff --git a/src/codeflare_sdk/common/kubernetes_cluster/auth.py b/src/codeflare_sdk/common/kubernetes_cluster/auth.py index e6146093..db105afc 100644 --- a/src/codeflare_sdk/common/kubernetes_cluster/auth.py +++ b/src/codeflare_sdk/common/kubernetes_cluster/auth.py @@ -165,8 +165,20 @@ def load_kube_config(self): def config_check() -> str: """ - Function for loading the config file at the default config location ~/.kube/config if the user has not - specified their own config file or has logged in with their token and server. + Check and load the Kubernetes config from the default location. + + This function checks if a Kubernetes config file exists at the default path + (`~/.kube/config`). If none is provided, it tries to load in-cluster config. + If the `config_path` global variable is set by an external module (e.g., `auth.py`), + this path will be used directly. + + Returns: + str: + The loaded config path if successful. + + Raises: + PermissionError: + If no valid credentials or config file is found. """ global config_path global api_client @@ -215,7 +227,16 @@ def _gen_ca_cert_path(ca_cert_path: Optional[str]): def get_api_client() -> client.ApiClient: - "This function should load the api client with defaults" + """ + Retrieve the Kubernetes API client with the default configuration. + + This function returns the current API client instance if already loaded, + or creates a new API client with the default configuration. + + Returns: + client.ApiClient: + The Kubernetes API client object. + """ if api_client != None: return api_client to_return = client.ApiClient() diff --git a/src/codeflare_sdk/common/kueue/kueue.py b/src/codeflare_sdk/common/kueue/kueue.py index c063c6fe..00f3364a 100644 --- a/src/codeflare_sdk/common/kueue/kueue.py +++ b/src/codeflare_sdk/common/kueue/kueue.py @@ -19,8 +19,23 @@ from kubernetes.client.exceptions import ApiException -def get_default_kueue_name(namespace: str): - # If the local queue is set, use it. Otherwise, try to use the default queue. +def get_default_kueue_name(namespace: str) -> Optional[str]: + """ + Retrieves the default Kueue name from the provided namespace. + + This function attempts to fetch the local queues in the given namespace and checks if any of them is annotated + as the default queue. If found, the name of the default queue is returned. + + The default queue is marked with the annotation "kueue.x-k8s.io/default-queue" set to "true." + + Args: + namespace (str): + The Kubernetes namespace where the local queues are located. + + Returns: + Optional[str]: + The name of the default queue if it exists, otherwise None. + """ try: config_check() api_instance = client.CustomObjectsApi(get_api_client()) @@ -58,12 +73,14 @@ def list_local_queues( Depending on the version of the local queue API, the available flavors may not be present in the response. Args: - namespace (str, optional): The namespace to list local queues from. Defaults to None. - flavors (List[str], optional): The flavors to filter local queues by. Defaults to None. + namespace (str, optional): + The namespace to list local queues from. Defaults to None. + flavors (List[str], optional): + The flavors to filter local queues by. Defaults to None. 
Returns: - List[dict]: A list of dictionaries containing the name of the local queue and the available flavors + List[dict]: + A list of dictionaries containing the name of the local queue and the available flavors """ - from ...ray.cluster.cluster import get_current_namespace if namespace is None: # pragma: no cover @@ -92,8 +109,22 @@ def list_local_queues( return to_return -def local_queue_exists(namespace: str, local_queue_name: str): - # get all local queues in the namespace +def local_queue_exists(namespace: str, local_queue_name: str) -> bool: + """ + Checks if a local queue with the provided name exists in the given namespace. + + This function queries the local queues in the specified namespace and verifies if any queue matches the given name. + + Args: + namespace (str): + The namespace where the local queues are located. + local_queue_name (str): + The name of the local queue to check for existence. + + Returns: + bool: + True if the local queue exists, False otherwise. + """ try: config_check() api_instance = client.CustomObjectsApi(get_api_client()) @@ -113,6 +144,24 @@ def local_queue_exists(namespace: str, local_queue_name: str): def add_queue_label(item: dict, namespace: str, local_queue: Optional[str]): + """ + Adds a local queue name label to the provided item. + + If the local queue is not provided, the default local queue for the namespace is used. The function validates if the + local queue exists, and if it does, the local queue name label is added to the resource metadata. + + Args: + item (dict): + The resource where the label will be added. + namespace (str): + The namespace of the local queue. + local_queue (str, optional): + The name of the local queue to use. Defaults to None. + + Raises: + ValueError: + If the provided or default local queue does not exist in the namespace. + """ lq_name = local_queue or get_default_kueue_name(namespace) if lq_name == None: return diff --git a/src/codeflare_sdk/common/utils/demos.py b/src/codeflare_sdk/common/utils/demos.py index 485f47f8..5c546fe9 100644 --- a/src/codeflare_sdk/common/utils/demos.py +++ b/src/codeflare_sdk/common/utils/demos.py @@ -13,10 +13,14 @@ def copy_demo_nbs(dir: str = "./demo-notebooks", overwrite: bool = False): Any files that exist in the directory that don't match these values will remain untouched. Args: - dir (str): The directory to copy the demo notebooks to. Defaults to "./demo-notebooks". overwrite (bool): - overwrite (bool): Whether to overwrite files in the directory if it already exists. Defaults to False. + dir (str): + The directory to copy the demo notebooks to. Defaults to "./demo-notebooks". + overwrite (bool): + Whether to overwrite files in the directory if it already exists. Defaults to False. + Raises: - FileExistsError: If the directory already exists. + FileExistsError: + If the directory already exists. """ # does dir exist already? if overwrite is False and pathlib.Path(dir).exists(): diff --git a/src/codeflare_sdk/common/utils/generate_cert.py b/src/codeflare_sdk/common/utils/generate_cert.py index 014d192d..7c072da0 100644 --- a/src/codeflare_sdk/common/utils/generate_cert.py +++ b/src/codeflare_sdk/common/utils/generate_cert.py @@ -28,10 +28,20 @@ def generate_ca_cert(days: int = 30): - # Generate base64 encoded ca.key and ca.cert - # Similar to: - # openssl req -x509 -nodes -newkey rsa:2048 -keyout ca.key -days 1826 -out ca.crt -subj '/CN=root-ca' - # base64 -i ca.crt -i ca.key + """ + Generates a self-signed CA certificate and private key, encoded in base64 format. 
+ + Similar to: + openssl req -x509 -nodes -newkey rsa:2048 -keyout ca.key -days 1826 -out ca.crt -subj '/CN=root-ca' + + Args: + days (int): + The number of days for which the CA certificate will be valid. Default is 30. + + Returns: + Tuple[str, str]: + A tuple containing the base64-encoded private key and CA certificate. + """ private_key = rsa.generate_private_key( public_exponent=65537, @@ -79,6 +89,25 @@ def generate_ca_cert(days: int = 30): def get_secret_name(cluster_name, namespace, api_instance): + """ + Retrieves the name of the Kubernetes secret containing the CA certificate for the given Ray cluster. + + Args: + cluster_name (str): + The name of the Ray cluster. + namespace (str): + The Kubernetes namespace where the Ray cluster is located. + api_instance (client.CoreV1Api): + An instance of the Kubernetes CoreV1Api. + + Returns: + str: + The name of the Kubernetes secret containing the CA certificate. + + Raises: + KeyError: + If no secret matching the cluster name is found. + """ label_selector = f"ray.openshift.ai/cluster-name={cluster_name}" try: secrets = api_instance.list_namespaced_secret( @@ -97,7 +126,26 @@ def get_secret_name(cluster_name, namespace, api_instance): def generate_tls_cert(cluster_name, namespace, days=30): - # Create a folder tls-- and store three files: ca.crt, tls.crt, and tls.key + """ + Generates a TLS certificate and key for a Ray cluster, saving them locally along with the CA certificate. + + Args: + cluster_name (str): + The name of the Ray cluster. + namespace (str): + The Kubernetes namespace where the Ray cluster is located. + days (int): + The number of days for which the TLS certificate will be valid. Default is 30. + + Files Created: + - ca.crt: The CA certificate. + - tls.crt: The TLS certificate signed by the CA. + - tls.key: The private key for the TLS certificate. + + Raises: + Exception: + If an error occurs while retrieving the CA secret. + """ tls_dir = os.path.join(os.getcwd(), f"tls-{cluster_name}-{namespace}") if not os.path.exists(tls_dir): os.makedirs(tls_dir) @@ -181,6 +229,21 @@ def generate_tls_cert(cluster_name, namespace, days=30): def export_env(cluster_name, namespace): + """ + Sets environment variables to configure TLS for a Ray cluster. + + Args: + cluster_name (str): + The name of the Ray cluster. + namespace (str): + The Kubernetes namespace where the Ray cluster is located. + + Environment Variables Set: + - RAY_USE_TLS: Enables TLS for Ray. + - RAY_TLS_SERVER_CERT: Path to the TLS server certificate. + - RAY_TLS_SERVER_KEY: Path to the TLS server private key. + - RAY_TLS_CA_CERT: Path to the CA certificate. 
+ """ tls_dir = os.path.join(os.getcwd(), f"tls-{cluster_name}-{namespace}") os.environ["RAY_USE_TLS"] = "1" os.environ["RAY_TLS_SERVER_CERT"] = os.path.join(tls_dir, "tls.crt") From 63ee4ae9900732cc3bbda69dbc2e04114ea864e9 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 1 Nov 2024 10:39:07 +0000 Subject: [PATCH 409/496] ci: add autogenerated rst files to .gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 52304a62..43684cb4 100644 --- a/.gitignore +++ b/.gitignore @@ -14,4 +14,7 @@ ui-tests/playwright-report ui-tests/test-results /src/codeflare_sdk.egg-info/ docs/sphinx/_build +docs/sphinx/codeflare_sdk.*.rst +docs/sphinx/codeflare_sdk.rst +docs/sphinx/modules.rst .idea/ From d1f63c3798865f4825be0f9b48a816b3976add6c Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 31 Oct 2024 14:37:43 +0000 Subject: [PATCH 410/496] refactor: creation of ray cluster/appwrapper --- .../ray/cluster/build_ray_cluster.py | 553 ++++++++++++++++++ src/codeflare_sdk/ray/cluster/cluster.py | 337 ++++++----- src/codeflare_sdk/ray/cluster/config.py | 8 - .../ray/cluster/generate_yaml.py | 308 ---------- .../ray/templates/base-template.yaml | 192 ------ 5 files changed, 746 insertions(+), 652 deletions(-) create mode 100644 src/codeflare_sdk/ray/cluster/build_ray_cluster.py delete mode 100755 src/codeflare_sdk/ray/cluster/generate_yaml.py delete mode 100644 src/codeflare_sdk/ray/templates/base-template.yaml diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py new file mode 100644 index 00000000..e590d483 --- /dev/null +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -0,0 +1,553 @@ +# Copyright 2024 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" + This sub-module exists primarily to be used internally by the Cluster object + (in the cluster sub-module) for RayCluster/AppWrapper generation. 
+""" +from typing import Union, Tuple, Dict +from ...common import _kube_api_error_handling +from ...common.kubernetes_cluster import get_api_client, config_check +from kubernetes.client.exceptions import ApiException +import codeflare_sdk +import os + +from kubernetes import client +from kubernetes.client import ( + V1ObjectMeta, + V1KeyToPath, + V1ConfigMapVolumeSource, + V1Volume, + V1VolumeMount, + V1ResourceRequirements, + V1Container, + V1ContainerPort, + V1Lifecycle, + V1ExecAction, + V1LifecycleHandler, + V1EnvVar, + V1PodTemplateSpec, + V1PodSpec, + V1LocalObjectReference, +) + +import yaml +import uuid +import sys +import warnings +import json + +FORBIDDEN_CUSTOM_RESOURCE_TYPES = ["GPU", "CPU", "memory"] +VOLUME_MOUNTS = [ + V1VolumeMount( + mount_path="/etc/pki/tls/certs/odh-trusted-ca-bundle.crt", + name="odh-trusted-ca-cert", + sub_path="odh-trusted-ca-bundle.crt", + ), + V1VolumeMount( + mount_path="/etc/ssl/certs/odh-trusted-ca-bundle.crt", + name="odh-trusted-ca-cert", + sub_path="odh-trusted-ca-bundle.crt", + ), + V1VolumeMount( + mount_path="/etc/pki/tls/certs/odh-ca-bundle.crt", + name="odh-ca-cert", + sub_path="odh-ca-bundle.crt", + ), + V1VolumeMount( + mount_path="/etc/ssl/certs/odh-ca-bundle.crt", + name="odh-ca-cert", + sub_path="odh-ca-bundle.crt", + ), +] + +VOLUMES = [ + V1Volume( + name="odh-trusted-ca-cert", + config_map=V1ConfigMapVolumeSource( + name="odh-trusted-ca-bundle", + items=[V1KeyToPath(key="ca-bundle.crt", path="odh-trusted-ca-bundle.crt")], + optional=True, + ), + ), + V1Volume( + name="odh-ca-cert", + config_map=V1ConfigMapVolumeSource( + name="odh-trusted-ca-bundle", + items=[V1KeyToPath(key="odh-ca-bundle.crt", path="odh-ca-bundle.crt")], + optional=True, + ), + ), +] + +SUPPORTED_PYTHON_VERSIONS = { + "3.9": "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", + "3.11": "quay.io/modh/ray@sha256:db667df1bc437a7b0965e8031e905d3ab04b86390d764d120e05ea5a5c18d1b4", +} + + +# RayCluster/AppWrapper builder function +def build_ray_cluster(cluster: "codeflare_sdk.ray.cluster.Cluster"): + """build_ray_cluster is used for creating a Ray Cluster/AppWrapper dict + + The resource is a dict template which uses Kubernetes Objects for creating metadata, resource requests, + specs and containers. The result is sanitised and returned either as a dict or written as a yaml file. 
+    """
+    ray_version = "2.35.0"
+
+    # GPU related variables
+    head_gpu_count, worker_gpu_count = head_worker_gpu_count_from_cluster(cluster)
+    head_resources, worker_resources = head_worker_extended_resources_from_cluster(
+        cluster
+    )
+    head_resources = json.dumps(head_resources).replace('"', '\\"')
+    head_resources = f'"{head_resources}"'
+    worker_resources = json.dumps(worker_resources).replace('"', '\\"')
+    worker_resources = f'"{worker_resources}"'
+
+    # Create the Ray Cluster using the V1RayCluster Object
+    resource = {
+        "apiVersion": "ray.io/v1",
+        "kind": "RayCluster",
+        "metadata": get_metadata(cluster),
+        "spec": {
+            "rayVersion": ray_version,
+            "enableInTreeAutoscaling": False,
+            "autoscalerOptions": {
+                "upscalingMode": "Default",
+                "idleTimeoutSeconds": 60,
+                "resources": get_resources("500m", "500m", "512Mi", "512Mi"),
+            },
+            "headGroupSpec": {
+                "serviceType": "ClusterIP",
+                "enableIngress": False,
+                "rayStartParams": {
+                    "dashboard-host": "0.0.0.0",
+                    "block": "true",
+                    "num-gpus": str(head_gpu_count),
+                    "resources": head_resources,
+                },
+                "template": {
+                    "spec": get_pod_spec(cluster, [get_head_container_spec(cluster)])
+                },
+            },
+            "workerGroupSpecs": [
+                {
+                    "replicas": cluster.config.num_workers,
+                    "minReplicas": cluster.config.num_workers,
+                    "maxReplicas": cluster.config.num_workers,
+                    "groupName": f"small-group-{cluster.config.name}",
+                    "rayStartParams": {
+                        "block": "true",
+                        "num-gpus": str(worker_gpu_count),
+                        "resources": worker_resources,
+                    },
+                    "template": V1PodTemplateSpec(
+                        spec=get_pod_spec(cluster, [get_worker_container_spec(cluster)])
+                    ),
+                }
+            ],
+        },
+    }
+
+    config_check()
+    k8s_client = get_api_client() or client.ApiClient()
+
+    if cluster.config.appwrapper:
+        # Wrap the Ray Cluster in an AppWrapper
+        appwrapper_name, _ = gen_names(cluster.config.name)
+        resource = wrap_cluster(cluster, appwrapper_name, resource)
+
+    resource = k8s_client.sanitize_for_serialization(resource)
+
+    # write_to_file functionality
+    if cluster.config.write_to_file:
+        return write_to_file(cluster, resource)  # Writes the file and returns its name
+    else:
+        print(f"Yaml resources loaded for {cluster.config.name}")
+        return resource  # Returns the Resource as a dict
+
+
+# Metadata related functions
+def get_metadata(cluster: "codeflare_sdk.ray.cluster.Cluster"):
+    """
+    The get_metadata() function builds and returns a V1ObjectMeta Object using cluster configuration parameters
+    """
+    object_meta = V1ObjectMeta(
+        name=cluster.config.name,
+        namespace=cluster.config.namespace,
+        labels=get_labels(cluster),
+    )
+
+    # Get the NB annotation if it exists - could be useful in future for an "annotations" parameter.
+    annotations = get_nb_annotations()
+    if annotations != {}:
+        object_meta.annotations = annotations  # Annotations are not guaranteed, so they are appended to the metadata after creation.
+    return object_meta
+
+
+def get_labels(cluster: "codeflare_sdk.ray.cluster.Cluster"):
+    """
+    The get_labels() function generates a dict "labels" which includes the base label, local queue label and user defined labels
+    """
+    labels = {
+        "controller-tools.k8s.io": "1.0",
+    }
+    if cluster.config.labels != {}:
+        labels.update(cluster.config.labels)
+
+    if cluster.config.appwrapper is False:
+        add_queue_label(cluster, labels)
+
+    return labels
+
+
+def get_nb_annotations():
+    """
+    The get_nb_annotations() function generates the annotation for NB Prefix if the SDK is running in a notebook
+    """
+    annotations = {}
+
+    # Notebook annotation
+    nb_prefix = os.environ.get("NB_PREFIX")
+    if nb_prefix:
+        annotations.update({"app.kubernetes.io/managed-by": nb_prefix})
+
+    return annotations
+
+
+# Head/Worker container related functions
+def update_image(image) -> str:
+    """
+    The update_image() function automatically sets the image config parameter to a preset image based on Python version if not specified.
+    If no Ray image exists for the given Python version, a warning is produced.
+    """
+    if not image:
+        python_version = f"{sys.version_info.major}.{sys.version_info.minor}"
+        if python_version in SUPPORTED_PYTHON_VERSIONS:
+            image = SUPPORTED_PYTHON_VERSIONS[python_version]
+        else:
+            warnings.warn(
+                f"No default Ray image defined for {python_version}. Please provide your own image or use one of the following python versions: {', '.join(SUPPORTED_PYTHON_VERSIONS.keys())}."
+            )
+    return image
+
+
+def get_pod_spec(cluster: "codeflare_sdk.ray.cluster.Cluster", containers):
+    """
+    The get_pod_spec() function generates a V1PodSpec for the head/worker containers
+    """
+    pod_spec = V1PodSpec(
+        containers=containers,
+        volumes=VOLUMES,
+    )
+    if cluster.config.image_pull_secrets != []:
+        pod_spec.image_pull_secrets = generate_image_pull_secrets(cluster)
+
+    return pod_spec
+
+
+def generate_image_pull_secrets(cluster: "codeflare_sdk.ray.cluster.Cluster"):
+    """
+    The generate_image_pull_secrets() method generates a list of V1LocalObjectReference objects, one for each of the specified image pull secrets
+    """
+    pull_secrets = []
+    for pull_secret in cluster.config.image_pull_secrets:
+        pull_secrets.append(V1LocalObjectReference(name=pull_secret))
+
+    return pull_secrets
+
+
+def get_head_container_spec(
+    cluster: "codeflare_sdk.ray.cluster.Cluster",
+):
+    """
+    The get_head_container_spec() function builds and returns a V1Container object including user defined resource requests/limits
+    """
+    head_container = V1Container(
+        name="ray-head",
+        image=update_image(cluster.config.image),
+        image_pull_policy="Always",
+        ports=[
+            V1ContainerPort(name="gcs", container_port=6379),
+            V1ContainerPort(name="dashboard", container_port=8265),
+            V1ContainerPort(name="client", container_port=10001),
+        ],
+        lifecycle=V1Lifecycle(
+            pre_stop=V1LifecycleHandler(
+                _exec=V1ExecAction(["/bin/sh", "-c", "ray stop"])
+            )
+        ),
+        resources=get_resources(
+            cluster.config.head_cpu_requests,
+            cluster.config.head_cpu_limits,
+            cluster.config.head_memory_requests,
+            cluster.config.head_memory_limits,
+            cluster.config.head_extended_resource_requests,
+        ),
+        volume_mounts=VOLUME_MOUNTS,
+    )
+    if cluster.config.envs != {}:
+        head_container.env = generate_env_vars(cluster)
+
+    return head_container
+
+
+def generate_env_vars(cluster: "codeflare_sdk.ray.cluster.Cluster"):
+    """
+    The generate_env_vars() function builds and returns a list of V1EnvVar objects populated by user specified environment variables
+    """
+    envs = []
+    for key, value in
cluster.config.envs.items(): + env_var = V1EnvVar(name=key, value=value) + envs.append(env_var) + + return envs + + +def get_worker_container_spec( + cluster: "codeflare_sdk.ray.cluster.Cluster", +): + """ + The get_worker_container_spec() function builds and returns a V1Container object including user defined resource requests/limits + """ + worker_container = V1Container( + name="machine-learning", + image=update_image(cluster.config.image), + image_pull_policy="Always", + lifecycle=V1Lifecycle( + pre_stop=V1LifecycleHandler( + _exec=V1ExecAction(["/bin/sh", "-c", "ray stop"]) + ) + ), + resources=get_resources( + cluster.config.worker_cpu_requests, + cluster.config.worker_cpu_limits, + cluster.config.worker_memory_requests, + cluster.config.worker_memory_limits, + cluster.config.worker_extended_resource_requests, + ), + volume_mounts=VOLUME_MOUNTS, + ) + + if cluster.config.envs != {}: + worker_container.env = generate_env_vars(cluster) + + return worker_container + + +def get_resources( + cpu_requests: Union[int, str], + cpu_limits: Union[int, str], + memory_requests: Union[int, str], + memory_limits: Union[int, str], + custom_extended_resource_requests: Dict[str, int] = None, +): + """ + The get_resources() function generates a V1ResourceRequirements object for cpu/memory request/limits and GPU resources + """ + resource_requirements = V1ResourceRequirements( + requests={"cpu": cpu_requests, "memory": memory_requests}, + limits={"cpu": cpu_limits, "memory": memory_limits}, + ) + + # Append the resource/limit requests with custom extended resources + if custom_extended_resource_requests is not None: + for k in custom_extended_resource_requests.keys(): + resource_requirements.limits[k] = custom_extended_resource_requests[k] + resource_requirements.requests[k] = custom_extended_resource_requests[k] + + return resource_requirements + + +# GPU related functions +def head_worker_gpu_count_from_cluster( + cluster: "codeflare_sdk.ray.cluster.Cluster", +) -> Tuple[int, int]: + """ + The head_worker_gpu_count_from_cluster() function returns the total number of requested GPUs for the head and worker separately + """ + head_gpus = 0 + worker_gpus = 0 + for k in cluster.config.head_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type == "GPU": + head_gpus += int(cluster.config.head_extended_resource_requests[k]) + for k in cluster.config.worker_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type == "GPU": + worker_gpus += int(cluster.config.worker_extended_resource_requests[k]) + + return head_gpus, worker_gpus + + +def head_worker_extended_resources_from_cluster( + cluster: "codeflare_sdk.ray.cluster.Cluster", +) -> Tuple[dict, dict]: + """ + The head_worker_extended_resources_from_cluster() function returns 2 dicts for head/worker respectively populated by the GPU type requested by the user + """ + head_worker_extended_resources = {}, {} + for k in cluster.config.head_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES: + continue + head_worker_extended_resources[0][ + resource_type + ] = cluster.config.head_extended_resource_requests[ + k + ] + head_worker_extended_resources[ + 0 + ].get( + resource_type, 0 + ) + + for k in cluster.config.worker_extended_resource_requests.keys(): + resource_type = cluster.config.extended_resource_mapping[k] + if resource_type in 
FORBIDDEN_CUSTOM_RESOURCE_TYPES: + continue + head_worker_extended_resources[1][ + resource_type + ] = cluster.config.worker_extended_resource_requests[ + k + ] + head_worker_extended_resources[ + 1 + ].get( + resource_type, 0 + ) + return head_worker_extended_resources + + +# Local Queue related functions +def add_queue_label(cluster: "codeflare_sdk.ray.cluster.Cluster", labels: dict): + """ + The add_queue_label() function updates the given base labels with the local queue label if Kueue exists on the Cluster + """ + lq_name = cluster.config.local_queue or get_default_local_queue(cluster, labels) + if lq_name == None: + return + elif not local_queue_exists(cluster): + raise ValueError( + "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" + ) + labels.update({"kueue.x-k8s.io/queue-name": lq_name}) + + +def local_queue_exists(cluster: "codeflare_sdk.ray.cluster.Cluster"): + """ + The local_queue_exists() checks if the user inputted local_queue exists in the given namespace and returns a bool + """ + # get all local queues in the namespace + try: + config_check() + api_instance = client.CustomObjectsApi(get_api_client()) + local_queues = api_instance.list_namespaced_custom_object( + group="kueue.x-k8s.io", + version="v1beta1", + namespace=cluster.config.namespace, + plural="localqueues", + ) + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + # check if local queue with the name provided in cluster config exists + for lq in local_queues["items"]: + if lq["metadata"]["name"] == cluster.config.local_queue: + return True + return False + + +def get_default_local_queue(cluster: "codeflare_sdk.ray.cluster.Cluster", labels: dict): + """ + The get_default_local_queue() function attempts to find a local queue with the default label == true, if that is the case the labels variable is updated with that local queue + """ + try: + # Try to get the default local queue if it exists and append the label list + config_check() + api_instance = client.CustomObjectsApi(get_api_client()) + local_queues = api_instance.list_namespaced_custom_object( + group="kueue.x-k8s.io", + version="v1beta1", + namespace=cluster.config.namespace, + plural="localqueues", + ) + except ApiException as e: # pragma: no cover + if e.status == 404 or e.status == 403: + return + else: + return _kube_api_error_handling(e) + + for lq in local_queues["items"]: + if ( + "annotations" in lq["metadata"] + and "kueue.x-k8s.io/default-queue" in lq["metadata"]["annotations"] + and lq["metadata"]["annotations"]["kueue.x-k8s.io/default-queue"].lower() + == "true" + ): + labels.update({"kueue.x-k8s.io/queue-name": lq["metadata"]["name"]}) + + +# AppWrapper related functions +def wrap_cluster( + cluster: "codeflare_sdk.ray.cluster.Cluster", + appwrapper_name: str, + ray_cluster_yaml: dict, +): + """ + Wraps the pre-built Ray Cluster dict in an AppWrapper + """ + wrapping = { + "apiVersion": "workload.codeflare.dev/v1beta2", + "kind": "AppWrapper", + "metadata": {"name": appwrapper_name, "namespace": cluster.config.namespace}, + "spec": {"components": [{"template": ray_cluster_yaml}]}, + } + # Add local queue label if it is necessary + labels = {} + add_queue_label(cluster, labels) + if labels != {}: + wrapping["metadata"]["labels"] = labels + + return wrapping + + +# Etc. 
+def write_to_file(cluster: "codeflare_sdk.ray.cluster.Cluster", resource: dict): + """ + The write_to_file function writes the built Ray Cluster/AppWrapper dict as a yaml file in the .codeflare folder + """ + directory_path = os.path.expanduser("~/.codeflare/resources/") + output_file_name = os.path.join(directory_path, cluster.config.name + ".yaml") + + directory_path = os.path.dirname(output_file_name) + if not os.path.exists(directory_path): + os.makedirs(directory_path) + + with open(output_file_name, "w") as outfile: + yaml.dump(resource, outfile, default_flow_style=False) + + print(f"Written to: {output_file_name}") + return output_file_name + + +def gen_names(name): + """ + Generates a unique name for the appwrapper and Ray Cluster + """ + if not name: + gen_id = str(uuid.uuid4()) + appwrapper_name = "appwrapper-" + gen_id + cluster_name = "cluster-" + gen_id + return appwrapper_name, cluster_name + else: + return name, name diff --git a/src/codeflare_sdk/ray/cluster/cluster.py b/src/codeflare_sdk/ray/cluster/cluster.py index da87639c..fe29eaa9 100644 --- a/src/codeflare_sdk/ray/cluster/cluster.py +++ b/src/codeflare_sdk/ray/cluster/cluster.py @@ -28,12 +28,9 @@ get_api_client, ) from . import pretty_print -from .generate_yaml import ( - generate_appwrapper, - head_worker_gpu_count_from_cluster, -) +from .build_ray_cluster import build_ray_cluster, head_worker_gpu_count_from_cluster +from .build_ray_cluster import write_to_file as write_cluster_to_file from ...common import _kube_api_error_handling -from .generate_yaml import is_openshift_cluster from .config import ClusterConfiguration from .status import ( @@ -56,6 +53,7 @@ from kubernetes import config from kubernetes.client.rest import ApiException +import warnings class Cluster: @@ -74,9 +72,15 @@ def __init__(self, config: ClusterConfiguration): request. """ self.config = config - self.app_wrapper_yaml = self.create_app_wrapper() + if self.config is None: + warnings.warn( + "Please provide a ClusterConfiguration to initialise the Cluster object" + ) + return + else: + self.resource_yaml = self.create_resource() + self._job_submission_client = None - self.app_wrapper_name = self.config.name if is_notebook(): cluster_up_down_buttons(self) @@ -91,7 +95,7 @@ def _client_headers(self): @property def _client_verify_tls(self): - if not is_openshift_cluster or not self.config.verify_tls: + if not _is_openshift_cluster or not self.config.verify_tls: return False return True @@ -100,7 +104,7 @@ def job_client(self): k8client = get_api_client() if self._job_submission_client: return self._job_submission_client - if is_openshift_cluster(): + if _is_openshift_cluster(): self._job_submission_client = JobSubmissionClient( self.cluster_dashboard_uri(), headers=self._client_headers, @@ -112,7 +116,7 @@ def job_client(self): ) return self._job_submission_client - def create_app_wrapper(self): + def create_resource(self): """ Called upon cluster object creation, creates an AppWrapper yaml based on the specifications of the ClusterConfiguration. @@ -127,7 +131,7 @@ def create_app_wrapper(self): f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. Check your Kubernetes Authentication." 
) - return generate_appwrapper(self) + return build_ray_cluster(self) # creates a new cluster with the provided or default spec def up(self): @@ -146,7 +150,7 @@ def up(self): api_instance = client.CustomObjectsApi(get_api_client()) if self.config.appwrapper: if self.config.write_to_file: - with open(self.app_wrapper_yaml) as f: + with open(self.resource_yaml) as f: aw = yaml.load(f, Loader=yaml.FullLoader) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", @@ -156,13 +160,12 @@ def up(self): body=aw, ) else: - aw = yaml.safe_load(self.app_wrapper_yaml) api_instance.create_namespaced_custom_object( group="workload.codeflare.dev", version="v1beta2", namespace=namespace, plural="appwrappers", - body=aw, + body=self.resource_yaml, ) print(f"AppWrapper: '{self.config.name}' has successfully been created") else: @@ -198,6 +201,7 @@ def down(self): associated with the cluster. """ namespace = self.config.namespace + resource_name = self.config.name self._throw_for_no_raycluster() try: config_check() @@ -208,11 +212,11 @@ def down(self): version="v1beta2", namespace=namespace, plural="appwrappers", - name=self.app_wrapper_name, + name=resource_name, ) - print(f"AppWrapper: '{self.config.name}' has successfully been deleted") + print(f"AppWrapper: '{resource_name}' has successfully been deleted") else: - self._component_resources_down(namespace, api_instance) + _delete_resources(resource_name, namespace, api_instance) print( f"Ray Cluster: '{self.config.name}' has successfully been deleted" ) @@ -358,7 +362,7 @@ def cluster_dashboard_uri(self) -> str: Returns a string containing the cluster's dashboard URI. """ config_check() - if is_openshift_cluster(): + if _is_openshift_cluster(): try: api_instance = client.CustomObjectsApi(get_api_client()) routes = api_instance.list_namespaced_custom_object( @@ -442,67 +446,6 @@ def _head_worker_extended_resources_from_rc_dict(rc: Dict) -> Tuple[dict, dict]: return head_extended_resources, worker_extended_resources - def from_k8_cluster_object( - rc, - appwrapper=True, - write_to_file=False, - verify_tls=True, - ): - config_check() - machine_types = ( - rc["metadata"]["labels"]["orderedinstance"].split("_") - if "orderedinstance" in rc["metadata"]["labels"] - else [] - ) - - ( - head_extended_resources, - worker_extended_resources, - ) = Cluster._head_worker_extended_resources_from_rc_dict(rc) - - cluster_config = ClusterConfiguration( - name=rc["metadata"]["name"], - namespace=rc["metadata"]["namespace"], - machine_types=machine_types, - head_cpu_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][ - "containers" - ][0]["resources"]["requests"]["cpu"], - head_cpu_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][ - "containers" - ][0]["resources"]["limits"]["cpu"], - head_memory_requests=rc["spec"]["headGroupSpec"]["template"]["spec"][ - "containers" - ][0]["resources"]["requests"]["memory"], - head_memory_limits=rc["spec"]["headGroupSpec"]["template"]["spec"][ - "containers" - ][0]["resources"]["limits"]["memory"], - num_workers=rc["spec"]["workerGroupSpecs"][0]["minReplicas"], - worker_cpu_requests=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["requests"]["cpu"], - worker_cpu_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["limits"]["cpu"], - worker_memory_requests=rc["spec"]["workerGroupSpecs"][0]["template"][ - "spec" - ]["containers"][0]["resources"]["requests"]["memory"], - 
worker_memory_limits=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"][ - "containers" - ][0]["resources"]["limits"]["memory"], - worker_extended_resource_requests=worker_extended_resources, - head_extended_resource_requests=head_extended_resources, - image=rc["spec"]["workerGroupSpecs"][0]["template"]["spec"]["containers"][ - 0 - ]["image"], - appwrapper=appwrapper, - write_to_file=write_to_file, - verify_tls=verify_tls, - local_queue=rc["metadata"] - .get("labels", dict()) - .get("kueue.x-k8s.io/queue-name", None), - ) - return Cluster(cluster_config) - def local_client_url(self): ingress_domain = _get_ingress_domain(self) return f"ray://{ingress_domain}" @@ -511,35 +454,22 @@ def _component_resources_up( self, namespace: str, api_instance: client.CustomObjectsApi ): if self.config.write_to_file: - with open(self.app_wrapper_yaml) as f: - yamls = list(yaml.load_all(f, Loader=yaml.FullLoader)) - for resource in yamls: - enable_ingress = ( - resource.get("spec", {}) - .get("headGroupSpec", {}) - .get("enableIngress") - ) - if resource["kind"] == "RayCluster" and enable_ingress is True: - name = resource["metadata"]["name"] - print( - f"Forbidden: RayCluster '{name}' has 'enableIngress' set to 'True'." - ) - return - _create_resources(yamls, namespace, api_instance) + with open(self.resource_yaml) as f: + ray_cluster = yaml.safe_load(f) + _create_resources(ray_cluster, namespace, api_instance) else: - yamls = yaml.load_all(self.app_wrapper_yaml, Loader=yaml.FullLoader) - _create_resources(yamls, namespace, api_instance) + _create_resources(self.resource_yaml, namespace, api_instance) def _component_resources_down( self, namespace: str, api_instance: client.CustomObjectsApi ): cluster_name = self.config.name if self.config.write_to_file: - with open(self.app_wrapper_yaml) as f: + with open(self.resource_yaml) as f: yamls = yaml.load_all(f, Loader=yaml.FullLoader) _delete_resources(yamls, namespace, api_instance, cluster_name) else: - yamls = yaml.safe_load_all(self.app_wrapper_yaml) + yamls = yaml.safe_load_all(self.resource_yaml) _delete_resources(yamls, namespace, api_instance, cluster_name) @@ -595,61 +525,166 @@ def get_current_namespace(): # pragma: no cover def get_cluster( cluster_name: str, namespace: str = "default", - write_to_file: bool = False, verify_tls: bool = True, + write_to_file: bool = False, ): - try: - config_check() - api_instance = client.CustomObjectsApi(get_api_client()) - rcs = api_instance.list_namespaced_custom_object( - group="ray.io", - version="v1", - namespace=namespace, - plural="rayclusters", - ) - except Exception as e: - return _kube_api_error_handling(e) - - for rc in rcs["items"]: - if rc["metadata"]["name"] == cluster_name: - appwrapper = _check_aw_exists(cluster_name, namespace) - return Cluster.from_k8_cluster_object( - rc, - appwrapper=appwrapper, - write_to_file=write_to_file, - verify_tls=verify_tls, + """Returns the given Ray Cluster/AppWrapper as a Cluster Object + + The get_cluster() method is used for retrieving a Ray Cluster that already exists in your K8s Cluster. + Returned is a basic Cluster object which includes the exact yaml for your Ray Cluster under Cluster.resource_yaml. 
+ + Parameters + ---------- + cluster_name : str + The name of the Ray Cluster/AppWrapper + namespace : str + The namespace of the Ray Cluster/AppWrapper + verify_tls : bool + A boolean indicating whether to verify TLS when connecting to the cluster + write_to_file : bool + A boolean indicating whether or not to write the resource to a Yaml file + + Raises + ------ + Exception + If the Ray Cluster/AppWrapper cannot be found/does not exist + """ + config_check() + api_instance = client.CustomObjectsApi(get_api_client()) + # Check/Get the AppWrapper if it exists + is_appwrapper = _check_aw_exists(cluster_name, namespace) + if is_appwrapper: + try: + resource = api_instance.get_namespaced_custom_object( + group="workload.codeflare.dev", + version="v1beta2", + namespace=namespace, + plural="appwrappers", + name=cluster_name, ) - raise FileNotFoundError( - f"Cluster {cluster_name} is not found in {namespace} namespace" - ) - - -# private methods -def _delete_resources( - yamls, namespace: str, api_instance: client.CustomObjectsApi, cluster_name: str -): - for resource in yamls: - if resource["kind"] == "RayCluster": - name = resource["metadata"]["name"] - api_instance.delete_namespaced_custom_object( + resource_extraction = resource["spec"]["components"][0]["template"] + except Exception as e: + return _kube_api_error_handling(e) + else: + # Get the Ray Cluster + try: + resource = api_instance.get_namespaced_custom_object( group="ray.io", version="v1", namespace=namespace, plural="rayclusters", - name=name, + name=cluster_name, ) + resource_extraction = resource + except Exception as e: + return _kube_api_error_handling(e) + + ( + head_extended_resources, + worker_extended_resources, + ) = Cluster._head_worker_extended_resources_from_rc_dict(resource_extraction) + # Create a Cluster Configuration with just the necessary provided parameters + cluster_config = ClusterConfiguration( + name=cluster_name, + namespace=namespace, + verify_tls=verify_tls, + write_to_file=write_to_file, + appwrapper=is_appwrapper, + head_cpu_limits=resource_extraction["spec"]["headGroupSpec"]["template"][ + "spec" + ]["containers"][0]["resources"]["requests"]["cpu"], + head_cpu_requests=resource_extraction["spec"]["headGroupSpec"]["template"][ + "spec" + ]["containers"][0]["resources"]["limits"]["cpu"], + head_memory_limits=resource_extraction["spec"]["headGroupSpec"]["template"][ + "spec" + ]["containers"][0]["resources"]["requests"]["memory"], + head_memory_requests=resource_extraction["spec"]["headGroupSpec"]["template"][ + "spec" + ]["containers"][0]["resources"]["limits"]["memory"], + num_workers=resource_extraction["spec"]["workerGroupSpecs"][0]["minReplicas"], + worker_cpu_limits=resource_extraction["spec"]["workerGroupSpecs"][0][ + "template" + ]["spec"]["containers"][0]["resources"]["limits"]["cpu"], + worker_cpu_requests=resource_extraction["spec"]["workerGroupSpecs"][0][ + "template" + ]["spec"]["containers"][0]["resources"]["requests"]["cpu"], + worker_memory_limits=resource_extraction["spec"]["workerGroupSpecs"][0][ + "template" + ]["spec"]["containers"][0]["resources"]["requests"]["memory"], + worker_memory_requests=resource_extraction["spec"]["workerGroupSpecs"][0][ + "template" + ]["spec"]["containers"][0]["resources"]["limits"]["memory"], + head_extended_resource_requests=head_extended_resources, + worker_extended_resource_requests=worker_extended_resources, + ) + # Ignore the warning here for the lack of a ClusterConfiguration + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + 
message="Please provide a ClusterConfiguration to initialise the Cluster object", + ) + cluster = Cluster(None) + cluster.config = cluster_config + + # Remove auto-generated fields like creationTimestamp, uid and etc. + remove_autogenerated_fields(resource) + + if write_to_file: + cluster.resource_yaml = write_cluster_to_file(cluster, resource) + else: + # Update the Cluster's resource_yaml to reflect the retrieved Ray Cluster/AppWrapper + cluster.resource_yaml = resource + print(f"Yaml resources loaded for {cluster.config.name}") + + return cluster + + +def remove_autogenerated_fields(resource): + """Recursively remove autogenerated fields from a dictionary.""" + if isinstance(resource, dict): + for key in list(resource.keys()): + if key in [ + "creationTimestamp", + "resourceVersion", + "uid", + "selfLink", + "managedFields", + "finalizers", + "generation", + "status", + "suspend", + "workload.codeflare.dev/user", # AppWrapper field + "workload.codeflare.dev/userid", # AppWrapper field + "podSetInfos", # AppWrapper field + ]: + del resource[key] + else: + remove_autogenerated_fields(resource[key]) + elif isinstance(resource, list): + for item in resource: + remove_autogenerated_fields(item) + + +# private methods +def _delete_resources(name: str, namespace: str, api_instance: client.CustomObjectsApi): + api_instance.delete_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=namespace, + plural="rayclusters", + name=name, + ) def _create_resources(yamls, namespace: str, api_instance: client.CustomObjectsApi): - for resource in yamls: - if resource["kind"] == "RayCluster": - api_instance.create_namespaced_custom_object( - group="ray.io", - version="v1", - namespace=namespace, - plural="rayclusters", - body=resource, - ) + api_instance.create_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=namespace, + plural="rayclusters", + body=yamls, + ) def _check_aw_exists(name: str, namespace: str) -> bool: @@ -680,7 +715,7 @@ def _get_ingress_domain(self): # pragma: no cover namespace = get_current_namespace() domain = None - if is_openshift_cluster(): + if _is_openshift_cluster(): try: api_instance = client.CustomObjectsApi(get_api_client()) @@ -812,7 +847,7 @@ def _map_to_ray_cluster(rc) -> Optional[RayCluster]: status = RayClusterStatus.UNKNOWN config_check() dashboard_url = None - if is_openshift_cluster(): + if _is_openshift_cluster(): try: api_instance = client.CustomObjectsApi(get_api_client()) routes = api_instance.list_namespaced_custom_object( @@ -926,3 +961,17 @@ def _copy_to_ray(cluster: Cluster) -> RayCluster: if ray.status == CodeFlareClusterStatus.READY: ray.status = RayClusterStatus.READY return ray + + +# Check if the routes api exists +def _is_openshift_cluster(): + try: + config_check() + for api in client.ApisApi(get_api_client()).get_api_versions().groups: + for v in api.versions: + if "route.openshift.io/v1" in v.group_version: + return True + else: + return False + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index a9fe4408..b8b1652e 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -47,19 +47,16 @@ class ClusterConfiguration: Attributes: - name: The name of the cluster. - namespace: The namespace in which the cluster should be created. - - head_info: A list of strings containing information about the head node. 
- head_cpus: The number of CPUs to allocate to the head node. - head_memory: The amount of memory to allocate to the head node. - head_gpus: The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests) - head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} - - machine_types: A list of machine types to use for the cluster. - min_cpus: The minimum number of CPUs to allocate to each worker. - max_cpus: The maximum number of CPUs to allocate to each worker. - num_workers: The number of workers to create. - min_memory: The minimum amount of memory to allocate to each worker. - max_memory: The maximum amount of memory to allocate to each worker. - num_gpus: The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests) - - template: The path to the template file to use for the cluster. - appwrapper: A boolean indicating whether to use an AppWrapper. - envs: A dictionary of environment variables to set for the cluster. - image: The image to use for the cluster. @@ -74,7 +71,6 @@ class ClusterConfiguration: name: str namespace: Optional[str] = None - head_info: List[str] = field(default_factory=list) head_cpu_requests: Union[int, str] = 2 head_cpu_limits: Union[int, str] = 2 head_cpus: Optional[Union[int, str]] = None # Deprecating @@ -85,9 +81,6 @@ class ClusterConfiguration: head_extended_resource_requests: Dict[str, Union[str, int]] = field( default_factory=dict ) - machine_types: List[str] = field( - default_factory=list - ) # ["m4.xlarge", "g4dn.xlarge"] worker_cpu_requests: Union[int, str] = 1 worker_cpu_limits: Union[int, str] = 1 min_cpus: Optional[Union[int, str]] = None # Deprecating @@ -98,7 +91,6 @@ class ClusterConfiguration: min_memory: Optional[Union[int, str]] = None # Deprecating max_memory: Optional[Union[int, str]] = None # Deprecating num_gpus: Optional[int] = None # Deprecating - template: str = f"{dir}/templates/base-template.yaml" appwrapper: bool = False envs: Dict[str, str] = field(default_factory=dict) image: str = "" diff --git a/src/codeflare_sdk/ray/cluster/generate_yaml.py b/src/codeflare_sdk/ray/cluster/generate_yaml.py deleted file mode 100755 index 01823f1d..00000000 --- a/src/codeflare_sdk/ray/cluster/generate_yaml.py +++ /dev/null @@ -1,308 +0,0 @@ -# Copyright 2022 IBM, Red Hat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -This sub-module exists primarily to be used internally by the Cluster object -(in the cluster sub-module) for AppWrapper generation. 
-""" - -import json -import sys -import typing -import warnings -import yaml -import os -import uuid -from kubernetes import client -from ...common import _kube_api_error_handling -from ...common.kueue import add_queue_label -from ...common.kubernetes_cluster.auth import ( - get_api_client, - config_check, -) -import codeflare_sdk - -SUPPORTED_PYTHON_VERSIONS = { - "3.9": "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", - "3.11": "quay.io/modh/ray@sha256:db667df1bc437a7b0965e8031e905d3ab04b86390d764d120e05ea5a5c18d1b4", -} - - -def read_template(template): - with open(template, "r") as stream: - try: - return yaml.safe_load(stream) - except yaml.YAMLError as exc: - print(exc) - - -def gen_names(name): - if not name: - gen_id = str(uuid.uuid4()) - appwrapper_name = "appwrapper-" + gen_id - cluster_name = "cluster-" + gen_id - return appwrapper_name, cluster_name - else: - return name, name - - -# Check if the routes api exists -def is_openshift_cluster(): - try: - config_check() - for api in client.ApisApi(get_api_client()).get_api_versions().groups: - for v in api.versions: - if "route.openshift.io/v1" in v.group_version: - return True - else: - return False - except Exception as e: # pragma: no cover - return _kube_api_error_handling(e) - - -def is_kind_cluster(): - try: - config_check() - v1 = client.CoreV1Api() - label_selector = "kubernetes.io/hostname=kind-control-plane" - nodes = v1.list_node(label_selector=label_selector) - # If we find one or more nodes with the label, assume it's a KinD cluster - return len(nodes.items) > 0 - except Exception as e: - print(f"Error checking if cluster is KinD: {e}") - return False - - -def update_names( - cluster_yaml: dict, - cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", -): - metadata = cluster_yaml.get("metadata") - metadata["name"] = cluster.config.name - metadata["namespace"] = cluster.config.namespace - - -def update_image(spec, image): - containers = spec.get("containers") - if not image: - python_version = f"{sys.version_info.major}.{sys.version_info.minor}" - if python_version in SUPPORTED_PYTHON_VERSIONS: - image = SUPPORTED_PYTHON_VERSIONS[python_version] - else: - warnings.warn( - f"No default Ray image defined for {python_version}. Please provide your own image or use one of the following python versions: {', '.join(SUPPORTED_PYTHON_VERSIONS.keys())}." 
- ) - for container in containers: - container["image"] = image - - -def update_image_pull_secrets(spec, image_pull_secrets): - template_secrets = spec.get("imagePullSecrets", []) - spec["imagePullSecrets"] = template_secrets + [ - {"name": x} for x in image_pull_secrets - ] - - -def update_env(spec, env): - containers = spec.get("containers") - for container in containers: - if env: - if "env" in container: - container["env"].extend(env) - else: - container["env"] = env - - -def update_resources( - spec, - cpu_requests, - cpu_limits, - memory_requests, - memory_limits, - custom_resources, -): - container = spec.get("containers") - for resource in container: - requests = resource.get("resources").get("requests") - if requests is not None: - requests["cpu"] = cpu_requests - requests["memory"] = memory_requests - limits = resource.get("resources").get("limits") - if limits is not None: - limits["cpu"] = cpu_limits - limits["memory"] = memory_limits - for k in custom_resources.keys(): - limits[k] = custom_resources[k] - requests[k] = custom_resources[k] - - -def head_worker_gpu_count_from_cluster( - cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", -) -> typing.Tuple[int, int]: - head_gpus = 0 - worker_gpus = 0 - for k in cluster.config.head_extended_resource_requests.keys(): - resource_type = cluster.config.extended_resource_mapping[k] - if resource_type == "GPU": - head_gpus += int(cluster.config.head_extended_resource_requests[k]) - for k in cluster.config.worker_extended_resource_requests.keys(): - resource_type = cluster.config.extended_resource_mapping[k] - if resource_type == "GPU": - worker_gpus += int(cluster.config.worker_extended_resource_requests[k]) - - return head_gpus, worker_gpus - - -FORBIDDEN_CUSTOM_RESOURCE_TYPES = ["GPU", "CPU", "memory"] - - -def head_worker_resources_from_cluster( - cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", -) -> typing.Tuple[dict, dict]: - to_return = {}, {} - for k in cluster.config.head_extended_resource_requests.keys(): - resource_type = cluster.config.extended_resource_mapping[k] - if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES: - continue - to_return[0][resource_type] = cluster.config.head_extended_resource_requests[ - k - ] + to_return[0].get(resource_type, 0) - - for k in cluster.config.worker_extended_resource_requests.keys(): - resource_type = cluster.config.extended_resource_mapping[k] - if resource_type in FORBIDDEN_CUSTOM_RESOURCE_TYPES: - continue - to_return[1][resource_type] = cluster.config.worker_extended_resource_requests[ - k - ] + to_return[1].get(resource_type, 0) - return to_return - - -def update_nodes( - ray_cluster_dict: dict, - cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", -): - head = ray_cluster_dict.get("spec").get("headGroupSpec") - worker = ray_cluster_dict.get("spec").get("workerGroupSpecs")[0] - head_gpus, worker_gpus = head_worker_gpu_count_from_cluster(cluster) - head_resources, worker_resources = head_worker_resources_from_cluster(cluster) - head_resources = json.dumps(head_resources).replace('"', '\\"') - head_resources = f'"{head_resources}"' - worker_resources = json.dumps(worker_resources).replace('"', '\\"') - worker_resources = f'"{worker_resources}"' - head["rayStartParams"]["num-gpus"] = str(head_gpus) - head["rayStartParams"]["resources"] = head_resources - - # Head counts as first worker - worker["replicas"] = cluster.config.num_workers - worker["minReplicas"] = cluster.config.num_workers - worker["maxReplicas"] = cluster.config.num_workers - worker["groupName"] = "small-group-" 
+ cluster.config.name - worker["rayStartParams"]["num-gpus"] = str(worker_gpus) - worker["rayStartParams"]["resources"] = worker_resources - - for comp in [head, worker]: - spec = comp.get("template").get("spec") - update_image_pull_secrets(spec, cluster.config.image_pull_secrets) - update_image(spec, cluster.config.image) - update_env(spec, cluster.config.envs) - if comp == head: - # TODO: Eventually add head node configuration outside of template - update_resources( - spec, - cluster.config.head_cpu_requests, - cluster.config.head_cpu_limits, - cluster.config.head_memory_requests, - cluster.config.head_memory_limits, - cluster.config.head_extended_resource_requests, - ) - else: - update_resources( - spec, - cluster.config.worker_cpu_requests, - cluster.config.worker_cpu_limits, - cluster.config.worker_memory_requests, - cluster.config.worker_memory_limits, - cluster.config.worker_extended_resource_requests, - ) - - -def del_from_list_by_name(l: list, target: typing.List[str]) -> list: - return [x for x in l if x["name"] not in target] - - -def augment_labels(item: dict, labels: dict): - if not "labels" in item["metadata"]: - item["metadata"]["labels"] = {} - item["metadata"]["labels"].update(labels) - - -def notebook_annotations(item: dict): - nb_prefix = os.environ.get("NB_PREFIX") - if nb_prefix: - if not "annotations" in item["metadata"]: - item["metadata"]["annotations"] = {} - item["metadata"]["annotations"].update( - {"app.kubernetes.io/managed-by": nb_prefix} - ) - - -def wrap_cluster(cluster_yaml: dict, appwrapper_name: str, namespace: str): - return { - "apiVersion": "workload.codeflare.dev/v1beta2", - "kind": "AppWrapper", - "metadata": {"name": appwrapper_name, "namespace": namespace}, - "spec": {"components": [{"template": cluster_yaml}]}, - } - - -def write_user_yaml(user_yaml, output_file_name): - # Create the directory if it doesn't exist - directory_path = os.path.dirname(output_file_name) - if not os.path.exists(directory_path): - os.makedirs(directory_path) - - with open(output_file_name, "w") as outfile: - yaml.dump(user_yaml, outfile, default_flow_style=False) - - print(f"Written to: {output_file_name}") - - -def generate_appwrapper(cluster: "codeflare_sdk.ray.cluster.cluster.Cluster"): - cluster_yaml = read_template(cluster.config.template) - appwrapper_name, _ = gen_names(cluster.config.name) - update_names( - cluster_yaml, - cluster, - ) - update_nodes(cluster_yaml, cluster) - augment_labels(cluster_yaml, cluster.config.labels) - notebook_annotations(cluster_yaml) - user_yaml = ( - wrap_cluster(cluster_yaml, appwrapper_name, cluster.config.namespace) - if cluster.config.appwrapper - else cluster_yaml - ) - - add_queue_label(user_yaml, cluster.config.namespace, cluster.config.local_queue) - - if cluster.config.write_to_file: - directory_path = os.path.expanduser("~/.codeflare/resources/") - outfile = os.path.join(directory_path, appwrapper_name + ".yaml") - write_user_yaml(user_yaml, outfile) - return outfile - else: - user_yaml = yaml.dump(user_yaml) - print(f"Yaml resources loaded for {cluster.config.name}") - return user_yaml diff --git a/src/codeflare_sdk/ray/templates/base-template.yaml b/src/codeflare_sdk/ray/templates/base-template.yaml deleted file mode 100644 index b59d2a49..00000000 --- a/src/codeflare_sdk/ray/templates/base-template.yaml +++ /dev/null @@ -1,192 +0,0 @@ -# This config demonstrates KubeRay's Ray autoscaler integration. -# The resource requests and limits in this config are too small for production! 
-# For an example with more realistic resource configuration, see -# ray-cluster.autoscaler.large.yaml. -apiVersion: ray.io/v1 -kind: RayCluster -metadata: - labels: - controller-tools.k8s.io: "1.0" - # A unique identifier for the head node and workers of this cluster. - name: kuberay-cluster - namespace: default -spec: - # The version of Ray you are using. Make sure all Ray containers are running this version of Ray. - rayVersion: '2.35.0' - # If enableInTreeAutoscaling is true, the autoscaler sidecar will be added to the Ray head pod. - # Ray autoscaler integration is supported only for Ray versions >= 1.11.0 - # Ray autoscaler integration is Beta with KubeRay >= 0.3.0 and Ray >= 2.0.0. - enableInTreeAutoscaling: false - # autoscalerOptions is an OPTIONAL field specifying configuration overrides for the Ray autoscaler. - # The example configuration shown below below represents the DEFAULT values. - # (You may delete autoscalerOptions if the defaults are suitable.) - autoscalerOptions: - # upscalingMode is "Default" or "Aggressive." - # Conservative: Upscaling is rate-limited; the number of pending worker pods is at most the size of the Ray cluster. - # Default: Upscaling is not rate-limited. - # Aggressive: An alias for Default; upscaling is not rate-limited. - upscalingMode: Default - # idleTimeoutSeconds is the number of seconds to wait before scaling down a worker pod which is not using Ray resources. - idleTimeoutSeconds: 60 - # image optionally overrides the autoscaler's container image. - # If instance.spec.rayVersion is at least "2.0.0", the autoscaler will default to the same image as - # the ray container. For older Ray versions, the autoscaler will default to using the Ray 2.0.0 image. - ## image: "my-repo/my-custom-autoscaler-image:tag" - # imagePullPolicy optionally overrides the autoscaler container's image pull policy. - imagePullPolicy: Always - # resources specifies optional resource request and limit overrides for the autoscaler container. - # For large Ray clusters, we recommend monitoring container resource usage to determine if overriding the defaults is required. - resources: - limits: - cpu: "500m" - memory: "512Mi" - requests: - cpu: "500m" - memory: "512Mi" - ######################headGroupSpec################################# - # head group template and specs, (perhaps 'group' is not needed in the name) - headGroupSpec: - # Kubernetes Service Type, valid values are 'ClusterIP', 'NodePort' and 'LoadBalancer' - serviceType: ClusterIP - enableIngress: false - # logical group name, for this called head-group, also can be functional - # pod type head or worker - # rayNodeType: head # Not needed since it is under the headgroup - # the following params are used to complete the ray start: ray start --head --block ... - rayStartParams: - # Flag "no-monitor" will be automatically set when autoscaling is enabled. - dashboard-host: '0.0.0.0' - block: 'true' - # num-cpus: '1' # can be auto-completed from the limits - # Use `resources` to optionally specify custom resource annotations for the Ray node. - # The value of `resources` is a string-integer mapping. 
- # Currently, `resources` must be provided in the specific format demonstrated below: - # resources: '"{\"Custom1\": 1, \"Custom2\": 5}"' - num-gpus: '0' - #pod template - template: - spec: - containers: - # The Ray head pod - - name: ray-head - imagePullPolicy: Always - ports: - - containerPort: 6379 - name: gcs - - containerPort: 8265 - name: dashboard - - containerPort: 10001 - name: client - lifecycle: - preStop: - exec: - command: ["/bin/sh","-c","ray stop"] - resources: - limits: - cpu: 2 - memory: "8G" - requests: - cpu: 2 - memory: "8G" - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - volumes: - - name: odh-trusted-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - optional: true - - name: odh-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - optional: true - workerGroupSpecs: - # the pod replicas in this group typed worker - - replicas: 3 - minReplicas: 3 - maxReplicas: 3 - # logical group name, for this called small-group, also can be functional - groupName: small-group - # if worker pods need to be added, we can simply increment the replicas - # if worker pods need to be removed, we decrement the replicas, and populate the podsToDelete list - # the operator will remove pods from the list until the number of replicas is satisfied - # when a pod is confirmed to be deleted, its name will be removed from the list below - #scaleStrategy: - # workersToDelete: - # - raycluster-complete-worker-small-group-bdtwh - # - raycluster-complete-worker-small-group-hv457 - # - raycluster-complete-worker-small-group-k8tj7 - # the following params are used to complete the ray start: ray start --block ... - rayStartParams: - block: 'true' - num-gpus: 1 - #pod template - template: - metadata: - labels: - key: value - # annotations for pod - annotations: - key: value - # finalizers: - # - kubernetes - spec: - containers: - - name: machine-learning # must consist of lower case alphanumeric characters or '-', and must start and end with an alphanumeric character (e.g. 'my-name', or '123-abc' - # environment variables to set in the container.Optional. 
- # Refer to https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/ - lifecycle: - preStop: - exec: - command: ["/bin/sh","-c","ray stop"] - resources: - limits: - cpu: "2" - memory: "12G" - requests: - cpu: "2" - memory: "12G" - volumeMounts: - - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt - name: odh-trusted-ca-cert - subPath: odh-trusted-ca-bundle.crt - - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - - mountPath: /etc/ssl/certs/odh-ca-bundle.crt - name: odh-ca-cert - subPath: odh-ca-bundle.crt - volumes: - - name: odh-trusted-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: ca-bundle.crt - path: odh-trusted-ca-bundle.crt - optional: true - - name: odh-ca-cert - configMap: - name: odh-trusted-ca-bundle - items: - - key: odh-ca-bundle.crt - path: odh-ca-bundle.crt - optional: true From 6ec44c5f91072f8c4edcd1cc0922853bcd60aaa0 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 31 Oct 2024 16:27:25 +0000 Subject: [PATCH 411/496] test: update unit tests after refactor --- src/codeflare_sdk/common/kueue/test_kueue.py | 12 +- .../common/utils/unit_test_support.py | 36 ++++ .../ray/appwrapper/test_status.py | 7 +- ...rate_yaml.py => test_build_ray_cluster.py} | 13 +- src/codeflare_sdk/ray/cluster/test_cluster.py | 164 +++++----------- src/codeflare_sdk/ray/cluster/test_config.py | 124 +++++------- .../ray/cluster/test_pretty_print.py | 7 +- src/codeflare_sdk/ray/cluster/test_status.py | 13 +- .../appwrapper/test-case-bad.yaml | 3 +- .../appwrapper/unit-test-all-params.yaml | 176 ++++++++++++++++++ tests/test_cluster_yamls/kueue/aw_kueue.yaml | 9 +- .../kueue/ray_cluster_kueue.yaml | 9 +- .../ray/default-appwrapper.yaml | 9 +- .../ray/default-ray-cluster.yaml | 9 +- .../ray/unit-test-all-params.yaml | 25 +-- .../support_clusters/test-aw-a.yaml | 3 +- .../support_clusters/test-aw-b.yaml | 3 +- .../support_clusters/test-rc-a.yaml | 3 +- .../support_clusters/test-rc-b.yaml | 3 +- ui-tests/playwright.config.js | 2 +- 20 files changed, 355 insertions(+), 275 deletions(-) rename src/codeflare_sdk/ray/cluster/{test_generate_yaml.py => test_build_ray_cluster.py} (84%) create mode 100644 tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml diff --git a/src/codeflare_sdk/common/kueue/test_kueue.py b/src/codeflare_sdk/common/kueue/test_kueue.py index e9de364d..77095d4d 100644 --- a/src/codeflare_sdk/common/kueue/test_kueue.py +++ b/src/codeflare_sdk/common/kueue/test_kueue.py @@ -51,8 +51,7 @@ def test_cluster_creation_no_aw_local_queue(mocker): config.write_to_file = True config.local_queue = "local-queue-default" cluster = Cluster(config) - assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-cluster-kueue.yaml" - assert cluster.app_wrapper_name == "unit-test-cluster-kueue" + assert cluster.resource_yaml == f"{aw_dir}unit-test-cluster-kueue.yaml" assert filecmp.cmp( f"{aw_dir}unit-test-cluster-kueue.yaml", f"{parent}/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml", @@ -65,10 +64,9 @@ def test_cluster_creation_no_aw_local_queue(mocker): config.write_to_file = False cluster = Cluster(config) - test_rc = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) with open(f"{parent}/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml") as f: expected_rc = yaml.load(f, Loader=yaml.FullLoader) - assert test_rc == expected_rc + assert 
cluster.resource_yaml == expected_rc def test_aw_creation_local_queue(mocker): @@ -87,8 +85,7 @@ def test_aw_creation_local_queue(mocker): config.write_to_file = True config.local_queue = "local-queue-default" cluster = Cluster(config) - assert cluster.app_wrapper_yaml == f"{aw_dir}unit-test-aw-kueue.yaml" - assert cluster.app_wrapper_name == "unit-test-aw-kueue" + assert cluster.resource_yaml == f"{aw_dir}unit-test-aw-kueue.yaml" assert filecmp.cmp( f"{aw_dir}unit-test-aw-kueue.yaml", f"{parent}/tests/test_cluster_yamls/kueue/aw_kueue.yaml", @@ -102,10 +99,9 @@ def test_aw_creation_local_queue(mocker): config.write_to_file = False cluster = Cluster(config) - test_rc = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) with open(f"{parent}/tests/test_cluster_yamls/kueue/aw_kueue.yaml") as f: expected_rc = yaml.load(f, Loader=yaml.FullLoader) - assert test_rc == expected_rc + assert cluster.resource_yaml == expected_rc def test_get_local_queue_exists_fail(mocker): diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 61a16260..88b65aa4 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -20,6 +20,7 @@ import yaml from pathlib import Path from kubernetes import client +from unittest.mock import patch parent = Path(__file__).resolve().parents[4] # project directory aw_dir = os.path.expanduser("~/.codeflare/resources/") @@ -381,3 +382,38 @@ def mocked_ingress(port, cluster_name="unit-test-cluster", annotations: dict = N ), ) return mock_ingress + + +@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) +def create_cluster_all_config_params(mocker, cluster_name, is_appwrapper) -> Cluster: + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + + config = ClusterConfiguration( + name=cluster_name, + namespace="ns", + head_cpu_requests=4, + head_cpu_limits=8, + head_memory_requests=12, + head_memory_limits=16, + head_extended_resource_requests={"nvidia.com/gpu": 1, "intel.com/gpu": 2}, + worker_cpu_requests=4, + worker_cpu_limits=8, + num_workers=10, + worker_memory_requests=12, + worker_memory_limits=16, + appwrapper=is_appwrapper, + envs={"key1": "value1", "key2": "value2"}, + image="example/ray:tag", + image_pull_secrets=["secret1", "secret2"], + write_to_file=True, + verify_tls=True, + labels={"key1": "value1", "key2": "value2"}, + worker_extended_resource_requests={"nvidia.com/gpu": 1}, + extended_resource_mapping={"example.com/gpu": "GPU", "intel.com/gpu": "TPU"}, + overwrite_default_resource_mapping=True, + local_queue="local-queue-default", + ) + return Cluster(config) diff --git a/src/codeflare_sdk/ray/appwrapper/test_status.py b/src/codeflare_sdk/ray/appwrapper/test_status.py index 8c693767..a3fcf870 100644 --- a/src/codeflare_sdk/ray/appwrapper/test_status.py +++ b/src/codeflare_sdk/ray/appwrapper/test_status.py @@ -19,6 +19,7 @@ ) from codeflare_sdk.ray.appwrapper import AppWrapper, AppWrapperStatus from codeflare_sdk.ray.cluster.status import CodeFlareClusterStatus +from codeflare_sdk.common.utils.unit_test_support import get_local_queue import os aw_dir = os.path.expanduser("~/.codeflare/resources/") @@ -28,8 +29,8 @@ def test_cluster_status(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( - 
"codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) fake_aw = AppWrapper("test", AppWrapperStatus.FAILED) @@ -39,7 +40,7 @@ def test_cluster_status(mocker): namespace="ns", write_to_file=True, appwrapper=True, - local_queue="local_default_queue", + local_queue="local-queue-default", ) ) mocker.patch( diff --git a/src/codeflare_sdk/ray/cluster/test_generate_yaml.py b/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py similarity index 84% rename from src/codeflare_sdk/ray/cluster/test_generate_yaml.py rename to src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py index 606cc950..7d6d3d0a 100644 --- a/src/codeflare_sdk/ray/cluster/test_generate_yaml.py +++ b/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py @@ -13,7 +13,7 @@ # limitations under the License. from collections import namedtuple import sys -from .generate_yaml import gen_names, update_image +from .build_ray_cluster import gen_names, update_image import uuid @@ -39,7 +39,7 @@ def test_gen_names_without_name(mocker): def test_update_image_without_supported_python_version(mocker): # Mock SUPPORTED_PYTHON_VERSIONS mocker.patch.dict( - "codeflare_sdk.ray.cluster.generate_yaml.SUPPORTED_PYTHON_VERSIONS", + "codeflare_sdk.ray.cluster.build_ray_cluster.SUPPORTED_PYTHON_VERSIONS", { "3.9": "ray-py3.9", "3.11": "ray-py3.11", @@ -55,16 +55,13 @@ def test_update_image_without_supported_python_version(mocker): # Mock warnings.warn to check if it gets called warn_mock = mocker.patch("warnings.warn") - # Create a sample spec - spec = {"containers": [{"image": None}]} - # Call the update_image function with no image provided - update_image(spec, None) + image = update_image(None) # Assert that the warning was called with the expected message warn_mock.assert_called_once_with( "No default Ray image defined for 3.8. Please provide your own image or use one of the following python versions: 3.9, 3.11." 
) - # Assert that no image was set in the containers since the Python version is not supported - assert spec["containers"][0]["image"] is None + # Assert that no image was set since the Python version is not supported + assert image is None diff --git a/src/codeflare_sdk/ray/cluster/test_cluster.py b/src/codeflare_sdk/ray/cluster/test_cluster.py index 20438bbe..5e83c82a 100644 --- a/src/codeflare_sdk/ray/cluster/test_cluster.py +++ b/src/codeflare_sdk/ray/cluster/test_cluster.py @@ -25,21 +25,17 @@ arg_check_apply_effect, get_local_queue, createClusterConfig, - route_list_retrieval, get_ray_obj, - get_aw_obj, - get_named_aw, get_obj_none, get_ray_obj_with_status, get_aw_obj_with_status, ) -from codeflare_sdk.ray.cluster.generate_yaml import ( - is_openshift_cluster, - is_kind_cluster, -) +from codeflare_sdk.ray.cluster.cluster import _is_openshift_cluster from pathlib import Path from unittest.mock import MagicMock from kubernetes import client +import yaml +import filecmp import os parent = Path(__file__).resolve().parents[4] # project directory @@ -205,7 +201,7 @@ def test_local_client_url(mocker): return_value="rayclient-unit-test-cluster-localinter-ns.apps.cluster.awsroute.org", ) mocker.patch( - "codeflare_sdk.ray.cluster.cluster.Cluster.create_app_wrapper", + "codeflare_sdk.ray.cluster.cluster.Cluster.create_resource", return_value="unit-test-cluster-localinter.yaml", ) @@ -225,120 +221,51 @@ def test_local_client_url(mocker): """ -def test_get_cluster_openshift(mocker): +def test_get_cluster_no_appwrapper(mocker): + """ + This test uses the "test all params" unit test file as a comparison + """ + mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - # Mock the client.ApisApi function to return a mock object - mock_api = MagicMock() - mock_api.get_api_versions.return_value.groups = [ - MagicMock(versions=[MagicMock(group_version="route.openshift.io/v1")]) - ] - mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", + "codeflare_sdk.ray.cluster.cluster._check_aw_exists", + return_value=False, ) - assert is_openshift_cluster() - - def custom_side_effect(group, version, namespace, plural, **kwargs): - if plural == "routes": - return route_list_retrieval("route.openshift.io", "v1", "ns", "routes") - elif plural == "rayclusters": - return get_ray_obj("ray.io", "v1", "ns", "rayclusters") - elif plural == "appwrappers": - return get_aw_obj("workload.codeflare.dev", "v1beta2", "ns", "appwrappers") - elif plural == "localqueues": - return get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues") - - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", get_aw_obj - ) - - mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=custom_side_effect, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", - return_value=get_named_aw, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", - side_effect=route_list_retrieval("route.openshift.io", "v1", "ns", "routes")[ - "items" - ], - ) - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) - - cluster = get_cluster( - "test-cluster-a", "ns" - ) # see tests/test_cluster_yamls/support_clusters - cluster_config = cluster.config - - assert cluster_config.name == "test-cluster-a" and 
cluster_config.namespace == "ns" - assert cluster_config.head_cpu_requests == 2 and cluster_config.head_cpu_limits == 2 - assert ( - cluster_config.head_memory_requests == "8G" - and cluster_config.head_memory_limits == "8G" - ) - assert ( - cluster_config.worker_cpu_requests == 1 - and cluster_config.worker_cpu_limits == 1 - ) - assert ( - cluster_config.worker_memory_requests == "2G" - and cluster_config.worker_memory_limits == "2G" - ) - assert cluster_config.num_workers == 1 - assert cluster_config.write_to_file == False - assert cluster_config.local_queue == "local_default_queue" + with open(f"{expected_clusters_dir}/ray/unit-test-all-params.yaml") as f: + expected_rc = yaml.load(f, Loader=yaml.FullLoader) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + return_value=expected_rc, + ) + get_cluster("test-all-params", "ns", write_to_file=True) + assert filecmp.cmp( + f"{aw_dir}test-all-params.yaml", + f"{expected_clusters_dir}/ray/unit-test-all-params.yaml", + shallow=True, + ) -def test_get_cluster(mocker): - # test get_cluster for Kind Clusters +def test_get_cluster_with_appwrapper(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") mocker.patch( - "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", - side_effect=get_ray_obj, - ) - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", - side_effect=get_named_aw, - ) - mocker.patch( - "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", - return_value=ingress_retrieval(cluster_name="quicktest", client_ing=True), - ) - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", + "codeflare_sdk.ray.cluster.cluster._check_aw_exists", + return_value=True, ) - cluster = get_cluster( - "test-cluster-a" - ) # see tests/test_cluster_yamls/support_clusters - cluster_config = cluster.config - assert cluster_config.name == "test-cluster-a" and cluster_config.namespace == "ns" - assert cluster_config.head_cpu_requests == 2 and cluster_config.head_cpu_limits == 2 - assert ( - cluster_config.head_memory_requests == "8G" - and cluster_config.head_memory_limits == "8G" - ) - assert ( - cluster_config.worker_cpu_requests == 1 - and cluster_config.worker_cpu_limits == 1 - ) - assert ( - cluster_config.worker_memory_requests == "2G" - and cluster_config.worker_memory_limits == "2G" - ) - assert cluster_config.num_workers == 1 - assert cluster_config.write_to_file == False - assert cluster_config.local_queue == "local_default_queue" + with open(f"{expected_clusters_dir}/appwrapper/unit-test-all-params.yaml") as f: + expected_aw = yaml.load(f, Loader=yaml.FullLoader) + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_namespaced_custom_object", + return_value=expected_aw, + ) + get_cluster("aw-all-params", "ns", write_to_file=True) + assert filecmp.cmp( + f"{aw_dir}aw-all-params.yaml", + f"{expected_clusters_dir}/appwrapper/unit-test-all-params.yaml", + shallow=True, + ) def test_wait_ready(mocker, capsys): @@ -356,10 +283,6 @@ def test_wait_ready(mocker, capsys): mocker.patch( "codeflare_sdk.ray.cluster.cluster._ray_cluster_status", return_value=None ) - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) mocker.patch.object( client.CustomObjectsApi, "list_namespaced_custom_object", @@ -381,7 +304,6 @@ def test_wait_ready(mocker, capsys): namespace="ns", write_to_file=False, appwrapper=True, 
- local_queue="local-queue-default", ) ) try: @@ -460,7 +382,7 @@ def test_list_queue_rayclusters(mocker, capsys): ] mocker.patch("kubernetes.client.ApisApi", return_value=mock_api) - assert is_openshift_cluster() == True + assert _is_openshift_cluster() == True mocker.patch( "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_obj_none("ray.io", "v1", "ns", "rayclusters"), @@ -566,7 +488,7 @@ def test_map_to_ray_cluster(mocker): mocker.patch("kubernetes.config.load_kube_config") mocker.patch( - "codeflare_sdk.ray.cluster.cluster.is_openshift_cluster", return_value=True + "codeflare_sdk.ray.cluster.cluster._is_openshift_cluster", return_value=True ) mock_api_client = mocker.MagicMock(spec=client.ApiClient) @@ -608,3 +530,9 @@ def custom_side_effect(group, version, namespace, plural, **kwargs): assert result is not None assert result.dashboard == rc_dashboard + + +# Make sure to always keep this function last +def test_cleanup(): + os.remove(f"{aw_dir}test-all-params.yaml") + os.remove(f"{aw_dir}aw-all-params.yaml") diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index f1ac5355..1423fc2b 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -12,10 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from codeflare_sdk.common.utils.unit_test_support import createClusterWrongType +from codeflare_sdk.common.utils.unit_test_support import ( + createClusterWrongType, + get_local_queue, + create_cluster_all_config_params, +) from codeflare_sdk.ray.cluster.cluster import ClusterConfiguration, Cluster from pathlib import Path -from unittest.mock import patch import filecmp import pytest import yaml @@ -31,17 +34,11 @@ def test_default_cluster_creation(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") - cluster = Cluster( - ClusterConfiguration( - name="default-cluster", - namespace="ns", - ) - ) + cluster = Cluster(ClusterConfiguration(name="default-cluster", namespace="ns")) - test_rc = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) with open(f"{expected_clusters_dir}/ray/default-ray-cluster.yaml") as f: expected_rc = yaml.load(f, Loader=yaml.FullLoader) - assert test_rc == expected_rc + assert cluster.resource_yaml == expected_rc def test_default_appwrapper_creation(mocker): @@ -53,81 +50,46 @@ def test_default_appwrapper_creation(mocker): ClusterConfiguration(name="default-appwrapper", namespace="ns", appwrapper=True) ) - test_aw = yaml.load(cluster.app_wrapper_yaml, Loader=yaml.FullLoader) with open(f"{expected_clusters_dir}/ray/default-appwrapper.yaml") as f: expected_aw = yaml.load(f, Loader=yaml.FullLoader) - assert test_aw == expected_aw + assert cluster.resource_yaml == expected_aw -@patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) def test_config_creation_all_parameters(mocker): from codeflare_sdk.ray.cluster.config import DEFAULT_RESOURCE_MAPPING - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) - extended_resource_mapping = DEFAULT_RESOURCE_MAPPING - extended_resource_mapping.update({"example.com/gpu": "GPU"}) - - config = ClusterConfiguration( - name="test-all-params", - namespace="ns", - head_info=["test1", "test2"], - head_cpu_requests=4, - head_cpu_limits=8, - head_memory_requests=12, - head_memory_limits=16, - 
head_extended_resource_requests={"nvidia.com/gpu": 1}, - machine_types={"gpu.small", "gpu.large"}, - worker_cpu_requests=4, - worker_cpu_limits=8, - num_workers=10, - worker_memory_requests=12, - worker_memory_limits=16, - template=f"{parent}/src/codeflare_sdk/ray/templates/base-template.yaml", - appwrapper=False, - envs={"key1": "value1", "key2": "value2"}, - image="example/ray:tag", - image_pull_secrets=["secret1", "secret2"], - write_to_file=True, - verify_tls=True, - labels={"key1": "value1", "key2": "value2"}, - worker_extended_resource_requests={"nvidia.com/gpu": 1}, - extended_resource_mapping=extended_resource_mapping, - overwrite_default_resource_mapping=True, - local_queue="local-queue-default", - ) - Cluster(config) - - assert config.name == "test-all-params" and config.namespace == "ns" - assert config.head_info == ["test1", "test2"] - assert config.head_cpu_requests == 4 - assert config.head_cpu_limits == 8 - assert config.head_memory_requests == "12G" - assert config.head_memory_limits == "16G" - assert config.head_extended_resource_requests == {"nvidia.com/gpu": 1} - assert config.machine_types == {"gpu.small", "gpu.large"} - assert config.worker_cpu_requests == 4 - assert config.worker_cpu_limits == 8 - assert config.num_workers == 10 - assert config.worker_memory_requests == "12G" - assert config.worker_memory_limits == "16G" + expected_extended_resource_mapping = DEFAULT_RESOURCE_MAPPING + expected_extended_resource_mapping.update({"example.com/gpu": "GPU"}) + expected_extended_resource_mapping["intel.com/gpu"] = "TPU" + + cluster = create_cluster_all_config_params(mocker, "test-all-params", False) + assert cluster.config.name == "test-all-params" and cluster.config.namespace == "ns" + assert cluster.config.head_cpu_requests == 4 + assert cluster.config.head_cpu_limits == 8 + assert cluster.config.head_memory_requests == "12G" + assert cluster.config.head_memory_limits == "16G" + assert cluster.config.head_extended_resource_requests == { + "nvidia.com/gpu": 1, + "intel.com/gpu": 2, + } + assert cluster.config.worker_cpu_requests == 4 + assert cluster.config.worker_cpu_limits == 8 + assert cluster.config.num_workers == 10 + assert cluster.config.worker_memory_requests == "12G" + assert cluster.config.worker_memory_limits == "16G" + assert cluster.config.appwrapper == False + assert cluster.config.envs == {"key1": "value1", "key2": "value2"} + assert cluster.config.image == "example/ray:tag" + assert cluster.config.image_pull_secrets == ["secret1", "secret2"] + assert cluster.config.write_to_file == True + assert cluster.config.verify_tls == True + assert cluster.config.labels == {"key1": "value1", "key2": "value2"} + assert cluster.config.worker_extended_resource_requests == {"nvidia.com/gpu": 1} assert ( - config.template - == f"{parent}/src/codeflare_sdk/ray/templates/base-template.yaml" + cluster.config.extended_resource_mapping == expected_extended_resource_mapping ) - assert config.appwrapper == False - assert config.envs == {"key1": "value1", "key2": "value2"} - assert config.image == "example/ray:tag" - assert config.image_pull_secrets == ["secret1", "secret2"] - assert config.write_to_file == True - assert config.verify_tls == True - assert config.labels == {"key1": "value1", "key2": "value2"} - assert config.worker_extended_resource_requests == {"nvidia.com/gpu": 1} - assert config.extended_resource_mapping == extended_resource_mapping - assert config.overwrite_default_resource_mapping == True - assert config.local_queue == "local-queue-default" + assert 
cluster.config.overwrite_default_resource_mapping == True + assert cluster.config.local_queue == "local-queue-default" assert filecmp.cmp( f"{aw_dir}test-all-params.yaml", @@ -136,6 +98,15 @@ def test_config_creation_all_parameters(mocker): ) +def test_all_config_params_aw(mocker): + create_cluster_all_config_params(mocker, "aw-all-params", True) + assert filecmp.cmp( + f"{aw_dir}aw-all-params.yaml", + f"{expected_clusters_dir}/appwrapper/unit-test-all-params.yaml", + shallow=True, + ) + + def test_config_creation_wrong_type(): with pytest.raises(TypeError): createClusterWrongType() @@ -168,3 +139,4 @@ def test_cluster_config_deprecation_conversion(mocker): # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}test-all-params.yaml") + os.remove(f"{aw_dir}aw-all-params.yaml") diff --git a/src/codeflare_sdk/ray/cluster/test_pretty_print.py b/src/codeflare_sdk/ray/cluster/test_pretty_print.py index b0da4201..329a1354 100644 --- a/src/codeflare_sdk/ray/cluster/test_pretty_print.py +++ b/src/codeflare_sdk/ray/cluster/test_pretty_print.py @@ -29,6 +29,7 @@ ClusterConfiguration, _copy_to_ray, ) +from codeflare_sdk.common.utils.unit_test_support import get_local_queue def test_print_no_resources(capsys): @@ -100,15 +101,15 @@ def test_ray_details(mocker, capsys): return_value="", ) mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) cf = Cluster( ClusterConfiguration( name="raytest2", namespace="ns", appwrapper=True, - local_queue="local_default_queue", + local_queue="local-queue-default", ) ) captured = capsys.readouterr() diff --git a/src/codeflare_sdk/ray/cluster/test_status.py b/src/codeflare_sdk/ray/cluster/test_status.py index 146d2190..27eda49e 100644 --- a/src/codeflare_sdk/ray/cluster/test_status.py +++ b/src/codeflare_sdk/ray/cluster/test_status.py @@ -23,6 +23,7 @@ RayCluster, ) import os +from ...common.utils.unit_test_support import get_local_queue aw_dir = os.path.expanduser("~/.codeflare/resources/") @@ -30,10 +31,6 @@ def test_cluster_status(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "codeflare_sdk.common.kueue.kueue.local_queue_exists", - return_value="true", - ) fake_ray = RayCluster( name="test", @@ -50,13 +47,19 @@ def test_cluster_status(mocker): head_mem_requests=8, head_mem_limits=8, ) + + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), + ) + cf = Cluster( ClusterConfiguration( name="test", namespace="ns", write_to_file=True, appwrapper=False, - local_queue="local_default_queue", + local_queue="local-queue-default", ) ) mocker.patch( diff --git a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml index a072e9e9..9166eced 100644 --- a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml +++ b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml @@ -18,7 +18,6 @@ spec: spec: autoscalerOptions: idleTimeoutSeconds: 60 - imagePullPolicy: Always resources: limits: cpu: 500m @@ -44,7 +43,7 @@ spec: fieldRef: fieldPath: status.podIP image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 - imagePullPolicy: Always + 
imagePullPolicy: IfNotPresent lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml new file mode 100644 index 00000000..6d2c5440 --- /dev/null +++ b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml @@ -0,0 +1,176 @@ +apiVersion: workload.codeflare.dev/v1beta2 +kind: AppWrapper +metadata: + labels: + kueue.x-k8s.io/queue-name: local-queue-default + name: aw-all-params + namespace: ns +spec: + components: + - template: + apiVersion: ray.io/v1 + kind: RayCluster + metadata: + annotations: + app.kubernetes.io/managed-by: test-prefix + labels: + controller-tools.k8s.io: '1.0' + key1: value1 + key2: value2 + name: aw-all-params + namespace: ns + spec: + autoscalerOptions: + idleTimeoutSeconds: 60 + resources: + limits: + cpu: 500m + memory: 512Mi + requests: + cpu: 500m + memory: 512Mi + upscalingMode: Default + enableInTreeAutoscaling: false + headGroupSpec: + enableIngress: false + rayStartParams: + block: 'true' + dashboard-host: 0.0.0.0 + num-gpus: '1' + resources: '"{\"TPU\": 2}"' + serviceType: ClusterIP + template: + spec: + containers: + - env: + - name: key1 + value: value1 + - name: key2 + value: value2 + image: example/ray:tag + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: ray-head + ports: + - containerPort: 6379 + name: gcs + - containerPort: 8265 + name: dashboard + - containerPort: 10001 + name: client + resources: + limits: + cpu: 8 + intel.com/gpu: 2 + memory: 16G + nvidia.com/gpu: 1 + requests: + cpu: 4 + intel.com/gpu: 2 + memory: 12G + nvidia.com/gpu: 1 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + imagePullSecrets: + - name: secret1 + - name: secret2 + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert + rayVersion: 2.35.0 + workerGroupSpecs: + - groupName: small-group-aw-all-params + maxReplicas: 10 + minReplicas: 10 + rayStartParams: + block: 'true' + num-gpus: '1' + resources: '"{}"' + replicas: 10 + template: + spec: + containers: + - env: + - name: key1 + value: value1 + - name: key2 + value: value2 + image: example/ray:tag + imagePullPolicy: Always + lifecycle: + preStop: + exec: + command: + - /bin/sh + - -c + - ray stop + name: machine-learning + resources: + limits: + cpu: 8 + memory: 16G + nvidia.com/gpu: 1 + requests: + cpu: 4 + memory: 12G + nvidia.com/gpu: 1 + volumeMounts: + - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-trusted-ca-bundle.crt + name: odh-trusted-ca-cert + subPath: odh-trusted-ca-bundle.crt + - mountPath: /etc/pki/tls/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + - mountPath: /etc/ssl/certs/odh-ca-bundle.crt + name: odh-ca-cert + subPath: odh-ca-bundle.crt + 
imagePullSecrets: + - name: secret1 + - name: secret2 + volumes: + - configMap: + items: + - key: ca-bundle.crt + path: odh-trusted-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-trusted-ca-cert + - configMap: + items: + - key: odh-ca-bundle.crt + path: odh-ca-bundle.crt + name: odh-trusted-ca-bundle + optional: true + name: odh-ca-cert diff --git a/tests/test_cluster_yamls/kueue/aw_kueue.yaml b/tests/test_cluster_yamls/kueue/aw_kueue.yaml index 2c6d868a..402ffb6a 100644 --- a/tests/test_cluster_yamls/kueue/aw_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/aw_kueue.yaml @@ -18,7 +18,6 @@ spec: spec: autoscalerOptions: idleTimeoutSeconds: 60 - imagePullPolicy: Always resources: limits: cpu: 500m @@ -76,7 +75,6 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: [] volumes: - configMap: items: @@ -103,14 +101,10 @@ spec: resources: '"{}"' replicas: 2 template: - metadata: - annotations: - key: value - labels: - key: value spec: containers: - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + imagePullPolicy: Always lifecycle: preStop: exec: @@ -139,7 +133,6 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: [] volumes: - configMap: items: diff --git a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml index 0c4efb29..a5cb3616 100644 --- a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml @@ -18,7 +18,6 @@ spec: spec: autoscalerOptions: idleTimeoutSeconds: 60 - imagePullPolicy: Always resources: limits: cpu: 500m @@ -76,7 +75,6 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: [] volumes: - configMap: items: @@ -103,14 +101,10 @@ spec: resources: '"{}"' replicas: 2 template: - metadata: - annotations: - key: value - labels: - key: value spec: containers: - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + imagePullPolicy: Always lifecycle: preStop: exec: @@ -139,7 +133,6 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: [] volumes: - configMap: items: diff --git a/tests/test_cluster_yamls/ray/default-appwrapper.yaml b/tests/test_cluster_yamls/ray/default-appwrapper.yaml index 60152c1e..3e97474d 100644 --- a/tests/test_cluster_yamls/ray/default-appwrapper.yaml +++ b/tests/test_cluster_yamls/ray/default-appwrapper.yaml @@ -16,7 +16,6 @@ spec: spec: autoscalerOptions: idleTimeoutSeconds: 60 - imagePullPolicy: Always resources: limits: cpu: 500m @@ -74,7 +73,6 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: [] volumes: - configMap: items: @@ -101,14 +99,10 @@ spec: resources: '"{}"' replicas: 1 template: - metadata: - annotations: - key: value - labels: - key: value spec: containers: - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + imagePullPolicy: Always lifecycle: preStop: exec: @@ -137,7 +131,6 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: [] volumes: - configMap: items: diff --git a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml 
b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml index 7a3329b6..34de53d2 100644 --- a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml +++ b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml @@ -8,7 +8,6 @@ metadata: spec: autoscalerOptions: idleTimeoutSeconds: 60 - imagePullPolicy: Always resources: limits: cpu: 500m @@ -66,7 +65,6 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: [] volumes: - configMap: items: @@ -93,14 +91,10 @@ spec: resources: '"{}"' replicas: 1 template: - metadata: - annotations: - key: value - labels: - key: value spec: containers: - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + imagePullPolicy: Always lifecycle: preStop: exec: @@ -129,7 +123,6 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt - imagePullSecrets: [] volumes: - configMap: items: diff --git a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml index eda7270f..8426eede 100644 --- a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml @@ -13,7 +13,6 @@ metadata: spec: autoscalerOptions: idleTimeoutSeconds: 60 - imagePullPolicy: Always resources: limits: cpu: 500m @@ -29,14 +28,16 @@ spec: block: 'true' dashboard-host: 0.0.0.0 num-gpus: '1' - resources: '"{}"' + resources: '"{\"TPU\": 2}"' serviceType: ClusterIP template: spec: containers: - - env: &id001 - key1: value1 - key2: value2 + - env: + - name: key1 + value: value1 + - name: key2 + value: value2 image: example/ray:tag imagePullPolicy: Always lifecycle: @@ -57,10 +58,12 @@ spec: resources: limits: cpu: 8 + intel.com/gpu: 2 memory: 16G nvidia.com/gpu: 1 requests: cpu: 4 + intel.com/gpu: 2 memory: 12G nvidia.com/gpu: 1 volumeMounts: @@ -105,15 +108,15 @@ spec: resources: '"{}"' replicas: 10 template: - metadata: - annotations: - key: value - labels: - key: value spec: containers: - - env: *id001 + - env: + - name: key1 + value: value1 + - name: key2 + value: value2 image: example/ray:tag + imagePullPolicy: Always lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml index 9b8a647f..fe26900d 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml @@ -18,7 +18,6 @@ spec: spec: autoscalerOptions: idleTimeoutSeconds: 60 - imagePullPolicy: Always resources: limits: cpu: 500m @@ -40,7 +39,7 @@ spec: spec: containers: - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 - imagePullPolicy: Always + imagePullPolicy: IfNotPresent lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml index 763eb5c2..eed571fe 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml @@ -18,7 +18,6 @@ spec: spec: autoscalerOptions: idleTimeoutSeconds: 60 - imagePullPolicy: Always resources: limits: cpu: 500m @@ -40,7 +39,7 @@ spec: spec: containers: - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 - imagePullPolicy: Always + imagePullPolicy: IfNotPresent lifecycle: preStop: exec: diff --git 
a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml index f12ffde0..5f5d456c 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml @@ -9,7 +9,6 @@ metadata: spec: autoscalerOptions: idleTimeoutSeconds: 60 - imagePullPolicy: Always resources: limits: cpu: 500m @@ -31,7 +30,7 @@ spec: spec: containers: - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 - imagePullPolicy: Always + imagePullPolicy: IfNotPresent lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml index 1d41e365..3bf894db 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml @@ -9,7 +9,6 @@ metadata: spec: autoscalerOptions: idleTimeoutSeconds: 60 - imagePullPolicy: Always resources: limits: cpu: 500m @@ -31,7 +30,7 @@ spec: spec: containers: - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 - imagePullPolicy: Always + imagePullPolicy: IfNotPresent lifecycle: preStop: exec: diff --git a/ui-tests/playwright.config.js b/ui-tests/playwright.config.js index 88003f8e..1ba51f15 100644 --- a/ui-tests/playwright.config.js +++ b/ui-tests/playwright.config.js @@ -2,7 +2,7 @@ const baseConfig = require('@jupyterlab/galata/lib/playwright-config'); module.exports = { ...baseConfig, - timeout: 460000, + timeout: 600000, webServer: { command: 'yarn start', url: 'http://localhost:8888/lab', From eb5ce8daae989989307d577252511eda9a52ee53 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 7 Nov 2024 09:33:30 +0000 Subject: [PATCH 412/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index fe061433..59d64b37 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 94% - 94% + 93% + 93% From ed1cc7af3047c36b8342af82d168006367e84c98 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 30 Oct 2024 16:03:24 +0000 Subject: [PATCH 413/496] Add Snyk security workflow to track multiple tags --- .github/workflows/release.yaml | 36 ++++++++++++++++++++++++ .github/workflows/snyk-security.yaml | 41 ++++++++++++++++++++++++++++ 2 files changed, 77 insertions(+) create mode 100644 .github/workflows/snyk-security.yaml diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index ccac5260..2b2e392a 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -27,15 +27,22 @@ on: type: string default: "project-codeflare" +env: + PR_BRANCH_NAME: snyk-tag-monitoring-${{ github.run_id }} + jobs: release: runs-on: ubuntu-latest permissions: contents: write id-token: write # This permission is required for trusted publishing + pull-requests: write # This permission is required for creating PRs steps: - name: Checkout the repository uses: actions/checkout@v4 + with: + submodules: recursive + token: ${{ secrets.GH_CLI_TOKEN }} - name: Install Python uses: actions/setup-python@v5 with: @@ -81,3 +88,32 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} shell: bash + + - name: Append tag to Snyk monitoring list + run: | + sed -i 's/list_of_released_tags=(/list_of_released_tags=("v${{ github.event.inputs.release-version }}", /' 
.github/workflows/snyk-security.yaml + + - name: Commit and push changes + run: | + git config --global user.email "138894154+codeflare-machine-account@users.noreply.github.com" + git config --global user.name "codeflare-machine-account" + git checkout -b $PR_BRANCH_NAME + git commit -am "Update snyk-security.yaml" + git push --set-upstream origin "$PR_BRANCH_NAME" + + - name: Create Pull Request + run: | + gh pr create \ + --title "$pr_title" \ + --body "$pr_body" \ + --head ${{ env.PR_BRANCH_NAME }} \ + --base main \ + --label "lgtm" \ + --label "approved" + env: + GITHUB_TOKEN: ${{ secrets.GH_CLI_TOKEN }} + pr_title: "[CodeFlare-Machine] Append tag v${{ github.event.inputs.release-version }} to Snyk monitoring list" + pr_body: | + :rocket: This is an automated Pull Request generated by [release.yaml](https://github.com/project-codeflare/codeflare-sdk/blob/main/.github/workflows/release.yaml) workflow. + + This PR appends to the list of tags that Snyk will be monitoring. diff --git a/.github/workflows/snyk-security.yaml b/.github/workflows/snyk-security.yaml new file mode 100644 index 00000000..ee497f31 --- /dev/null +++ b/.github/workflows/snyk-security.yaml @@ -0,0 +1,41 @@ +name: Snyk Security +on: + push: + branches: + - main + +jobs: + snyk-scan: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Install Snyk CLI + run: npm install -g snyk + + - name: Snyk Monitor and Test multiple projects + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + SNYK_ORG: ${{ secrets.SNYK_ORG }} + run: | + echo "Fetching tags" + git fetch origin 'refs/tags/*:refs/tags/*' + + echo "Authenticating with Snyk" + snyk auth ${SNYK_TOKEN} + + echo "Scanning project: codeflare-sdk/main" + snyk monitor --all-projects --exclude=requirements.txt --org=${SNYK_ORG} --target-reference="main" + + # This list is based off RHOAI Supported versions: https://access.redhat.com/support/policy/updates/rhoai-sm/lifecycle + # Compared to the tags in the ImageStream annotations: https://github.com/red-hat-data-services/notebooks/blob/rhoai-2.8/manifests/base/jupyter-datascience-notebook-imagestream.yaml + # Loop through the list of released tags and scan each project + list_of_released_tags=("v0.22.0" "v0.21.1" "v0.19.1", "v0.16.4", "vv0.14.1") + for project in "${list_of_released_tags[@]}"; do + echo "Scanning project: codeflare-sdk/$project" + git checkout $project + snyk monitor --all-projects --exclude=requirements.txt --org=${SNYK_ORG} --target-reference="$(git describe --tags)" + done From 84b571a9d1044e2cf14146f69a69c07b3d23b69e Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 6 Nov 2024 10:56:54 +0000 Subject: [PATCH 414/496] Cache npm dependencies in snyk-security workflow --- .github/workflows/release.yaml | 5 ++--- .github/workflows/snyk-security.yaml | 6 +++++- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 2b2e392a..dacd904c 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -41,7 +41,6 @@ jobs: - name: Checkout the repository uses: actions/checkout@v4 with: - submodules: recursive token: ${{ secrets.GH_CLI_TOKEN }} - name: Install Python uses: actions/setup-python@v5 @@ -95,8 +94,8 @@ jobs: - name: Commit and push changes run: | - git config --global user.email "138894154+codeflare-machine-account@users.noreply.github.com" - git config --global user.name 
"codeflare-machine-account" + git config --global user.email "${{ vars.CODEFLARE_MACHINE_EMAIL }}" + git config --global user.name "${{ vars.CODEFLARE_MACHINE_NAME }}" git checkout -b $PR_BRANCH_NAME git commit -am "Update snyk-security.yaml" git push --set-upstream origin "$PR_BRANCH_NAME" diff --git a/.github/workflows/snyk-security.yaml b/.github/workflows/snyk-security.yaml index ee497f31..3d43da45 100644 --- a/.github/workflows/snyk-security.yaml +++ b/.github/workflows/snyk-security.yaml @@ -10,8 +10,12 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 + + - name: Setup Node.js to cache dependencies + uses: actions/setup-node@v4 with: - submodules: recursive + node-version: 20 + cache: 'npm' - name: Install Snyk CLI run: npm install -g snyk From 9af07a0926befa35fc990f52d59cb75df75ca28a Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Wed, 6 Nov 2024 15:46:40 +0000 Subject: [PATCH 415/496] Snyk Security workflow enhancements --- .github/workflows/release.yaml | 37 ++++++++++------------------ .github/workflows/snyk-security.yaml | 16 ------------ 2 files changed, 13 insertions(+), 40 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index dacd904c..ddc23b5a 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -88,31 +88,20 @@ jobs: GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} shell: bash - - name: Append tag to Snyk monitoring list + - name: Install Snyk CLI and setup monitoring for new release tag + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + SNYK_ORG: ${{ secrets.SNYK_ORG }} run: | - sed -i 's/list_of_released_tags=(/list_of_released_tags=("v${{ github.event.inputs.release-version }}", /' .github/workflows/snyk-security.yaml + echo "Installing Snyk CLI" + npm install -g snyk - - name: Commit and push changes - run: | - git config --global user.email "${{ vars.CODEFLARE_MACHINE_EMAIL }}" - git config --global user.name "${{ vars.CODEFLARE_MACHINE_NAME }}" - git checkout -b $PR_BRANCH_NAME - git commit -am "Update snyk-security.yaml" - git push --set-upstream origin "$PR_BRANCH_NAME" + echo "Fetching tags" + git fetch origin 'refs/tags/*:refs/tags/*' - - name: Create Pull Request - run: | - gh pr create \ - --title "$pr_title" \ - --body "$pr_body" \ - --head ${{ env.PR_BRANCH_NAME }} \ - --base main \ - --label "lgtm" \ - --label "approved" - env: - GITHUB_TOKEN: ${{ secrets.GH_CLI_TOKEN }} - pr_title: "[CodeFlare-Machine] Append tag v${{ github.event.inputs.release-version }} to Snyk monitoring list" - pr_body: | - :rocket: This is an automated Pull Request generated by [release.yaml](https://github.com/project-codeflare/codeflare-sdk/blob/main/.github/workflows/release.yaml) workflow. + echo "Authenticating with Snyk" + snyk auth ${SNYK_TOKEN} - This PR appends to the list of tags that Snyk will be monitoring. 
+ echo "Scanning project: codeflare-sdk/v${{ github.event.inputs.release-version }}" + git checkout v${{ github.event.inputs.release-version }} + snyk monitor --all-projects --exclude=requirements.txt --org=${SNYK_ORG} --target-reference="$(git describe --tags)" diff --git a/.github/workflows/snyk-security.yaml b/.github/workflows/snyk-security.yaml index 3d43da45..ba4af2dc 100644 --- a/.github/workflows/snyk-security.yaml +++ b/.github/workflows/snyk-security.yaml @@ -11,12 +11,6 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Setup Node.js to cache dependencies - uses: actions/setup-node@v4 - with: - node-version: 20 - cache: 'npm' - - name: Install Snyk CLI run: npm install -g snyk @@ -33,13 +27,3 @@ jobs: echo "Scanning project: codeflare-sdk/main" snyk monitor --all-projects --exclude=requirements.txt --org=${SNYK_ORG} --target-reference="main" - - # This list is based off RHOAI Supported versions: https://access.redhat.com/support/policy/updates/rhoai-sm/lifecycle - # Compared to the tags in the ImageStream annotations: https://github.com/red-hat-data-services/notebooks/blob/rhoai-2.8/manifests/base/jupyter-datascience-notebook-imagestream.yaml - # Loop through the list of released tags and scan each project - list_of_released_tags=("v0.22.0" "v0.21.1" "v0.19.1", "v0.16.4", "vv0.14.1") - for project in "${list_of_released_tags[@]}"; do - echo "Scanning project: codeflare-sdk/$project" - git checkout $project - snyk monitor --all-projects --exclude=requirements.txt --org=${SNYK_ORG} --target-reference="$(git describe --tags)" - done From 493e560be4a4c406b621d1a7f34dfd7295d9e0e4 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 29 Oct 2024 11:58:01 +0000 Subject: [PATCH 416/496] docs: enhance ray module code documentation --- src/codeflare_sdk/ray/client/ray_jobs.py | 145 ++++++++++++++++++----- src/codeflare_sdk/ray/cluster/cluster.py | 101 ++++++++++++---- src/codeflare_sdk/ray/cluster/config.py | 68 +++++++---- 3 files changed, 239 insertions(+), 75 deletions(-) diff --git a/src/codeflare_sdk/ray/client/ray_jobs.py b/src/codeflare_sdk/ray/client/ray_jobs.py index 9f002295..2c0ceee0 100644 --- a/src/codeflare_sdk/ray/client/ray_jobs.py +++ b/src/codeflare_sdk/ray/client/ray_jobs.py @@ -24,16 +24,28 @@ class RayJobClient: """ - A class that functions as a wrapper for the Ray Job Submission Client. - - parameters: - address -- Either (1) the address of the Ray cluster, or (2) the HTTP address of the dashboard server on the head node, e.g. “http://:8265”. In case (1) it must be specified as an address that can be passed to ray.init(), - e.g. a Ray Client address (ray://:10001), or “auto”, or “localhost:”. If unspecified, will try to connect to a running local Ray cluster. This argument is always overridden by the RAY_ADDRESS environment variable. - create_cluster_if_needed -- Indicates whether the cluster at the specified address needs to already be running. Ray doesn't start a cluster before interacting with jobs, but third-party job managers may do so. - cookies -- Cookies to use when sending requests to the HTTP job server. - metadata -- Arbitrary metadata to store along with all jobs. New metadata specified per job will be merged with the global metadata provided here via a simple dict update. - headers -- Headers to use when sending requests to the HTTP job server, used for cases like authentication to a remote cluster. 
- verify -- Boolean indication to verify the server's TLS certificate or a path to a file or directory of trusted certificates. Default: True. + A wrapper class for the Ray Job Submission Client, used for interacting with Ray clusters to manage job + submissions, deletions, and other job-related information. + + Args: + address (Optional[str]): + The Ray cluster's address, which may be either the Ray Client address, HTTP address + of the dashboard server on the head node, or "auto" / "localhost:" for a local cluster. + This is overridden by the RAY_ADDRESS environment variable if set. + create_cluster_if_needed (bool): + If True, a new cluster will be created if not already running at the + specified address. By default, Ray requires an existing cluster. + cookies (Optional[Dict[str, Any]]): + HTTP cookies to send with requests to the job server. + metadata (Optional[Dict[str, Any]]): + Global metadata to store with all jobs, merged with job-specific + metadata during job submission. + headers (Optional[Dict[str, Any]]): + HTTP headers to send with requests to the job server, can be used for + authentication. + verify (Optional[Union[str, bool]]): + If True, verifies the server's TLS certificate. Can also be a path + to trusted certificates. Default is True. """ def __init__( @@ -67,18 +79,35 @@ def submit_job( entrypoint_resources: Optional[Dict[str, float]] = None, ) -> str: """ - Method for submitting jobs to a Ray Cluster and returning the job id with entrypoint being a mandatory field. - - Parameters: - entrypoint -- The shell command to run for this job. - submission_id -- A unique ID for this job. - runtime_env -- The runtime environment to install and run this job in. - metadata -- Arbitrary data to store along with this job. - job_id -- DEPRECATED. This has been renamed to submission_id - entrypoint_num_cpus -- The quantity of CPU cores to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0. - entrypoint_num_gpus -- The quantity of GPUs to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0. - entrypoint_memory –- The quantity of memory to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. Defaults to 0. - entrypoint_resources -- The quantity of custom resources to reserve for the execution of the entrypoint command, separately from any tasks or actors launched by it. + Submits a job to the Ray cluster with specified resources and returns the job ID. + + Args: + entrypoint (str): + The command to execute for this job. + job_id (Optional[str]): + Deprecated, use `submission_id`. A unique job identifier. + runtime_env (Optional[Dict[str, Any]]): + The runtime environment for this job. + metadata (Optional[Dict[str, str]]): + Metadata associated with the job, merged with global metadata. + submission_id (Optional[str]): + Unique ID for the job submission. + entrypoint_num_cpus (Optional[Union[int, float]]): + The quantity of CPU cores to reserve for the execution of the entrypoint command, + separately from any tasks or actors launched by it. Defaults to 0. + entrypoint_num_gpus (Optional[Union[int, float]]): + The quantity of GPUs to reserve for the execution of the entrypoint command, + separately from any tasks or actors launched by it. Defaults to 0. 
+ entrypoint_memory (Optional[int]): + The quantity of memory to reserve for the execution of the entrypoint command, + separately from any tasks or actors launched by it. Defaults to 0. + entrypoint_resources (Optional[Dict[str, float]]): + The quantity of custom resources to reserve for the execution of the entrypoint command, + separately from any tasks or actors launched by it. + + Returns: + str: + The unique identifier for the submitted job. """ return self.rayJobClient.submit_job( entrypoint=entrypoint, @@ -94,7 +123,15 @@ def submit_job( def delete_job(self, job_id: str) -> (bool, str): """ - Method for deleting jobs with the job id being a mandatory field. + Deletes a job by job ID. + + Args: + job_id (str): + The unique identifier of the job to delete. + + Returns: + tuple(bool, str): + A tuple with deletion status and a message. """ deletion_status = self.rayJobClient.delete_job(job_id=job_id) @@ -107,37 +144,77 @@ def delete_job(self, job_id: str) -> (bool, str): def get_address(self) -> str: """ - Method for getting the address from the RayJobClient + Retrieves the address of the connected Ray cluster. + + Returns: + str: + The Ray cluster's address. """ return self.rayJobClient.get_address() def get_job_info(self, job_id: str): """ - Method for getting the job info with the job id being a mandatory field. + Fetches information about a job by job ID. + + Args: + job_id (str): + The unique identifier of the job. + + Returns: + JobInfo: + Information about the job's status, progress, and other details. """ return self.rayJobClient.get_job_info(job_id=job_id) def get_job_logs(self, job_id: str) -> str: """ - Method for getting the job logs with the job id being a mandatory field. + Retrieves the logs for a specific job by job ID. + + Args: + job_id (str): + The unique identifier of the job. + + Returns: + str: + Logs output from the job. """ return self.rayJobClient.get_job_logs(job_id=job_id) def get_job_status(self, job_id: str) -> str: """ - Method for getting the job's status with the job id being a mandatory field. + Fetches the current status of a job by job ID. + + Args: + job_id (str): + The unique identifier of the job. + + Returns: + str: + The job's status. """ return self.rayJobClient.get_job_status(job_id=job_id) def list_jobs(self) -> List[JobDetails]: """ - Method for getting a list of current jobs in the Ray Cluster. + Lists all current jobs in the Ray cluster. + + Returns: + List[JobDetails]: + A list of job details for each current job in the cluster. """ return self.rayJobClient.list_jobs() def stop_job(self, job_id: str) -> (bool, str): """ - Method for stopping a job with the job id being a mandatory field. + Stops a running job by job ID. + + Args: + job_id (str): + The unique identifier of the job to stop. + + Returns: + tuple(bool, str): + A tuple with the stop status and a message. """ stop_job_status = self.rayJobClient.stop_job(job_id=job_id) if stop_job_status: @@ -148,6 +225,14 @@ def stop_job(self, job_id: str) -> (bool, str): def tail_job_logs(self, job_id: str) -> Iterator[str]: """ - Method for getting an iterator that follows the logs of a job with the job id being a mandatory field. + Continuously streams the logs of a job. + + Args: + job_id (str): + The unique identifier of the job. + + Returns: + Iterator[str]: + An iterator that yields log entries in real-time. 
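+
+        Example:
+            A minimal, illustrative sketch (the dashboard address and
+            entrypoint below are assumed placeholder values, not part of
+            this change; the underlying Ray job submission client streams
+            these logs asynchronously, so the loop runs inside a coroutine)::
+
+                import asyncio
+
+                client = RayJobClient(address="http://localhost:8265")
+                submission_id = client.submit_job(entrypoint="python -c 'print(42)'")
+
+                async def follow_logs():
+                    # Yields each log line as the job produces it
+                    async for line in client.tail_job_logs(submission_id):
+                        print(line, end="")
+
+                asyncio.run(follow_logs())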
""" return self.rayJobClient.tail_job_logs(job_id=job_id) diff --git a/src/codeflare_sdk/ray/cluster/cluster.py b/src/codeflare_sdk/ray/cluster/cluster.py index fe29eaa9..fd059277 100644 --- a/src/codeflare_sdk/ray/cluster/cluster.py +++ b/src/codeflare_sdk/ray/cluster/cluster.py @@ -296,6 +296,17 @@ def status( return status, ready def is_dashboard_ready(self) -> bool: + """ + Checks if the cluster's dashboard is ready and accessible. + + This method attempts to send a GET request to the cluster dashboard URI. + If the request is successful (HTTP status code 200), it returns True. + If an SSL error occurs, it returns False, indicating the dashboard is not ready. + + Returns: + bool: + True if the dashboard is ready, False otherwise. + """ try: response = requests.get( self.cluster_dashboard_uri(), @@ -313,8 +324,22 @@ def is_dashboard_ready(self) -> bool: def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True): """ - Waits for requested cluster to be ready, up to an optional timeout (s). - Checks every five seconds. + Waits for the requested cluster to be ready, up to an optional timeout. + + This method checks the status of the cluster every five seconds until it is + ready or the timeout is reached. If dashboard_check is enabled, it will also + check for the readiness of the dashboard. + + Args: + timeout (Optional[int]): + The maximum time to wait for the cluster to be ready in seconds. If None, waits indefinitely. + dashboard_check (bool): + Flag to determine if the dashboard readiness should + be checked. Defaults to True. + + Raises: + TimeoutError: + If the timeout is reached before the cluster or dashboard is ready. """ print("Waiting for requested resources to be set up...") time = 0 @@ -346,6 +371,21 @@ def wait_ready(self, timeout: Optional[int] = None, dashboard_check: bool = True time += 5 def details(self, print_to_console: bool = True) -> RayCluster: + """ + Retrieves details about the Ray Cluster. + + This method returns a copy of the Ray Cluster information and optionally prints + the details to the console. + + Args: + print_to_console (bool): + Flag to determine if the cluster details should be + printed to the console. Defaults to True. + + Returns: + RayCluster: + A copy of the Ray Cluster details. + """ cluster = _copy_to_ray(self) if print_to_console: pretty_print.print_clusters([cluster]) @@ -447,6 +487,13 @@ def _head_worker_extended_resources_from_rc_dict(rc: Dict) -> Tuple[dict, dict]: return head_extended_resources, worker_extended_resources def local_client_url(self): + """ + Constructs the URL for the local Ray client. + + Returns: + str: + The Ray client URL based on the ingress domain. + """ ingress_domain = _get_ingress_domain(self) return f"ray://{ingress_domain}" @@ -504,6 +551,13 @@ def list_all_queued( def get_current_namespace(): # pragma: no cover + """ + Retrieves the current Kubernetes namespace. + + Returns: + str: + The current namespace or None if not found. + """ if os.path.isfile("/var/run/secrets/kubernetes.io/serviceaccount/namespace"): try: file = open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r") @@ -528,26 +582,29 @@ def get_cluster( verify_tls: bool = True, write_to_file: bool = False, ): - """Returns the given Ray Cluster/AppWrapper as a Cluster Object - - The get_cluster() method is used for retrieving a Ray Cluster that already exists in your K8s Cluster. - Returned is a basic Cluster object which includes the exact yaml for your Ray Cluster under Cluster.resource_yaml. 
- - Parameters - ---------- - cluster_name : str - The name of the Ray Cluster/AppWrapper - namespace : str - The namespace of the Ray Cluster/AppWrapper - verify_tls : bool - A boolean indicating whether to verify TLS when connecting to the cluster - write_to_file : bool - A boolean indicating whether or not to write the resource to a Yaml file - - Raises - ------ - Exception - If the Ray Cluster/AppWrapper cannot be found/does not exist + """ + Retrieves an existing Ray Cluster or AppWrapper as a Cluster object. + + This function fetches an existing Ray Cluster or AppWrapper from the Kubernetes cluster and returns + it as a `Cluster` object, including its YAML configuration under `Cluster.resource_yaml`. + + Args: + cluster_name (str): + The name of the Ray Cluster or AppWrapper. + namespace (str, optional): + The Kubernetes namespace where the Ray Cluster or AppWrapper is located. Default is "default". + verify_tls (bool, optional): + Whether to verify TLS when connecting to the cluster. Default is True. + write_to_file (bool, optional): + If True, writes the resource configuration to a YAML file. Default is False. + + Returns: + Cluster: + A Cluster object representing the retrieved Ray Cluster or AppWrapper. + + Raises: + Exception: + If the Ray Cluster or AppWrapper cannot be found or does not exist. """ config_check() api_instance = client.CustomObjectsApi(get_api_client()) diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index b8b1652e..f321c278 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -44,29 +44,51 @@ class ClusterConfiguration: This dataclass is used to specify resource requirements and other details, and is passed in as an argument when creating a Cluster object. - Attributes: - - name: The name of the cluster. - - namespace: The namespace in which the cluster should be created. - - head_cpus: The number of CPUs to allocate to the head node. - - head_memory: The amount of memory to allocate to the head node. - - head_gpus: The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests) - - head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} - - min_cpus: The minimum number of CPUs to allocate to each worker. - - max_cpus: The maximum number of CPUs to allocate to each worker. - - num_workers: The number of workers to create. - - min_memory: The minimum amount of memory to allocate to each worker. - - max_memory: The maximum amount of memory to allocate to each worker. - - num_gpus: The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests) - - appwrapper: A boolean indicating whether to use an AppWrapper. - - envs: A dictionary of environment variables to set for the cluster. - - image: The image to use for the cluster. - - image_pull_secrets: A list of image pull secrets to use for the cluster. - - write_to_file: A boolean indicating whether to write the cluster configuration to a file. - - verify_tls: A boolean indicating whether to verify TLS when connecting to the cluster. - - labels: A dictionary of labels to apply to the cluster. - - worker_extended_resource_requests: A dictionary of extended resource requests for each worker. 
ex: {"nvidia.com/gpu": 1} - - extended_resource_mapping: A dictionary of custom resource mappings to map extended resource requests to RayCluster resource names - - overwrite_default_resource_mapping: A boolean indicating whether to overwrite the default resource mapping. + Args: + name: + The name of the cluster. + namespace: + The namespace in which the cluster should be created. + head_cpus: + The number of CPUs to allocate to the head node. + head_memory: + The amount of memory to allocate to the head node. + head_gpus: + The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests) + head_extended_resource_requests: + A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} + min_cpus: + The minimum number of CPUs to allocate to each worker. + max_cpus: + The maximum number of CPUs to allocate to each worker. + num_workers: + The number of workers to create. + min_memory: + The minimum amount of memory to allocate to each worker. + max_memory: + The maximum amount of memory to allocate to each worker. + num_gpus: + The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests) + appwrapper: + A boolean indicating whether to use an AppWrapper. + envs: + A dictionary of environment variables to set for the cluster. + image: + The image to use for the cluster. + image_pull_secrets: + A list of image pull secrets to use for the cluster. + write_to_file: + A boolean indicating whether to write the cluster configuration to a file. + verify_tls: + A boolean indicating whether to verify TLS when connecting to the cluster. + labels: + A dictionary of labels to apply to the cluster. + worker_extended_resource_requests: + A dictionary of extended resource requests for each worker. ex: {"nvidia.com/gpu": 1} + extended_resource_mapping: + A dictionary of custom resource mappings to map extended resource requests to RayCluster resource names + overwrite_default_resource_mapping: + A boolean indicating whether to overwrite the default resource mapping. """ name: str From 70d453d951360c3a75bfd5d379ed05410689fdf9 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Tue, 12 Nov 2024 10:03:24 +0000 Subject: [PATCH 417/496] removing params that do not exist --- docs/sphinx/user-docs/cluster-configuration.rst | 1 - src/codeflare_sdk/common/utils/unit_test_support.py | 1 - 2 files changed, 2 deletions(-) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index 238ad51c..dc3f2cf4 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -25,7 +25,6 @@ requirements for creating the Ray Cluster. 
worker_memory_requests=2, # Default 2 worker_memory_limits=2, # Default 2 # image="", # Optional Field - machine_types=["m5.xlarge", "g4dn.xlarge"], labels={"exampleLabel": "example", "secondLabel": "example"}, )) diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 88b65aa4..9345fbc3 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -62,7 +62,6 @@ def createClusterWrongType(): worker_memory_limits=6, worker_extended_resource_requests={"nvidia.com/gpu": 7}, appwrapper=True, - machine_types=[True, False], image_pull_secrets=["unit-test-pull-secret"], image="quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", write_to_file=True, From 23445708a234df91e317d01c80e3c38b3f07ddc0 Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Tue, 12 Nov 2024 14:12:37 +0000 Subject: [PATCH 418/496] Update comments in demo notebooks --- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- .../guided-demos/1_cluster_job_client.ipynb | 7 +++++++ .../guided-demos/2_basic_interactive.ipynb | 8 ++++++++ .../guided-demos/3_widget_example.ipynb | 2 +- .../tests/widget_notebook_example.test.ts | 16 ++++++++-------- .../widgets-cell-3-linux.png | Bin 7692 -> 8156 bytes 6 files changed, 25 insertions(+), 10 deletions(-) diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 51fb0026..be05130e 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -88,7 +88,7 @@ "id": "12eef53c", "metadata": {}, "source": [ - "Next, we want to bring our cluster up, so we call the `up()` function below to submit our Ray Cluster onto the queue, and begin the process of obtaining our resource cluster." + "To create the Ray Cluster, we can click the `Cluster Up` button to submit our Ray Cluster onto the queue, and begin the process of creating a Ray Cluster resource. Alternatively, you can run the code cell below to do the same." ] }, { diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index aaed9930..8db650dd 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -76,6 +76,13 @@ "))" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To create the Ray Cluster, we can tick the `Wait for cluster?` checkbox and click the `Cluster Up` button to submit our Ray Cluster onto the queue, and begin the process of creating a Ray Cluster resource while waiting for the Ray Dashboard to be available. Alternatively, you can run the code cell below to do the same." + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 8610a055..5528d04b 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -81,6 +81,14 @@ "))" ] }, + { + "cell_type": "markdown", + "id": "6973247b", + "metadata": {}, + "source": [ + "To create the Ray Cluster, we can tick the `Wait for cluster?` checkbox and click the `Cluster Up` button to submit our Ray Cluster onto the queue, and begin the process of creating a Ray Cluster resource while waiting for the Ray Dashboard to be available. 
Alternatively, you can run the code cell below to do the same." + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/demo-notebooks/guided-demos/3_widget_example.ipynb b/demo-notebooks/guided-demos/3_widget_example.ipynb index cf0df6dd..243c75ec 100644 --- a/demo-notebooks/guided-demos/3_widget_example.ipynb +++ b/demo-notebooks/guided-demos/3_widget_example.ipynb @@ -65,7 +65,7 @@ "# Create and configure our cluster object\n", "# The SDK will try to find the name of your default local queue based on the annotation \"kueue.x-k8s.io/default-queue\": \"true\" unless you specify the local queue manually below\n", "cluster = Cluster(ClusterConfiguration(\n", - " name='raytest',\n", + " name='widgettest',\n", " head_cpu_requests='500m',\n", " head_cpu_limits='500m',\n", " head_memory_requests=2,\n", diff --git a/ui-tests/tests/widget_notebook_example.test.ts b/ui-tests/tests/widget_notebook_example.test.ts index db746e00..d37c225c 100644 --- a/ui-tests/tests/widget_notebook_example.test.ts +++ b/ui-tests/tests/widget_notebook_example.test.ts @@ -88,7 +88,7 @@ test.describe("Visual Regression", () => { await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Up")', async (button) => { await button.click(); - const successMessage = await page.waitForSelector('text=Ray Cluster: \'raytest\' has successfully been created', { timeout: 10000 }); + const successMessage = await page.waitForSelector('text=Ray Cluster: \'widgettest\' has successfully been created', { timeout: 10000 }); expect(successMessage).not.toBeNull(); const resourcesMessage = await page.waitForSelector('text=Waiting for requested resources to be set up...'); @@ -105,20 +105,20 @@ test.describe("Visual Regression", () => { await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { await button.click(); - const clusterDownMessage = await page.waitForSelector('text=Ray Cluster: \'raytest\' has successfully been deleted', { timeout: 5000 }); + const clusterDownMessage = await page.waitForSelector('text=Ray Cluster: \'widgettest\' has successfully been deleted', { timeout: 5000 }); expect(clusterDownMessage).not.toBeNull(); }); await runPreviousCell(page, cellCount, '(, False)'); // Replace text in ClusterConfiguration to run a new RayCluster - const cell = page.getByText('raytest').first(); - await cell.fill('"raytest-1"'); + const cell = page.getByText('widgettest').first(); + await cell.fill('"widgettest-1"'); await page.notebook.runCell(cellCount - 3, true); // Run ClusterConfiguration cell await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Up")', async (button) => { await button.click(); - const successMessage = await page.waitForSelector('text=Ray Cluster: \'raytest-1\' has successfully been created', { timeout: 10000 }); + const successMessage = await page.waitForSelector('text=Ray Cluster: \'widgettest-1\' has successfully been created', { timeout: 10000 }); expect(successMessage).not.toBeNull(); }); @@ -146,13 +146,13 @@ test.describe("Visual Regression", () => { await interactWithWidget(page, viewClustersCellIndex, 'button:has-text("Open Ray Dashboard")', async (button) => { await button.click(); - const successMessage = await page.waitForSelector('text=Opening Ray Dashboard for raytest-1 cluster', { timeout: 5000 }); + const successMessage = await page.waitForSelector('text=Opening Ray Dashboard for widgettest-1 cluster', { timeout: 5000 }); expect(successMessage).not.toBeNull(); }); await 
interactWithWidget(page, viewClustersCellIndex, 'button:has-text("View Jobs")', async (button) => { await button.click(); - const successMessage = await page.waitForSelector('text=Opening Ray Jobs Dashboard for raytest-1 cluster', { timeout: 5000 }); + const successMessage = await page.waitForSelector('text=Opening Ray Jobs Dashboard for widgettest-1 cluster', { timeout: 5000 }); expect(successMessage).not.toBeNull(); }); @@ -161,7 +161,7 @@ test.describe("Visual Regression", () => { const noClustersMessage = await page.waitForSelector(`text=No clusters found in the ${namespace} namespace.`, { timeout: 5000 }); expect(noClustersMessage).not.toBeNull(); - const successMessage = await page.waitForSelector(`text=Cluster raytest-1 in the ${namespace} namespace was deleted successfully.`, { timeout: 5000 }); + const successMessage = await page.waitForSelector(`text=Cluster widgettest-1 in the ${namespace} namespace was deleted successfully.`, { timeout: 5000 }); expect(successMessage).not.toBeNull(); }); diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-3-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-3-linux.png index bd55aa7784a429a7cdb660b8d4e299e3eeb26f3c..1454d17ca7c04e2fc1f89e7a0114c7bfd5009416 100644 GIT binary patch literal 8156 zcma)BWmHrRv>gjj6hQ=q57CiO2|-eFBxR6pkdRJE=~4la5Rgt$L11VkrIiwp9=fEZ zrE3`8nQy)I)?4q#dvAU)EVy&$-gEZZXYYN3loTFcxV>v1my#HMiu|adR!T$D_o5hrh^BoPEzM{phN4`rwzv z3zCT{RFs9+$f?fUrG4kl$if`V8$`}aOP*%&cIejKx1Sh#8?UErD2w?cNLA9=s#cDs zbk{}ClAj`F#rvl2{z~9>;%;zuaOzCfrVVdlfNw`2Ui*t;PyPGbi$F#y!Y^mZBGm}* zd%q55%SFaABLHwrNoGc?yk1M0t0idgky}hii(k)T|q%X zo!7zUAgMj#kc>lE*sCu?ex2<)0`YRMvrt|o?!F%Fl`B6`xYhE>?VTNKD=VUrPW%D1DK4A`AX;I36+_x`Hql~kY$aZQ})bq z@qhO>W(IA%)+cJj#Ka1QNf3y7YuYmOQrYv-$jHdx;9wHu>gwv$)YSLy-#0fmC2m~1 z_$pH|)ymScW=5gq-9HQS^Sfa}=MaeNd~cpd3i{yOe0@&}3pwe-U1xt%XlrX57#J+6 zX=%lmmX?Nvb>?apcYY9gLM139#9>f7*4W5+`}U8~(NQdRdv|rzZ`tEubIx;jMZ3x+ z@7=q12?^Q?3S`Li>T0j1V5;Wk=7E6$Wo2btO5J_^Y71-YoPq*7L&J@R%c#HqpylLn z`!nG?+uLK~)HsdqhM;DydFRPVYiP57(l}Bj3M=JNZ;l(DjTnj9x^Rii{`z z6sDlSpxibkC@3f(pegKz(2xzE`owY}r8pNiw|Ph6m$bBE_bs#gaqS#kot-YOu7iVv zuJ-m74l_jRLn9-V4l~jc5)F%enW`tRw%Iv3BU(*jUOW-wMlTpVtHuyl59yvoDFL-&4pc6M;9Nn%ox ztLK=3zW&US5v!K8$WP(XX(7)iP6BQet6c zm64gbbN|F*)$oR%o>eh1{huTPhiqI74X3aeRlr0mDkm>b#Jn@vyz9e; zTQv;S?(VMssId282|#%;a%gC%DdgJLWmQ$xs(^*XMH*V#5olj(YQdK;rGPa0@WJlx zkC~aShue!`VPR2s6l)#N-gR2&6mgoDD~}Kq6wI2O$PZW^$lKc3z~tpUx3%5f+ncO* z=T@&Z@1p+~g&Jgms#;oF0Hit?zUksF(dDjs;L&Q4hFt{~q@i<)I<+vqgXZbl}?b|i`#*hZ}Rc*{QUV-DNc#iLS9aejh#IpAi&$( zTS`*$YjH90$UWrj_F^C0A{iOkj~_pvp`D$b{t>T;xNqs+=NAwdo}5e#{ysR!fA3z9 zVE=L9MKX=!SbPZb^S=~0%E`T6VDFD#aai79|QGC8># z#sL5du)ybSykcQtF)Gr+|t0E@j!O`We(Dyyc39U5|j=BT-QcvPmR&*BcY2sh^C1&}|a ze|5^z%Bs?BV>*`oDg1$viRsMg(^=n`{h=mr-n@AnPTym}0~3h^MqKuT(IiyKPA} zfObxHHeh75*1HM{{AiRIfpGJD;~qJps;@r*{OOB7Sh%N}K`vfhQ*-1L^Y*Qbxw-j? 
zCj~Wrus%C0EA)m?*`Sc2#=kGXlAxf+@!<}DR4v1Cd4N6pba>0mLavdK5f1xpL8p~a z6;b!ibn~f~uNRN*tnzWCJr1XaZs+G`T$x;2vSQhcyHo`gl#n=c;)s`(hYCG@+>#>X z>av_~eVpGeNGvHP_6ZJ8OY>_}JrLVeKFO+vMWk$I#{5n?NyKw^u04*Gg=Kqt`|wXR z`6lkbiA6E5px_!MWtz`zd}S2fju@AOsLcLQ6b?>rT#V_H&0y(>pA%Z%GjmcTlZ zPx>I@S!U7mncKXRz-NL|nU@!c*${}owfn#Ev?9KEU0GRK2?;+B(WfdZy{S3O3d!+N zQGML(f9_aYTayJ+($EYK3^Y&K%RYI6*;8IxUXF^2f@u!$_m`BCf+4{OH+=t&1Rh{z z4QZHaY;OKKY(wNHM$*&WUE#E#0Ee2JFC1(w7!0oi;G>{fQBeYP3f|twc6G;mZ=VtI zZ~;t#>R@@pLOQ{1DcacN`uR!RY_NU)yu+v{D-?IQvy2?)A8L2q`pr5zsSU zNq*%@WL(_IOgL(vskgWH{{8!-WzT`Ozr+r0-rPIEAFYSqM9puw0YrFc`wL_R{Ma7LzTDMdARP_5Tb z(F!39;x#h^gI|*og?PSR75qCREsYyXL1<~s<+Y}J2r*cj*XiiwYS^e~XapQ*e|`!B zx%}+Ulmm*O@=54~*MN%H)dKc68R}S4nCQ%u(81%?@eO9L?tFBs%FU1EWL#ff8>x+A+~9a zWa8!J1^xNy(QP9-T7y!XtWE2n_-1cbHJuU)TQJ)mha$Eo!=pG0uU_K}& zh*=R<|A2t;Z%zwaU8upH9!pD00l2hCrYA7>rZiWbDuvS^uW0J?Afz6z?+gZ!w+IDv9(a~24^8a;JRTT)ms;VR4 zc0lnpcAZ9}BCuDmuAn&thq<)0G&@`3b9~s`EG_;sfb`lxzTQ7{bfB+Z`$+3+XqegB z`fSd&_UCAJCGoE>7u3z$KeV%bcJ_5v)}M5A6bVu_Q{hXGg^G#_Sb&-V+`&TXH{G>b z?;~c#4{R(f+4=dk)`R(qsN?7kv#sT1Ay->4h}Z@AHed|EATYRH?Sa|u$}ntLxO-Su z2L}fPBq%BxRKB=}OfM;U(eqgfv@E;^^Yzm1yE-}wI!rf$6V0MoM@vddN=VG)6x8FIs;a9iM`CGf%lF`cfb-H-^}hCYRw1F$;o$?&^kAAp zsM&SCnxLVl|J=|6k6wNny_6Ik896;SxAUim!3Pb!EFSX{mEd z*o{e_qg5yx*-g>p*jVYzzH%^eAf&byx^l9zaNB)~O-)Ui3MsO3a?x>dlOVNX-cen> z>gM6`LD>Ck>I2gA=S$1WVQ#!mj`yJb!^6Y4t5!qTgy6 z_ih4s6LIk$u9M#7-bcH2C-@|*YUtl|Q;3U;i+B0@)YSW^C<&5pV3R=r!(n^SXpqA! zjEw0;MLlC<)rEx->=?s9OT-nxJmF*Pxno|#D%cK@84>-DGm69t3xi#+?qUM5Aw zE?7!e#9`M0VSt5yCmr?veNjmXorNoQbZvTi8dy=*K~xS>1u&pAjRbaMqh4-{VP&HR zCr;3SdR=B_X2C&0!=s~=;?3{=F*i1L+F4S!QqBb`r9?9F^7cH+kgL7xUSQOw^TVV$G(5x^Ol0af+w-2rY0hS90^cM8}0Cc z>HO}`xFfHkp%M1>Eho@qTqrHCmA$|#2!nuUlai8-aX4LFUAWgOb)a$K7k?sPtAS>N z>jg+)X0@cHGt8}dX!Ay&tiklF4Tw)o-iwZoPS+swiZ?MaA>uax(m&c=ZEtIHbageT zut&ylZSwN*ebr*iJc&z68tv|`UoS$f#>K^{Cdm|P<>nKv0p_jq*RR}?lIZri`Gtj~ zgalAl<8{7uA3whQ{!FPK91G_13&E5THfjXoQTOky5LXx%r))RRo8A^~W|aDs`sq)Z#KeK0eQ$eXOnZ z8H@#FgE*CTam zlmwpW;OOXyjEoW!9ho@{Io9e_Fw`5>HKRz4Y{U{?z#ni}z?(NNdS=-lKXUru_KVHC zc2|l950(~x1d#Ia@%fXxyC39x-og$JhSP9M>*(mHtMl^nm#3vkC@X{1{X3BN6v)N9 znuiF1xb@rq&d#NHQSU=Q?GOo>QN zPKHfV8d(aXlj@5{`j6H5`a-OQJuVBPyraUQ19rN+tW1StV`z97BBIz4gL@1N47YE8 z4kv-c08KIs$Ah*5$qv}n)gAk4+P+T(dy)j%m5S;uC-WM%ZiXW8eBl zMh+)W@ca)S?ibYImnzNRMm8X2!7iKD9q<059v=}=2>P8YkO2tYebdCsoZryUaEYb{ zgRzvCm)FpsF1f$dZ1K0BJ{ekBu_Se@m0hPrAbyLy7Qz%rsjEA~F7hroH4YpP84g^=DCk4j-jtM4 zfByW*=TJb)%I0eqGcz)pKyV370C^%zZ`YfS1_KTZFHEl}DQPm-7DG!*3tr;Fg$v*- zKwRpAq<3;EE-&Xtnt&A@FH>2977DVkEJ3sb+;7A8R6@ddZ*5$!0rI(2lrJ7wmayi_ zO(Snvg9fp+&DbS1E*{Ry%fk*T?eKuW1Mot&b&i;rpNA)uhP!!U!Uxc$=W5K!&(F`v zxeo*l`I61^=axJW0~eQ;c64>QIyg{JP=G!(adUf!Y=pMJ(k3u3Sg-GX#Bc#e9sHA^ zsf2{8pa6R}oaE$wb)imv&1|Hnr?auKfv>oF^(qMx91F-Fono^~aqTg&u@f+bB_%va z|L@Ph%|{=4$MZcO8Lx81I5z{ItlzVlM?Jepl$Mt#FD`BbOA<<@*B7^^EF~2X67s~y z2j5;B`un+OPSOF&2RAf6er#$EL2{+b>Inbn3^8fa!5M;_5yG6Irl;4}(OFtu&CAMq zh%KsT&B9>v4C?TZFpHefL`6p@L+!|$Ao4{NykvB)kC$VPYNcDc`>_tl_l+RXH{v>{ z@LC{pSL5bhgOFd#F*s4PsZ|2){TqdY0z^FeeG5%E!+#gcXcY{{)qp*P@}LIc+DuLd z_*nC$C=@%8_dfT*#HYT+@md+`t$ooYfpGX_fnB_f*uUp!IKQStIO)rJu}H!r0>o)z zGz;OKgpLtIc)w*&P;39|0seu42)71MXis%?=taGTwxR*e{{H?21?x(Ugm2+Q>^EZ! 
znfUlLq@|min&wQY?iwx0@_31@;FehM@XkKqshU%%v* z3Ss}WMc?DM9djF=W7WvzCjkOANG3*xhU8KN`#U=F($h`4|7{1qjLMWRq;RlWRg{&* zZp_clLh1_Xt!m)8bLZen0gW70V?8|%Fo*>!UqCmY`u9NuR>rK1mfJxp1NsY%MnkA( z;B#OK;R-~CC!=285M=`-b#;Y-U^zGlV+@jMoeOUF<enrnm%LWEC#KD8l_@Ac?yNMD7>V{Ofq_h-h1cC#7Sy`)*5=)UNODMs0 z9!AVx4p1g-vcT31P#<&iJuvrg-Vlk)XlTR+1e`-k>gy9iHi#a>f`Y!etm=S0AEa&$ zzj+f}a8_1)Ea5e<+G%ZM^f@tc1a?$kZ!ee|Sj3R7qabU6kns2K-}UwN)6+)T^AM0z z-`9c^vk1ylIBQKnQK0-5LK$c!4 zf^TVr^aA!N2D1q6r?8}i(f^^45Ttp%7Cay{A=1I;4TwE-T^malaDux87X{%uuxnR} z(BqV^usr$r*4)pBG^wbmt(eP>L}g`JsH*nCr3~AYRafI6;Pf2$=CPAqSZJ@L6cHaE zpPOp~;U`2#C%LJqsaaWfPyPZ0!vqbL+ojgj9RC&vR(ScLFad=N2?&sqm+ycW2eL)T zYe|sv^Yf4!yb#orlLN~bP97N>>jH~qbJNkPkCm6VRLoUVTN}+(dsSV%-*azGITeyW z&l*Vuh5X9OM0mzL5D3VNIZR7J z(C34%2JCh#%BTcVfro65) zklw4e=1zkP#;KaixYK-kC;V&1*?0&4y7vfCIccmEJ9Y%{%pWBTBYBXH@AjEwpRDOoUE zpyMD0`9!w(0-n`b%F15Y(Z#2Qi(a^NX=h`D27>i0Nr;600}%NC#Tz7I1L@0Ft|_vsa};!&*JuZN@h2AcerZj zyfiSq5ClktP`9?XgAe?bED*&4xf*`6odc!?Y!Ez4bzL3lx2}hxjMR9Rr=;k?!%*l^ zX6Bu`09)Jf%9yyga`51qZ!STrjvp@{#VcR{K2qvc%J9J@4ff+xvmLPQjofNjqDl@4$e&(X$ch^oJ+y*y9VAx z_<4OUlosADIH*X8;pBHxU~q6Kn`9(J)m#%+CtUPYHP3N3B02nMuDv38MVj`Jj*R<) zD7})&0~#6|(Y!lLDgQ_XmWJ}`HjZd~E9vegV!KD7bhnX{?jvidWJ$?bPVW;4J03$;7Dm($sJG#_;C+sgcI5xC0|6t$! zeHrdy-|yl_^hbaE^vT}I=_wD7u-B2JVRM?+Sy1@q_Vx>T`PEl*4hx-jyb1aJtdAb; z@9Y>CXdHTnD^cUpov2xqcPoLZypZ|Kb`SBM0+GOq63sJb}4$?b5 zBVV_iot{4Z`}ePey}iA~Zrk&Vi{^}xBSUtg)D=GGXn%i6Ln-($&P2Cuq#Atk(M5Y!#>dB}r_cIdC&r^iAP}F4*osR^9Gsj&LPMi#50?)%XV=!& zSc%$4Mh>gj(UDIhl9JB$#%vX1IB02TQZh39n8(px9>&HDyOk*^KL!WOwiT|@R#sMW znRUX(S#x`wpPi~1x%gtDMn^{pgiqjECPzlxW}8C9Qp1?n-wt1;b#rr5MJbdP7ay&Z zbTQlx%1vmV{pa%KLakhjzRW~npCo2$OUt8!*-#s6YexAviBsI(%9a*s{6JN0?ZXkX zWIJAp5zDy{avm`;F)h}p>gsAw4!iq0@eSN$# zvSEgom-kn5^Bpl+Sy|JLxW-pS6ciM6baYh0-g0<{%LR46J35kuykzwcrhob>r$3*l z^AUdf6wxsAwsJf-mlZz{elvC$n!&3vk2^U%6%-V_N?T&qHUF0P)~#EQC@HWd$GBRj zqob247hCMH|AvJnXKTw|+s)JSWTM(dS6BDNix+IHte-x8%23HFFXxGFpWAfJ%*=FI z8y`V?9gmh;nMBJa2|VNCD*1fMu2uM>sVTs+42?ePNtf#A=!j&LAFXk7c=4kB^mzXn zg@TSwoEkH1(Gwn?uGZFYS!8KxY5#qdtevJ1@^q=t&i3{@K@vehL4$*X7vGC8Bbu6; z*bUJM*$y-H|B#c%EASy?k#lo%L_|cSq@;A=iwR$v;^N|{_?_jL5tB7;ef9Npf`a<2M8tRRYH4e$ ztEovNd-HV5p&$jkj&^1m0##vG1l;V2eFZ%Ba#sqGNRvOmf;{$CZe8=gfB*jL<}bCb zn`tY#NaS2FDHoN1D-A-Hg@r{|M~BbCZ3^x6x+(bC+v#^|%m`@)*s#>h`5jDOSF*4z zl*(dyXnlSCw>D8xQCtG5458-6M*o0-w$Y%FkW=`-zq;N%c#fR%bm1S5R#)3%;GiB< zcvnZr;wh`EhXe(Ulv+jC)ty7>o6OD3%)mZ9Aq#=>(8^`e%vYL&6Jlg!Y-?+SpYVTB zTY_t&6B84ionHt!-%g7N3f3khBpe+b4diI%WM{)fKp(GFqZAU;&csvtFy*#W`8hd# zy}fDdsm|S_V`J476%SZg0{#87zJ1$ptyy!H#XH2Hl51*eGUeklb8~Z{J!fad@i1>V zCg{SYIk~v#>FBsJa&N=gcf;^JaF+Su4wDAK{6o}QthXtod+O$&>M$z7ozKPo9J7nPMcySOMSDn=J@ zm~~QIx^zjbWn-p6U0t2uHl)=UO7I<_7^+WRP7aS&K|vuYDe1-)ad~+*1_p_}>qHb3 z2@w%uUP8_*T1rYvOdV+R%kCy|*nyjDZ-~3m6~}9r77@|=w>5I^y3Y{?_4n^zD4;C0 zwuNmb=xRhC#?i#Y1kPUUV?;!R7VDZ_-Sg+qw*ukzAt50!A&~g~Vzlb-f4*Fqsym10 zs&-!O9U3yMv_}R8;?lx(nX0O))J(`inv6NCBOaGyzAkBczYyvAfQf<|6=|_vH_+77 zBytOEINQOfGACy?8SYhPXD|I}{9KGfMo!Mg#x~YZ&26pa(vYsEsYyV}DIqB-N%XfN zkYJ95gq+-^wiG|m3&v*jBSWGf3A*raNTfgW{^q83o%iYP@=)WeP3SXY)$UZ{5Rn48 zJD@^;mKS6!J^cOsb<5wJZZ=al2a}59UA=l0)27bM%$%2(r@t<#eQ0~h%G{jSdYHrV zer`z69VUeYUS+y)1_p*)y~<)xgf`hn`c)F@VY0~L1a*0N`6#~fDVC8;)$A#c{k76Z zD;gY$=NUH7Ow)s{5$_2nhc(u~YFW1XCwircL$EsaIp-P~+Ozh}w2Z0(vL zs$XbomS<)@XSl8Gmdz?nx~ zoL79FN{#NcE32DPK(GBt(k!iA>1kMnSU zI$9Y+*$W5?Lg$c^k--v5y1K`pfOkGOB9Tb5t|a7#6*gFGNnxRq zswzxY85x-^)CXT*=xjK@TO=e84D(Sam*qjW5pmPRU=gOijEsz5#+>IMWCkbL9Xo0x zkIry+b*-qXf@#qi%f)p6KJs*813yozC@mwyc)Y?cah=}t*3+k@zkdA!>EPkv(bXL* zwHo5);khHGqpN%8_U&QoveHr~S=qL=iR!E)sK3cFo3SdVWov6|6&01e^(kTVZW?A+ 
zE30J?Q#R<}s>89$t9KZGHDX_|Zctws8#^>HYhQjJ?Jh(0z|>z^U+*$4X=rHRC*Aw$ ziwgs^@z<}*SFR-S+I?tym33>l+GV|b%G=7!Y++?3AuLP;@kqN^LQ&E7Xa{3tWCS22 z7;{~$<=ur#d#hv7k&&;htg2r%e(DlFAMfcYGLv_Dz{nWTKGzxd)Cx#MLqlT=wX(YU z^yyPve2U?DV(ex3`1qg`9G#sr_o(a4FY_g5Dhi+Ov_GmdE*UAF(}m_5x|`RWxkP^5ryW)nLAU zPC>!cP!V?c^Np|J!aES+*eUis9^+I`j^ z%x^mNds6|A8R!dZ%Sf7T^I1Z3SXxFr`YC|^GTKFtgYzS2nMsD3Gk!=dKBz$eoylQ` zgVViyBol#UXPnck5o*|XC0=P2EY^w4>_noD&*h>1UDhW}N=6*{R8EEhL}(69_ouS* zB4JBG@-WLE;j<}R(kj(9pt6cd0!81y>(#pNf><5%Q7ry`e+d^R*;U%1e0>&H){JXT zj*h0kV?^;%d(6GPyfH+Kz|nK zRc@`8PqoQ{K0)n9#m0_IWV%lX$7Kk49uz`PN}5?biAzmQ1yJ%dg7K%Pr>CIM35Wh2 z)~>G~&90xFmzNk71ykPrU}GkKzjI-`y{&D0VxpSdSxrst&71t+Re214E&E_$7#S>XKkt+^pBBjfziKu)svQ5FKB{`&PS z46*r+_~tqLj^Dql#x$LrcA+sm!ZOfBOG`_WUi%ZoiC*iq`x<#V_YHyIIepGf$|v2e zU_<~gz^P7oZg;?R@i{wOpxQCe(uzq=p7Lct5OSJzCIHQbORFg>hlhm`l99#5#C%Ky z7qc<{BVD+57CHzhBu66;7MJRIIM*J{#l_{aIy$$w*z%Gi0H3nTxHSTXLYq(EUoatm zHO<7u#QvrASy@>ZE?j_fho8+~C_w|__+8kOy9n^`zGY|M4wB$^S=-s(-rn0Y01*|T zq4CQ=BH0Yl0(^Y#dn+UQ!8K`V^{uU#Kx%KTDo0GNb_x1q>qU{)xW}b*LN#@DsSq+r zJW_sV){y7ena|C=RyZFjeD)w*+IeO8vHOlWEL&Bzk4?9%W`EK{ELBNkVX-G2%v6TZ z^~)DwB*DA}8ZAH{Swh3ZbHX75fr99Q$hNn(#tQog(f(~iuz(@k-N(L!9p-O1RhF0U z13&^%$#%GUfR37)nl29&Pz!r|>giFa$0Q_BkdW{?%>OPdEc{ITNM#j-=$0687>t<6 zh=^}-z=Z--UCqr*4<9~cX0}`#uiT#R$jQ%N8|VS2cpyb1vnsExtqmUovJ6N3U04XE zSV9l-ObacjUMl8_uU5C}UyHidZ~6-5*-y#w{n$iT4SDxiIB!1e|?cW~R? zXjW}b;}$|-USZ)ne5?7nx!Tjcu`O~O*myuan3eqNX@!Mh&9g`l79>(qLL%XrLlam% zusj_dtKF&Mp#0QcJIY#GT9%eeJX4+}MhyYyXZv+(vmjy3q15{u8;dokv0P@L&hH8B zH~%yq930r959dMZPp5qeG;+0Q3Go1C!6D(%f;dBuffDqi{j;-~!N1(Qhxqg7&+zcD zpPwHz!}(VGBSA7rn43QLBKi#mhlg7hwoL*rzJJ$hyq7`=#lXgv2eJ&%LP|oiGE!pp zNX>)UPXXi4d2N^x);G5Xl*(X-NxR2`V>nC=Ysy^(Dm!ToNLCNshr#a#wa_^10a$|mOf46eeQvI?R8ik6 z14Bc1#5$7%moXTNmoG)|?Ck6Sgex5}x8pl9Bl@9OIuo94gI@yI)QfykR#tZD%8isB zb0A&V0zX1dkkXQpNW={)feN!{?*VU9#RH+z>Khuoyu4IZ zR7TYZ@h*d_f$0Y?gBsVOswzGNg_VUxscp#f^isDLeE=}+>yl+E-%?XWS1DY%EG#T` zty}lar|l39&HFZ1VLM-epgrspIo|cklKO479qh$Q}b>^}k>^@I;^7hk=`) zm1XDXSYto;NrDav+uqUf$>YbBZri4Ic8_6!*%s#JO4N&ve7nV3u!esEaZ&EgAkp)` zW4UZ61)y3E?U|TRUdMa-)y_6RSH<<>f%qY$T+gFKlv-a|S+VS{;(*0MwS@!+59Vr% z0G(&G+UrA9B zoUsbvBojdhi0uR3+>E^sGLej@P_ZwfSS=z`Z$ol02q2iAmInF9KNo-Yp%m-Rj;z?D zSb7SC;OMH>cR;Q9ksap)orl3?VE zXoFh!(yS~@LEYJ>FJI2KyM*WVtrsyE2q2Q(YgT}}(yu|G^Vp0s5PgQOk_x4guN{Gj z6K0UnwPUz&s(^9+zaIV0xZW7^|p<)3~2(Yy_RafV_`O1as9$Xp%F~4lMMi4 zO?7n(OH1&uObiSy!o0}T)FSJV;(II_Fv5P<*S|VB*i^}K?Ej|h?CcD)T_sCRQqly* z3=C|5d~j>fSU{54K?#%&@eLlWc)$&dZDu6*m78RDsj2JCx~P17d{ULnO-;+3mNgCw zW98=g*K4*{M!2A@lWI9-!b`t@fAZvs()#r~*Zg5B_YWzR^1Pk?0cFtG+$<#}H8e1A zlajB$ug{@9ifzmG^;>)U;<7St(CVGtUGVJy7i!E-E-sIlnYVU#)s>Y=VHSbZU!?{9 zJb}^$OR*Xp91NL_?G*YnrvRPSV-Cmzti2{M4`fd5bE3?Qu(Y(~lu6IWNjH(|K3sT9*#8mfv6x_a- zrzebzXhZK2&ByS$cOS$fuAMs^Y#8AO((|7{$S4&|5*{5bMG0mtl$2{8*zZx5XHS|W zB(Z*BKr~>5(cebFwm}U7t_mIgjid^6hnAV1ehIM}c(jA#W3Yrljen@h2i<-=qPrag z(c9nod2WbUe0Rt6nf zw`l+9Q31r5wcn#5B*<5bycHy2BMYEbcY^7CZd_bi@HnMWn=)&(35qTfX1cD`Ko`tOC=FO1xy7S4t zzCO5HV8a5c5HwTj6?^6FzSbwHBo>FH~0YS2eZEQds}b>-ldAovaN_rFOkl<@NuO7ifV0o`iE%= zz}rxNzXTrWEdBFfUdP35NFEs#lR#b|K(jG6&dADo_GY|duN9^u&JIb0+5tItyu1lg zv%tWFGZsby;9^!z4!s``BR@a?9kCd>c`#(ylr<*Cb^bRgsL9la@bDg(F1tHBAT;nA z%dN>oq2gm=a`mgZ`T55nLQhF~xw|v~*24nW(b3Tn;tg$WQuS@nX`tHHmX?E)?XEdp zpgfpX7U!NMAuV0Nm`1APJb*cvkF{2#O1^D-JQ}PEj?XTU!M&j z>k)-IIzHY$Jduy*o~ZSR?lAQ^*=z>UvRP8o&=7Ll`qj|j%LmE0Bi||LC*_r>_;@QL zBii#Hq}c7_1G->Y-OgJZB;~e_fyIFOSy+Ixrbm=*xe8E)E-Ws71823c4FL-=o6gSh zdfmeOJiBgL?&-d$qoCIl4vv%veOXCK|M%}j7eXK(y5?WMv$MlJXH)-@g9LKQ;)c;t zoh4u3O$aX)6L?|Jpz`vt31j(sdw2I+u)*5r>wwsR>wu$_SFoHyO`XhsRs&DiEh(*X 
zeD8{{-PYe$1?B_+w}_uF9Vb72{@e}}59#jr($cH6!1~z6Q&dE8XTZn;7XCuY6`+6` z8XR;!P*Ks>pMX#mkZ9rJrAzPxKW8@?ZnsuCEP(U;@Zker9Tpf`Bko%b=E}&*W@l$x zT3fR|e5hS)9JGSG{1v{PnW%F5-1ZI|hk;?su1QIm23`SEDlRS#bR#2^<@E#b0C;n% z*26_y+*gg+Yh(H++`-_kV(5?~oPWw4?AZ46_qVGY2WoXXXz~VYe8qROcU{8C&ThuO zlFRgW3O>fM%GXz1Lxaf4@GP%Hu#}}g7vQtRbdU2 zwH{e3W#XNko$)-jUxI?%U^5|Y2FGn;V{_U|%?0lGZ%a6QlcHN@o!W0W{0GFoGI9U_ From b349460e6a57685c1d641064df427d44e175e04e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 22:19:00 +0000 Subject: [PATCH 419/496] build(deps): bump torchmetrics in /demo-notebooks/guided-demos Bumps [torchmetrics](https://github.com/Lightning-AI/torchmetrics) from 1.5.1 to 1.6.0. - [Release notes](https://github.com/Lightning-AI/torchmetrics/releases) - [Changelog](https://github.com/Lightning-AI/torchmetrics/blob/master/CHANGELOG.md) - [Commits](https://github.com/Lightning-AI/torchmetrics/compare/v1.5.1...v1.6.0) --- updated-dependencies: - dependency-name: torchmetrics dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .../guided-demos/notebook-ex-outputs/requirements.txt | 2 +- demo-notebooks/guided-demos/preview_nbs/requirements.txt | 2 +- demo-notebooks/guided-demos/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt index 27a5620c..c748bec2 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==2.4.0 ray_lightning -torchmetrics==1.5.1 +torchmetrics==1.6.0 torchvision==0.20.1 diff --git a/demo-notebooks/guided-demos/preview_nbs/requirements.txt b/demo-notebooks/guided-demos/preview_nbs/requirements.txt index 27a5620c..c748bec2 100644 --- a/demo-notebooks/guided-demos/preview_nbs/requirements.txt +++ b/demo-notebooks/guided-demos/preview_nbs/requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==2.4.0 ray_lightning -torchmetrics==1.5.1 +torchmetrics==1.6.0 torchvision==0.20.1 diff --git a/demo-notebooks/guided-demos/requirements.txt b/demo-notebooks/guided-demos/requirements.txt index 27a5620c..c748bec2 100644 --- a/demo-notebooks/guided-demos/requirements.txt +++ b/demo-notebooks/guided-demos/requirements.txt @@ -1,4 +1,4 @@ pytorch_lightning==2.4.0 ray_lightning -torchmetrics==1.5.1 +torchmetrics==1.6.0 torchvision==0.20.1 From 2bf575b815ad44b8cfd414ff79e7c580ac298e89 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Wed, 13 Nov 2024 15:43:19 +0000 Subject: [PATCH 420/496] fix: removes label selector from old e2e tests --- tests/e2e/heterogeneous_clusters_kind_test.py | 2 +- .../e2e/heterogeneous_clusters_oauth_test.py | 2 +- tests/e2e/support.py | 40 +++++++++++++------ 3 files changed, 29 insertions(+), 15 deletions(-) diff --git a/tests/e2e/heterogeneous_clusters_kind_test.py b/tests/e2e/heterogeneous_clusters_kind_test.py index 8f814a7c..052fa7b8 100644 --- a/tests/e2e/heterogeneous_clusters_kind_test.py +++ b/tests/e2e/heterogeneous_clusters_kind_test.py @@ -25,7 +25,7 @@ def teardown_method(self): @pytest.mark.nvidia_gpu def test_heterogeneous_clusters(self): create_namespace(self) - create_kueue_resources(self, 2) + create_kueue_resources(self, 2, with_labels=True, with_tolerations=True) 
self.run_heterogeneous_clusters() def run_heterogeneous_clusters( diff --git a/tests/e2e/heterogeneous_clusters_oauth_test.py b/tests/e2e/heterogeneous_clusters_oauth_test.py index 4a7a687c..d57cff48 100644 --- a/tests/e2e/heterogeneous_clusters_oauth_test.py +++ b/tests/e2e/heterogeneous_clusters_oauth_test.py @@ -24,7 +24,7 @@ def teardown_method(self): def test_heterogeneous_clusters(self): create_namespace(self) - create_kueue_resources(self, 2) + create_kueue_resources(self, 2, with_labels=True, with_tolerations=True) self.run_heterogeneous_clusters() def run_heterogeneous_clusters( diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 60488466..d76b460c 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -66,12 +66,14 @@ def create_namespace(self): return RuntimeError(e) -def create_new_resource_flavor(self, num_flavors): +def create_new_resource_flavor(self, num_flavors, with_labels, with_tolerations): self.resource_flavors = [] for i in range(num_flavors): default = i < 1 resource_flavor = f"test-resource-flavor-{random_choice()}" - create_resource_flavor(self, resource_flavor, default) + create_resource_flavor( + self, resource_flavor, default, with_labels, with_tolerations + ) self.resource_flavors.append(resource_flavor) @@ -173,7 +175,9 @@ def create_cluster_queue(self, cluster_queue, flavor): self.cluster_queue = cluster_queue -def create_resource_flavor(self, flavor, default=True): +def create_resource_flavor( + self, flavor, default=True, with_labels=False, with_tolerations=False +): worker_label, worker_value = os.getenv("WORKER_LABEL", "worker-1=true").split("=") control_label, control_value = os.getenv( "CONTROL_LABEL", "ingress-ready=true" @@ -182,9 +186,11 @@ def create_resource_flavor(self, flavor, default=True): "TOLERATION_KEY", "node-role.kubernetes.io/control-plane" ) - node_labels = ( - {worker_label: worker_value} if default else {control_label: control_value} - ) + node_labels = {} + if with_labels: + node_labels = ( + {worker_label: worker_value} if default else {control_label: control_value} + ) resource_flavor_json = { "apiVersion": "kueue.x-k8s.io/v1beta1", @@ -192,13 +198,19 @@ def create_resource_flavor(self, flavor, default=True): "metadata": {"name": flavor}, "spec": { "nodeLabels": node_labels, - "tolerations": [ + **( { - "key": toleration_key, - "operator": "Exists", - "effect": "NoSchedule", + "tolerations": [ + { + "key": toleration_key, + "operator": "Exists", + "effect": "NoSchedule", + } + ] } - ], + if with_tolerations + else {} + ), }, } @@ -260,9 +272,11 @@ def create_local_queue(self, cluster_queue, local_queue, is_default=True): self.local_queue = local_queue -def create_kueue_resources(self, resource_ammount=1): +def create_kueue_resources( + self, resource_ammount=1, with_labels=False, with_tolerations=False +): print("creating Kueue resources ...") - create_new_resource_flavor(self, resource_ammount) + create_new_resource_flavor(self, resource_ammount, with_labels, with_tolerations) create_new_cluster_queue(self, resource_ammount) create_new_local_queue(self, resource_ammount) From d2b47e68de314f96fb699d0292001bde29340120 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Mon, 18 Nov 2024 14:45:32 +0000 Subject: [PATCH 421/496] fix(ci): remove 'continue-on-error' from unit test workflow for accurate test reporting --- .github/workflows/unit-tests.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 34327533..267e1a6c 100755 --- 
a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -29,7 +29,6 @@ jobs: coverage run -m pytest coverage=$(coverage report -m | tail -1 | tail -c 4 | head -c 2) if (( $coverage < 90 )); then echo "Coverage failed at ${coverage}%"; exit 1; else echo "Coverage passed, ${coverage}%"; fi - continue-on-error: true - name: Upload to Codecov uses: codecov/codecov-action@v4 with: From 2fc4eb29baa7558fa1a98135fa54f543ef475d4c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 21:38:52 +0000 Subject: [PATCH 422/496] build(deps): bump aiohttp from 3.10.8 to 3.10.11 Bumps [aiohttp](https://github.com/aio-libs/aiohttp) from 3.10.8 to 3.10.11. - [Release notes](https://github.com/aio-libs/aiohttp/releases) - [Changelog](https://github.com/aio-libs/aiohttp/blob/master/CHANGES.rst) - [Commits](https://github.com/aio-libs/aiohttp/compare/v3.10.8...v3.10.11) --- updated-dependencies: - dependency-name: aiohttp dependency-type: indirect ... Signed-off-by: dependabot[bot] --- poetry.lock | 186 ++++++++++++++++++++++++++-------------------------- 1 file changed, 93 insertions(+), 93 deletions(-) diff --git a/poetry.lock b/poetry.lock index ef86eafa..9dd98d00 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,108 +13,108 @@ files = [ [[package]] name = "aiohttp" -version = "3.10.8" +version = "3.10.11" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a1ba7bc139592339ddeb62c06486d0fa0f4ca61216e14137a40d626c81faf10c"}, - {file = "aiohttp-3.10.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85e4d7bd05d18e4b348441e7584c681eff646e3bf38f68b2626807f3add21aa2"}, - {file = "aiohttp-3.10.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:69de056022e7abf69cb9fec795515973cc3eeaff51e3ea8d72a77aa933a91c52"}, - {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee3587506898d4a404b33bd19689286ccf226c3d44d7a73670c8498cd688e42c"}, - {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fe285a697c851734285369614443451462ce78aac2b77db23567507484b1dc6f"}, - {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10c7932337285a6bfa3a5fe1fd4da90b66ebfd9d0cbd1544402e1202eb9a8c3e"}, - {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd9716ef0224fe0d0336997eb242f40619f9f8c5c57e66b525a1ebf9f1d8cebe"}, - {file = "aiohttp-3.10.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ceacea31f8a55cdba02bc72c93eb2e1b77160e91f8abd605969c168502fd71eb"}, - {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9721554bfa9e15f6e462da304374c2f1baede3cb06008c36c47fa37ea32f1dc4"}, - {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:22cdeb684d8552490dd2697a5138c4ecb46f844892df437aaf94f7eea99af879"}, - {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e56bb7e31c4bc79956b866163170bc89fd619e0581ce813330d4ea46921a4881"}, - {file = "aiohttp-3.10.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3a95d2686bc4794d66bd8de654e41b5339fab542b2bca9238aa63ed5f4f2ce82"}, - {file = 
"aiohttp-3.10.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d82404a0e7b10e0d7f022cf44031b78af8a4f99bd01561ac68f7c24772fed021"}, - {file = "aiohttp-3.10.8-cp310-cp310-win32.whl", hash = "sha256:4e10b04542d27e21538e670156e88766543692a0a883f243ba8fad9ddea82e53"}, - {file = "aiohttp-3.10.8-cp310-cp310-win_amd64.whl", hash = "sha256:680dbcff5adc7f696ccf8bf671d38366a1f620b5616a1d333d0cb33956065395"}, - {file = "aiohttp-3.10.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:33a68011a38020ed4ff41ae0dbf4a96a202562ecf2024bdd8f65385f1d07f6ef"}, - {file = "aiohttp-3.10.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c7efa6616a95e3bd73b8a69691012d2ef1f95f9ea0189e42f338fae080c2fc6"}, - {file = "aiohttp-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddb9b9764cfb4459acf01c02d2a59d3e5066b06a846a364fd1749aa168efa2be"}, - {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7f270f4ca92760f98a42c45a58674fff488e23b144ec80b1cc6fa2effed377"}, - {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6984dda9d79064361ab58d03f6c1e793ea845c6cfa89ffe1a7b9bb400dfd56bd"}, - {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f6d47e392c27206701565c8df4cac6ebed28fdf6dcaea5b1eea7a4631d8e6db"}, - {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a72f89aea712c619b2ca32c6f4335c77125ede27530ad9705f4f349357833695"}, - {file = "aiohttp-3.10.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36074b26f3263879ba8e4dbd33db2b79874a3392f403a70b772701363148b9f"}, - {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e32148b4a745e70a255a1d44b5664de1f2e24fcefb98a75b60c83b9e260ddb5b"}, - {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5aa1a073514cf59c81ad49a4ed9b5d72b2433638cd53160fd2f3a9cfa94718db"}, - {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d3a79200a9d5e621c4623081ddb25380b713c8cf5233cd11c1aabad990bb9381"}, - {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e45fdfcb2d5bcad83373e4808825b7512953146d147488114575780640665027"}, - {file = "aiohttp-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f78e2a78432c537ae876a93013b7bc0027ba5b93ad7b3463624c4b6906489332"}, - {file = "aiohttp-3.10.8-cp311-cp311-win32.whl", hash = "sha256:f8179855a4e4f3b931cb1764ec87673d3fbdcca2af496c8d30567d7b034a13db"}, - {file = "aiohttp-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:ef9b484604af05ca745b6108ca1aaa22ae1919037ae4f93aaf9a37ba42e0b835"}, - {file = "aiohttp-3.10.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ab2d6523575fc98896c80f49ac99e849c0b0e69cc80bf864eed6af2ae728a52b"}, - {file = "aiohttp-3.10.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f5d5d5401744dda50b943d8764508d0e60cc2d3305ac1e6420935861a9d544bc"}, - {file = "aiohttp-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de23085cf90911600ace512e909114385026b16324fa203cc74c81f21fd3276a"}, - {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4618f0d2bf523043866a9ff8458900d8eb0a6d4018f251dae98e5f1fb699f3a8"}, - {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21c1925541ca84f7b5e0df361c0a813a7d6a56d3b0030ebd4b220b8d232015f9"}, - {file = 
"aiohttp-3.10.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:497a7d20caea8855c5429db3cdb829385467217d7feb86952a6107e033e031b9"}, - {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c887019dbcb4af58a091a45ccf376fffe800b5531b45c1efccda4bedf87747ea"}, - {file = "aiohttp-3.10.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40d2d719c3c36a7a65ed26400e2b45b2d9ed7edf498f4df38b2ae130f25a0d01"}, - {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:57359785f27394a8bcab0da6dcd46706d087dfebf59a8d0ad2e64a4bc2f6f94f"}, - {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a961ee6f2cdd1a2be4735333ab284691180d40bad48f97bb598841bfcbfb94ec"}, - {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:fe3d79d6af839ffa46fdc5d2cf34295390894471e9875050eafa584cb781508d"}, - {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a281cba03bdaa341c70b7551b2256a88d45eead149f48b75a96d41128c240b3"}, - {file = "aiohttp-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c6769d71bfb1ed60321363a9bc05e94dcf05e38295ef41d46ac08919e5b00d19"}, - {file = "aiohttp-3.10.8-cp312-cp312-win32.whl", hash = "sha256:a3081246bab4d419697ee45e555cef5cd1def7ac193dff6f50be761d2e44f194"}, - {file = "aiohttp-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:ab1546fc8e00676febc81c548a876c7bde32f881b8334b77f84719ab2c7d28dc"}, - {file = "aiohttp-3.10.8-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b1a012677b8e0a39e181e218de47d6741c5922202e3b0b65e412e2ce47c39337"}, - {file = "aiohttp-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2df786c96c57cd6b87156ba4c5f166af7b88f3fc05f9d592252fdc83d8615a3c"}, - {file = "aiohttp-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8885ca09d3a9317219c0831276bfe26984b17b2c37b7bf70dd478d17092a4772"}, - {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dbf252ac19860e0ab56cd480d2805498f47c5a2d04f5995d8d8a6effd04b48c"}, - {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2036479b6b94afaaca7d07b8a68dc0e67b0caf5f6293bb6a5a1825f5923000"}, - {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:365783e1b7c40b59ed4ce2b5a7491bae48f41cd2c30d52647a5b1ee8604c68ad"}, - {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:270e653b5a4b557476a1ed40e6b6ce82f331aab669620d7c95c658ef976c9c5e"}, - {file = "aiohttp-3.10.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8960fabc20bfe4fafb941067cda8e23c8c17c98c121aa31c7bf0cdab11b07842"}, - {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f21e8f2abed9a44afc3d15bba22e0dfc71e5fa859bea916e42354c16102b036f"}, - {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fecd55e7418fabd297fd836e65cbd6371aa4035a264998a091bbf13f94d9c44d"}, - {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:badb51d851358cd7535b647bb67af4854b64f3c85f0d089c737f75504d5910ec"}, - {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e860985f30f3a015979e63e7ba1a391526cdac1b22b7b332579df7867848e255"}, - {file = "aiohttp-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:71462f8eeca477cbc0c9700a9464e3f75f59068aed5e9d4a521a103692da72dc"}, - {file = "aiohttp-3.10.8-cp313-cp313-win32.whl", hash = "sha256:177126e971782769b34933e94fddd1089cef0fe6b82fee8a885e539f5b0f0c6a"}, - {file = "aiohttp-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:98a4eb60e27033dee9593814ca320ee8c199489fbc6b2699d0f710584db7feb7"}, - {file = "aiohttp-3.10.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ffef3d763e4c8fc97e740da5b4d0f080b78630a3914f4e772a122bbfa608c1db"}, - {file = "aiohttp-3.10.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:597128cb7bc5f068181b49a732961f46cb89f85686206289d6ccb5e27cb5fbe2"}, - {file = "aiohttp-3.10.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f23a6c1d09de5de89a33c9e9b229106cb70dcfdd55e81a3a3580eaadaa32bc92"}, - {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da57af0c54a302b7c655fa1ccd5b1817a53739afa39924ef1816e7b7c8a07ccb"}, - {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e7a6af57091056a79a35104d6ec29d98ec7f1fb7270ad9c6fff871b678d1ff8"}, - {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32710d6b3b6c09c60c794d84ca887a3a2890131c0b02b3cefdcc6709a2260a7c"}, - {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b91f4f62ad39a8a42d511d66269b46cb2fb7dea9564c21ab6c56a642d28bff5"}, - {file = "aiohttp-3.10.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:471a8c47344b9cc309558b3fcc469bd2c12b49322b4b31eb386c4a2b2d44e44a"}, - {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc0e7f91705445d79beafba9bb3057dd50830e40fe5417017a76a214af54e122"}, - {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:85431c9131a9a0f65260dc7a65c800ca5eae78c4c9931618f18c8e0933a0e0c1"}, - {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:b91557ee0893da52794b25660d4f57bb519bcad8b7df301acd3898f7197c5d81"}, - {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:4954e6b06dd0be97e1a5751fc606be1f9edbdc553c5d9b57d72406a8fbd17f9d"}, - {file = "aiohttp-3.10.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a087c84b4992160ffef7afd98ef24177c8bd4ad61c53607145a8377457385100"}, - {file = "aiohttp-3.10.8-cp38-cp38-win32.whl", hash = "sha256:e1f0f7b27171b2956a27bd8f899751d0866ddabdd05cbddf3520f945130a908c"}, - {file = "aiohttp-3.10.8-cp38-cp38-win_amd64.whl", hash = "sha256:c4916070e12ae140110aa598031876c1bf8676a36a750716ea0aa5bd694aa2e7"}, - {file = "aiohttp-3.10.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5284997e3d88d0dfb874c43e51ae8f4a6f4ca5b90dcf22995035187253d430db"}, - {file = "aiohttp-3.10.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9443d9ebc5167ce1fbb552faf2d666fb22ef5716a8750be67efd140a7733738c"}, - {file = "aiohttp-3.10.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b667e2a03407d79a76c618dc30cedebd48f082d85880d0c9c4ec2faa3e10f43e"}, - {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98fae99d5c2146f254b7806001498e6f9ffb0e330de55a35e72feb7cb2fa399b"}, - {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8296edd99d0dd9d0eb8b9e25b3b3506eef55c1854e9cc230f0b3f885f680410b"}, - {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:1ce46dfb49cfbf9e92818be4b761d4042230b1f0e05ffec0aad15b3eb162b905"}, - {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c38cfd355fd86c39b2d54651bd6ed7d63d4fe3b5553f364bae3306e2445f847"}, - {file = "aiohttp-3.10.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:713dff3f87ceec3bde4f3f484861464e722cf7533f9fa6b824ec82bb5a9010a7"}, - {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21a72f4a9c69a8567a0aca12042f12bba25d3139fd5dd8eeb9931f4d9e8599cd"}, - {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6d1ad868624f6cea77341ef2877ad4e71f7116834a6cd7ec36ec5c32f94ee6ae"}, - {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a78ba86d5a08207d1d1ad10b97aed6ea48b374b3f6831d02d0b06545ac0f181e"}, - {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:aff048793d05e1ce05b62e49dccf81fe52719a13f4861530706619506224992b"}, - {file = "aiohttp-3.10.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d088ca05381fd409793571d8e34eca06daf41c8c50a05aeed358d2d340c7af81"}, - {file = "aiohttp-3.10.8-cp39-cp39-win32.whl", hash = "sha256:ee97c4e54f457c366e1f76fbbf3e8effee9de57dae671084a161c00f481106ce"}, - {file = "aiohttp-3.10.8-cp39-cp39-win_amd64.whl", hash = "sha256:d95ae4420669c871667aad92ba8cce6251d61d79c1a38504621094143f94a8b4"}, - {file = "aiohttp-3.10.8.tar.gz", hash = "sha256:21f8225f7dc187018e8433c9326be01477fb2810721e048b33ac49091b19fb4a"}, + {file = "aiohttp-3.10.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5077b1a5f40ffa3ba1f40d537d3bec4383988ee51fbba6b74aa8fb1bc466599e"}, + {file = "aiohttp-3.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d6a14a4d93b5b3c2891fca94fa9d41b2322a68194422bef0dd5ec1e57d7d298"}, + {file = "aiohttp-3.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffbfde2443696345e23a3c597049b1dd43049bb65337837574205e7368472177"}, + {file = "aiohttp-3.10.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20b3d9e416774d41813bc02fdc0663379c01817b0874b932b81c7f777f67b217"}, + {file = "aiohttp-3.10.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b943011b45ee6bf74b22245c6faab736363678e910504dd7531a58c76c9015a"}, + {file = "aiohttp-3.10.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48bc1d924490f0d0b3658fe5c4b081a4d56ebb58af80a6729d4bd13ea569797a"}, + {file = "aiohttp-3.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e12eb3f4b1f72aaaf6acd27d045753b18101524f72ae071ae1c91c1cd44ef115"}, + {file = "aiohttp-3.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f14ebc419a568c2eff3c1ed35f634435c24ead2fe19c07426af41e7adb68713a"}, + {file = "aiohttp-3.10.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:72b191cdf35a518bfc7ca87d770d30941decc5aaf897ec8b484eb5cc8c7706f3"}, + {file = "aiohttp-3.10.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5ab2328a61fdc86424ee540d0aeb8b73bbcad7351fb7cf7a6546fc0bcffa0038"}, + {file = "aiohttp-3.10.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa93063d4af05c49276cf14e419550a3f45258b6b9d1f16403e777f1addf4519"}, + {file = "aiohttp-3.10.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:30283f9d0ce420363c24c5c2421e71a738a2155f10adbb1a11a4d4d6d2715cfc"}, + {file = "aiohttp-3.10.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash 
= "sha256:e5358addc8044ee49143c546d2182c15b4ac3a60be01c3209374ace05af5733d"}, + {file = "aiohttp-3.10.11-cp310-cp310-win32.whl", hash = "sha256:e1ffa713d3ea7cdcd4aea9cddccab41edf6882fa9552940344c44e59652e1120"}, + {file = "aiohttp-3.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:778cbd01f18ff78b5dd23c77eb82987ee4ba23408cbed233009fd570dda7e674"}, + {file = "aiohttp-3.10.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:80ff08556c7f59a7972b1e8919f62e9c069c33566a6d28586771711e0eea4f07"}, + {file = "aiohttp-3.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c8f96e9ee19f04c4914e4e7a42a60861066d3e1abf05c726f38d9d0a466e695"}, + {file = "aiohttp-3.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fb8601394d537da9221947b5d6e62b064c9a43e88a1ecd7414d21a1a6fba9c24"}, + {file = "aiohttp-3.10.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea224cf7bc2d8856d6971cea73b1d50c9c51d36971faf1abc169a0d5f85a382"}, + {file = "aiohttp-3.10.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db9503f79e12d5d80b3efd4d01312853565c05367493379df76d2674af881caa"}, + {file = "aiohttp-3.10.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0f449a50cc33f0384f633894d8d3cd020e3ccef81879c6e6245c3c375c448625"}, + {file = "aiohttp-3.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82052be3e6d9e0c123499127782a01a2b224b8af8c62ab46b3f6197035ad94e9"}, + {file = "aiohttp-3.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20063c7acf1eec550c8eb098deb5ed9e1bb0521613b03bb93644b810986027ac"}, + {file = "aiohttp-3.10.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:489cced07a4c11488f47aab1f00d0c572506883f877af100a38f1fedaa884c3a"}, + {file = "aiohttp-3.10.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ea9b3bab329aeaa603ed3bf605f1e2a6f36496ad7e0e1aa42025f368ee2dc07b"}, + {file = "aiohttp-3.10.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ca117819d8ad113413016cb29774b3f6d99ad23c220069789fc050267b786c16"}, + {file = "aiohttp-3.10.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2dfb612dcbe70fb7cdcf3499e8d483079b89749c857a8f6e80263b021745c730"}, + {file = "aiohttp-3.10.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9b615d3da0d60e7d53c62e22b4fd1c70f4ae5993a44687b011ea3a2e49051b8"}, + {file = "aiohttp-3.10.11-cp311-cp311-win32.whl", hash = "sha256:29103f9099b6068bbdf44d6a3d090e0a0b2be6d3c9f16a070dd9d0d910ec08f9"}, + {file = "aiohttp-3.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:236b28ceb79532da85d59aa9b9bf873b364e27a0acb2ceaba475dc61cffb6f3f"}, + {file = "aiohttp-3.10.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7480519f70e32bfb101d71fb9a1f330fbd291655a4c1c922232a48c458c52710"}, + {file = "aiohttp-3.10.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f65267266c9aeb2287a6622ee2bb39490292552f9fbf851baabc04c9f84e048d"}, + {file = "aiohttp-3.10.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7400a93d629a0608dc1d6c55f1e3d6e07f7375745aaa8bd7f085571e4d1cee97"}, + {file = "aiohttp-3.10.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f34b97e4b11b8d4eb2c3a4f975be626cc8af99ff479da7de49ac2c6d02d35725"}, + {file = "aiohttp-3.10.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e7b825da878464a252ccff2958838f9caa82f32a8dbc334eb9b34a026e2c636"}, + {file = 
"aiohttp-3.10.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f92a344c50b9667827da308473005f34767b6a2a60d9acff56ae94f895f385"}, + {file = "aiohttp-3.10.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc6f1ab987a27b83c5268a17218463c2ec08dbb754195113867a27b166cd6087"}, + {file = "aiohttp-3.10.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1dc0f4ca54842173d03322793ebcf2c8cc2d34ae91cc762478e295d8e361e03f"}, + {file = "aiohttp-3.10.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7ce6a51469bfaacff146e59e7fb61c9c23006495d11cc24c514a455032bcfa03"}, + {file = "aiohttp-3.10.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aad3cd91d484d065ede16f3cf15408254e2469e3f613b241a1db552c5eb7ab7d"}, + {file = "aiohttp-3.10.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f4df4b8ca97f658c880fb4b90b1d1ec528315d4030af1ec763247ebfd33d8b9a"}, + {file = "aiohttp-3.10.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2e4e18a0a2d03531edbc06c366954e40a3f8d2a88d2b936bbe78a0c75a3aab3e"}, + {file = "aiohttp-3.10.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6ce66780fa1a20e45bc753cda2a149daa6dbf1561fc1289fa0c308391c7bc0a4"}, + {file = "aiohttp-3.10.11-cp312-cp312-win32.whl", hash = "sha256:a919c8957695ea4c0e7a3e8d16494e3477b86f33067478f43106921c2fef15bb"}, + {file = "aiohttp-3.10.11-cp312-cp312-win_amd64.whl", hash = "sha256:b5e29706e6389a2283a91611c91bf24f218962717c8f3b4e528ef529d112ee27"}, + {file = "aiohttp-3.10.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:703938e22434d7d14ec22f9f310559331f455018389222eed132808cd8f44127"}, + {file = "aiohttp-3.10.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9bc50b63648840854e00084c2b43035a62e033cb9b06d8c22b409d56eb098413"}, + {file = "aiohttp-3.10.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f0463bf8b0754bc744e1feb61590706823795041e63edf30118a6f0bf577461"}, + {file = "aiohttp-3.10.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6c6dec398ac5a87cb3a407b068e1106b20ef001c344e34154616183fe684288"}, + {file = "aiohttp-3.10.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcaf2d79104d53d4dcf934f7ce76d3d155302d07dae24dff6c9fffd217568067"}, + {file = "aiohttp-3.10.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25fd5470922091b5a9aeeb7e75be609e16b4fba81cdeaf12981393fb240dd10e"}, + {file = "aiohttp-3.10.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbde2ca67230923a42161b1f408c3992ae6e0be782dca0c44cb3206bf330dee1"}, + {file = "aiohttp-3.10.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:249c8ff8d26a8b41a0f12f9df804e7c685ca35a207e2410adbd3e924217b9006"}, + {file = "aiohttp-3.10.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:878ca6a931ee8c486a8f7b432b65431d095c522cbeb34892bee5be97b3481d0f"}, + {file = "aiohttp-3.10.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8663f7777ce775f0413324be0d96d9730959b2ca73d9b7e2c2c90539139cbdd6"}, + {file = "aiohttp-3.10.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6cd3f10b01f0c31481fba8d302b61603a2acb37b9d30e1d14e0f5a58b7b18a31"}, + {file = "aiohttp-3.10.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e8d8aad9402d3aa02fdc5ca2fe68bcb9fdfe1f77b40b10410a94c7f408b664d"}, + {file = "aiohttp-3.10.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:38e3c4f80196b4f6c3a85d134a534a56f52da9cb8d8e7af1b79a32eefee73a00"}, + {file = "aiohttp-3.10.11-cp313-cp313-win32.whl", hash = "sha256:fc31820cfc3b2863c6e95e14fcf815dc7afe52480b4dc03393c4873bb5599f71"}, + {file = "aiohttp-3.10.11-cp313-cp313-win_amd64.whl", hash = "sha256:4996ff1345704ffdd6d75fb06ed175938c133425af616142e7187f28dc75f14e"}, + {file = "aiohttp-3.10.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:74baf1a7d948b3d640badeac333af581a367ab916b37e44cf90a0334157cdfd2"}, + {file = "aiohttp-3.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:473aebc3b871646e1940c05268d451f2543a1d209f47035b594b9d4e91ce8339"}, + {file = "aiohttp-3.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c2f746a6968c54ab2186574e15c3f14f3e7f67aef12b761e043b33b89c5b5f95"}, + {file = "aiohttp-3.10.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d110cabad8360ffa0dec8f6ec60e43286e9d251e77db4763a87dcfe55b4adb92"}, + {file = "aiohttp-3.10.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0099c7d5d7afff4202a0c670e5b723f7718810000b4abcbc96b064129e64bc7"}, + {file = "aiohttp-3.10.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0316e624b754dbbf8c872b62fe6dcb395ef20c70e59890dfa0de9eafccd2849d"}, + {file = "aiohttp-3.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a5f7ab8baf13314e6b2485965cbacb94afff1e93466ac4d06a47a81c50f9cca"}, + {file = "aiohttp-3.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c891011e76041e6508cbfc469dd1a8ea09bc24e87e4c204e05f150c4c455a5fa"}, + {file = "aiohttp-3.10.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9208299251370ee815473270c52cd3f7069ee9ed348d941d574d1457d2c73e8b"}, + {file = "aiohttp-3.10.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:459f0f32c8356e8125f45eeff0ecf2b1cb6db1551304972702f34cd9e6c44658"}, + {file = "aiohttp-3.10.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:14cdc8c1810bbd4b4b9f142eeee23cda528ae4e57ea0923551a9af4820980e39"}, + {file = "aiohttp-3.10.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:971aa438a29701d4b34e4943e91b5e984c3ae6ccbf80dd9efaffb01bd0b243a9"}, + {file = "aiohttp-3.10.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9a309c5de392dfe0f32ee57fa43ed8fc6ddf9985425e84bd51ed66bb16bce3a7"}, + {file = "aiohttp-3.10.11-cp38-cp38-win32.whl", hash = "sha256:9ec1628180241d906a0840b38f162a3215114b14541f1a8711c368a8739a9be4"}, + {file = "aiohttp-3.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:9c6e0ffd52c929f985c7258f83185d17c76d4275ad22e90aa29f38e211aacbec"}, + {file = "aiohttp-3.10.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc493a2e5d8dc79b2df5bec9558425bcd39aff59fc949810cbd0832e294b106"}, + {file = "aiohttp-3.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3e70f24e7d0405be2348da9d5a7836936bf3a9b4fd210f8c37e8d48bc32eca6"}, + {file = "aiohttp-3.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968b8fb2a5eee2770eda9c7b5581587ef9b96fbdf8dcabc6b446d35ccc69df01"}, + {file = "aiohttp-3.10.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deef4362af9493d1382ef86732ee2e4cbc0d7c005947bd54ad1a9a16dd59298e"}, + {file = "aiohttp-3.10.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:686b03196976e327412a1b094f4120778c7c4b9cff9bce8d2fdfeca386b89829"}, + {file = "aiohttp-3.10.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:3bf6d027d9d1d34e1c2e1645f18a6498c98d634f8e373395221121f1c258ace8"}, + {file = "aiohttp-3.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:099fd126bf960f96d34a760e747a629c27fb3634da5d05c7ef4d35ef4ea519fc"}, + {file = "aiohttp-3.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c73c4d3dae0b4644bc21e3de546530531d6cdc88659cdeb6579cd627d3c206aa"}, + {file = "aiohttp-3.10.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0c5580f3c51eea91559db3facd45d72e7ec970b04528b4709b1f9c2555bd6d0b"}, + {file = "aiohttp-3.10.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fdf6429f0caabfd8a30c4e2eaecb547b3c340e4730ebfe25139779b9815ba138"}, + {file = "aiohttp-3.10.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d97187de3c276263db3564bb9d9fad9e15b51ea10a371ffa5947a5ba93ad6777"}, + {file = "aiohttp-3.10.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:0acafb350cfb2eba70eb5d271f55e08bd4502ec35e964e18ad3e7d34d71f7261"}, + {file = "aiohttp-3.10.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c13ed0c779911c7998a58e7848954bd4d63df3e3575f591e321b19a2aec8df9f"}, + {file = "aiohttp-3.10.11-cp39-cp39-win32.whl", hash = "sha256:22b7c540c55909140f63ab4f54ec2c20d2635c0289cdd8006da46f3327f971b9"}, + {file = "aiohttp-3.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:7b26b1551e481012575dab8e3727b16fe7dd27eb2711d2e63ced7368756268fb"}, + {file = "aiohttp-3.10.11.tar.gz", hash = "sha256:9dc2b8f3dcab2e39e0fa309c8da50c3b55e6f34ab25f1a71d3288f24924d33a7"}, ] [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" From e8f7709aa7717d053968fe181ebec2c1751f3be1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 23:34:00 +0000 Subject: [PATCH 423/496] build(deps-dev): bump @playwright/test in /ui-tests Bumps [@playwright/test](https://github.com/microsoft/playwright) from 1.48.2 to 1.49.0. - [Release notes](https://github.com/microsoft/playwright/releases) - [Commits](https://github.com/microsoft/playwright/compare/v1.48.2...v1.49.0) --- updated-dependencies: - dependency-name: "@playwright/test" dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- ui-tests/package.json | 2 +- ui-tests/yarn.lock | 28 ++++++++++++++-------------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/ui-tests/package.json b/ui-tests/package.json index 89971922..41c47066 100644 --- a/ui-tests/package.json +++ b/ui-tests/package.json @@ -16,7 +16,7 @@ "license": "BSD-3-Clause", "devDependencies": { "@jupyterlab/galata": "^5.3.0", - "@playwright/test": "^1.48.2", + "@playwright/test": "^1.49.0", "yarn-deduplicate": "^6.0.1" } } diff --git a/ui-tests/yarn.lock b/ui-tests/yarn.lock index 5d383ffc..cf0b5b0c 100644 --- a/ui-tests/yarn.lock +++ b/ui-tests/yarn.lock @@ -1170,12 +1170,12 @@ dependencies: exenv-es6 "^1.1.1" -"@playwright/test@^1.48.0", "@playwright/test@^1.48.2": - version "1.48.2" - resolved "https://registry.npmjs.org/@playwright/test/-/test-1.48.2.tgz#87dd40633f980872283404c8142a65744d3f13d6" - integrity sha512-54w1xCWfXuax7dz4W2M9uw0gDyh+ti/0K/MxcCUxChFh37kkdxPdfZDw5QBbuPUJHr1CiHJ1hXgSs+GgeQc5Zw== +"@playwright/test@^1.48.0", "@playwright/test@^1.49.0": + version "1.49.0" + resolved "https://registry.npmjs.org/@playwright/test/-/test-1.49.0.tgz#74227385b58317ee076b86b56d0e1e1b25cff01e" + integrity sha512-DMulbwQURa8rNIQrf94+jPJQ4FmOVdpE5ZppRNvWVjvhC+6sOeo28r8MgIpQRYouXRtt/FCCXU7zn20jnHR4Qw== dependencies: - playwright "1.48.2" + playwright "1.49.0" "@rjsf/core@^5.13.4": version "5.21.0" @@ -2098,17 +2098,17 @@ picocolors@^1.0.1: resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz#5358b76a78cde483ba5cef6a9dc9671440b27d59" integrity sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw== -playwright-core@1.48.2: - version "1.48.2" - resolved "https://registry.npmjs.org/playwright-core/-/playwright-core-1.48.2.tgz#cd76ed8af61690edef5c05c64721c26a8db2f3d7" - integrity sha512-sjjw+qrLFlriJo64du+EK0kJgZzoQPsabGF4lBvsid+3CNIZIYLgnMj9V6JY5VhM2Peh20DJWIVpVljLLnlawA== +playwright-core@1.49.0: + version "1.49.0" + resolved "https://registry.npmjs.org/playwright-core/-/playwright-core-1.49.0.tgz#8e69ffed3f41855b854982f3632f2922c890afcb" + integrity sha512-R+3KKTQF3npy5GTiKH/T+kdhoJfJojjHESR1YEWhYuEKRVfVaxH3+4+GvXE5xyCngCxhxnykk0Vlah9v8fs3jA== -playwright@1.48.2: - version "1.48.2" - resolved "https://registry.npmjs.org/playwright/-/playwright-1.48.2.tgz#fca45ae8abdc34835c715718072aaff7e305167e" - integrity sha512-NjYvYgp4BPmiwfe31j4gHLa3J7bD2WiBz8Lk2RoSsmX38SVIARZ18VYjxLjAcDsAhA+F4iSEXTSGgjua0rrlgQ== +playwright@1.49.0: + version "1.49.0" + resolved "https://registry.npmjs.org/playwright/-/playwright-1.49.0.tgz#df6b9e05423377a99658202844a294a8afb95d0a" + integrity sha512-eKpmys0UFDnfNb3vfsf8Vx2LEOtflgRebl0Im2eQQnYMA4Aqd+Zw8bEOB+7ZKvN76901mRnqdsiOGKxzVTbi7A== dependencies: - playwright-core "1.48.2" + playwright-core "1.49.0" optionalDependencies: fsevents "2.3.2" From 6500c41c2d1985f6fdba07463844fb4228fbb17e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 22:07:57 +0000 Subject: [PATCH 424/496] build(deps-dev): bump jupyterlab from 4.2.5 to 4.3.1 Bumps [jupyterlab](https://github.com/jupyterlab/jupyterlab) from 4.2.5 to 4.3.1. 
- [Release notes](https://github.com/jupyterlab/jupyterlab/releases) - [Changelog](https://github.com/jupyterlab/jupyterlab/blob/@jupyterlab/lsp@4.3.1/CHANGELOG.md) - [Commits](https://github.com/jupyterlab/jupyterlab/compare/@jupyterlab/lsp@4.2.5...@jupyterlab/lsp@4.3.1) --- updated-dependencies: - dependency-name: jupyterlab dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 14 +++++++------- pyproject.toml | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9dd98d00..413122c1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1666,13 +1666,13 @@ test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (> [[package]] name = "jupyterlab" -version = "4.2.5" +version = "4.3.1" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321"}, - {file = "jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75"}, + {file = "jupyterlab-4.3.1-py3-none-any.whl", hash = "sha256:2d9a1c305bc748e277819a17a5d5e22452e533e835f4237b2f30f3b0e491e01f"}, + {file = "jupyterlab-4.3.1.tar.gz", hash = "sha256:a4a338327556443521731d82f2a6ccf926df478914ca029616621704d47c3c65"}, ] [package.dependencies] @@ -1693,9 +1693,9 @@ tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.3.5)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] -docs-screenshots = ["altair (==5.3.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.2)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.1.post2)", "matplotlib (==3.8.3)", "nbconvert (>=7.0.0)", "pandas (==2.2.1)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.6.9)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<8.1.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.4.1)", "ipython (==8.16.1)", "ipywidgets (==8.1.5)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.2.post3)", "matplotlib (==3.9.2)", "nbconvert (>=7.0.0)", "pandas (==2.2.3)", "scipy (==1.14.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"] @@ -4212,4 +4212,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "c1aae8c255de1b3e2e8bcd45f6d4eddb81a395397fd46359500b94af18c8d37c" +content-hash = "a8bc47ba5ed0d0d30a52b50bf986b85da8cdc8b85f9dbe47ebae32dab695e95f" diff --git a/pyproject.toml b/pyproject.toml index c86799b5..ef88b255 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ pytest = "7.4.0" coverage = "7.6.4" pytest-mock = "3.11.1" pytest-timeout = "2.3.1" -jupyterlab = "4.2.5" +jupyterlab = "4.3.1" 
 [tool.pytest.ini_options]
 filterwarnings = [

From 9e2adefefa6af8687677ae9169341ece5f690469 Mon Sep 17 00:00:00 2001
From: Ignas Baranauskas
Date: Tue, 19 Nov 2024 12:21:52 +0000
Subject: [PATCH 425/496] fix: cell snapshot that changes after jupyterlab bump

---
 .../widgets-cell-5-linux.png | Bin 10040 -> 10323 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-5-linux.png b/ui-tests/tests/widget_notebook_example.test.ts-snapshots/widgets-cell-5-linux.png
index 92781875d7266212ec05c3417aa0e72ce60e60f9..c352985319e4b55696d7733a4bb895799aa748e4 100644
GIT binary patch
Date: Wed, 20 Nov 2024 16:49:04 +0000
Subject: [PATCH 426/496] fix: get_cluster with job submission

---
 src/codeflare_sdk/ray/cluster/cluster.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/codeflare_sdk/ray/cluster/cluster.py b/src/codeflare_sdk/ray/cluster/cluster.py
index fd059277..a3f34554 100644
--- a/src/codeflare_sdk/ray/cluster/cluster.py
+++ b/src/codeflare_sdk/ray/cluster/cluster.py
@@ -72,6 +72,7 @@ def __init__(self, config: ClusterConfiguration):
             request.
         """
         self.config = config
+        self._job_submission_client = None
         if self.config is None:
             warnings.warn(
                 "Please provide a ClusterConfiguration to initialise the Cluster object"
@@ -80,7 +81,6 @@ def __init__(self, config: ClusterConfiguration):
         else:
             self.resource_yaml = self.create_resource()

-        self._job_submission_client = None
         if is_notebook():
             cluster_up_down_buttons(self)

From 003a287bacc867d385f928c45e6bbd499ad4ac74 Mon Sep 17 00:00:00 2001
From: Ignas Baranauskas
Date: Wed, 13 Nov 2024 10:36:43 +0000
Subject: [PATCH 427/496] enhance error handling in _kube_api_error_handling

---
 .../kubernetes_cluster/kube_api_helpers.py | 59 +++++++++++--------
 1 file changed, 35 insertions(+), 24 deletions(-)

diff --git a/src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py b/src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py
index 8974a3f3..17a0c248 100644
--- a/src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py
+++ b/src/codeflare_sdk/common/kubernetes_cluster/kube_api_helpers.py
@@ -19,34 +19,45 @@
 import executing
 from kubernetes import client, config
-from urllib3.util import parse_url
-import os
+
+ERROR_MESSAGES = {
+    "Not Found": "The requested resource could not be located.\n"
+    "Please verify the resource name and namespace.",
+    "Unauthorized": "Access to the API is unauthorized.\n"
+    "Check your credentials or permissions.",
+    "Forbidden": "Access denied to the Kubernetes resource.\n"
+    "Ensure your role has sufficient permissions for this operation.",
+    "Conflict": "A conflict occurred with the RayCluster resource.\n"
+    "Only one RayCluster with the same name is allowed. "
+    "Please delete or rename the existing RayCluster before creating a new one with the desired name.",
+}

 # private methods
 def _kube_api_error_handling(
     e: Exception, print_error: bool = True
 ):  # pragma: no cover
- exists_msg = "Resource with this name already exists." - if type(e) == config.ConfigException: - raise PermissionError(perm_msg) - if type(e) == executing.executing.NotOneValueFound: + def print_message(message: str): if print_error: - print(nf_msg) - return - if type(e) == client.ApiException: - if e.reason == "Not Found": - if print_error: - print(nf_msg) - return - elif e.reason == "Unauthorized" or e.reason == "Forbidden": - if print_error: - print(perm_msg) - return - elif e.reason == "Conflict": - raise FileExistsError(exists_msg) - raise e + print(message) + + if isinstance(e, client.ApiException): + # Retrieve message based on reason, defaulting if reason is not known + message = ERROR_MESSAGES.get( + e.reason, f"Unexpected API error encountered (Reason: {e.reason})" + ) + full_message = f"{message}\nResponse: {e.body}" + print_message(full_message) + + elif isinstance(e, config.ConfigException): + message = "Configuration error: Unable to load Kubernetes configuration. Verify the config file path and format." + print_message(message) + + elif isinstance(e, executing.executing.NotOneValueFound): + message = "Execution error: Expected exactly one value in the operation but found none or multiple." + print_message(message) + + else: + message = f"Unexpected error:\n{str(e)}" + print_message(message) + raise e From 97cd6eaf0996ed1f3ab07a6ea7268d7c13b7f7f8 Mon Sep 17 00:00:00 2001 From: Jiri Petrlik Date: Wed, 27 Nov 2024 10:08:12 +0100 Subject: [PATCH 428/496] RHOAIENG-10449 - Add PR check for additional-demos notebooks (#684) --- .../additional_demo_notebook_tests.yaml | 255 ++++++++++++++++++ demo-notebooks/additional-demos/mnist.py | 160 +++++++++++ .../additional-demos/requirements.txt | 5 + 3 files changed, 420 insertions(+) create mode 100644 .github/workflows/additional_demo_notebook_tests.yaml create mode 100644 demo-notebooks/additional-demos/mnist.py create mode 100644 demo-notebooks/additional-demos/requirements.txt diff --git a/.github/workflows/additional_demo_notebook_tests.yaml b/.github/workflows/additional_demo_notebook_tests.yaml new file mode 100644 index 00000000..03201e10 --- /dev/null +++ b/.github/workflows/additional_demo_notebook_tests.yaml @@ -0,0 +1,255 @@ +name: Additional demo notebooks tests + +on: + pull_request: + types: [ labeled ] + workflow_dispatch: + +concurrency: + group: ${{ github.head_ref }}-${{ github.workflow }} + cancel-in-progress: true + +env: + CODEFLARE_OPERATOR_IMG: "quay.io/project-codeflare/codeflare-operator:dev" + +jobs: + verify-local_interactive: + if: ${{ github.event.label.name == 'test-additional-notebooks' }} + runs-on: ubuntu-20.04-4core + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Checkout common repo code + uses: actions/checkout@v4 + with: + repository: 'project-codeflare/codeflare-common' + ref: 'main' + path: 'common' + + - name: Checkout CodeFlare operator repository + uses: actions/checkout@v4 + with: + repository: project-codeflare/codeflare-operator + path: codeflare-operator + + - name: Set Go + uses: actions/setup-go@v5 + with: + go-version-file: './codeflare-operator/go.mod' + cache-dependency-path: "./codeflare-operator/go.sum" + + - name: Set up gotestfmt + uses: gotesttools/gotestfmt-action@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up specific Python version + uses: actions/setup-python@v5 + with: + python-version: '3.9' + cache: 'pip' # caching pip dependencies + + - name: Setup and start KinD cluster + uses: 
./common/github-actions/kind + + - name: Deploy CodeFlare stack + id: deploy + run: | + cd codeflare-operator + echo Setting up CodeFlare stack + make setup-e2e + echo Deploying CodeFlare operator + make deploy -e IMG="${CODEFLARE_OPERATOR_IMG}" -e ENV="e2e" + kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators codeflare-operator-manager + cd .. + + - name: Setup Additional demo notebooks execution + run: | + echo "Installing papermill and dependencies..." + pip install poetry papermill ipython ipykernel + # Disable virtualenv due to problems using packaged in virtualenv in papermill + poetry config virtualenvs.create false + + echo "Installing SDK..." + poetry install --with test,docs + + - name: Run local_interactive.ipynb + run: | + set -euo pipefail + + # Remove login/logout cells, as KinD doesn't support authentication using token + jq -r 'del(.cells[] | select(.source[] | contains("Create authentication object and log in to desired user account")))' local_interactive.ipynb > local_interactive.ipynb.tmp && mv local_interactive.ipynb.tmp local_interactive.ipynb + jq -r 'del(.cells[] | select(.source[] | contains("auth.logout()")))' local_interactive.ipynb > local_interactive.ipynb.tmp && mv local_interactive.ipynb.tmp local_interactive.ipynb + # Rewrite cluster_uri() to local_client_url() to retrieve client URL available out of cluster, as the test is executed outside of cluster + sed -i "s/cluster_uri()/local_client_url()/g" local_interactive.ipynb + # Replace async logs with waiting for job to finish, async logs don't work properly in papermill + JOB_WAIT=$(jq -r '.' ${GITHUB_WORKSPACE}/.github/resources/wait_for_job_cell.json) + jq --argjson job_wait "$JOB_WAIT" -r '(.cells[] | select(.source[] | contains("async for lines in client.tail_job_logs"))) |= $job_wait' local_interactive.ipynb > local_interactive.ipynb.tmp && mv local_interactive.ipynb.tmp local_interactive.ipynb + # Set explicit namespace as SDK need it (currently) to resolve local queues + sed -i "s/worker_cpu_requests=1,/worker_cpu_requests='250m', namespace='default',/" local_interactive.ipynb + # Run notebook + poetry run papermill local_interactive.ipynb local_interactive_out.ipynb --log-output --execution-timeout 1200 + env: + GRPC_DNS_RESOLVER: "native" + working-directory: demo-notebooks/additional-demos + + - name: Print CodeFlare operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing CodeFlare operator logs" + kubectl logs -n openshift-operators --tail -1 -l app.kubernetes.io/name=codeflare-operator | tee ${TEMP_DIR}/codeflare-operator.log + + - name: Print Kueue operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing Kueue operator logs" + KUEUE_CONTROLLER_POD=$(kubectl get pods -n kueue-system | grep kueue-controller | awk '{print $1}') + kubectl logs -n kueue-system --tail -1 ${KUEUE_CONTROLLER_POD} | tee ${TEMP_DIR}/kueue.log + + - name: Print KubeRay operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing KubeRay operator logs" + kubectl logs -n ray-system --tail -1 -l app.kubernetes.io/name=kuberay | tee ${TEMP_DIR}/kuberay.log + + - name: Export all KinD pod logs + uses: ./common/github-actions/kind-export-logs + if: always() && steps.deploy.outcome == 'success' + with: + output-directory: ${TEMP_DIR} + + - name: Upload logs + uses: actions/upload-artifact@v4 + if: always() && steps.deploy.outcome == 'success' + with: + name: logs-local_interactive + 
retention-days: 10 + path: | + ${{ env.TEMP_DIR }}/**/*.log + + verify-ray_job_client: + if: ${{ github.event.label.name == 'test-additional-notebooks' }} + runs-on: ubuntu-20.04-4core + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Checkout common repo code + uses: actions/checkout@v4 + with: + repository: 'project-codeflare/codeflare-common' + ref: 'main' + path: 'common' + + - name: Checkout CodeFlare operator repository + uses: actions/checkout@v4 + with: + repository: project-codeflare/codeflare-operator + path: codeflare-operator + + - name: Set Go + uses: actions/setup-go@v5 + with: + go-version-file: './codeflare-operator/go.mod' + cache-dependency-path: "./codeflare-operator/go.sum" + + - name: Set up gotestfmt + uses: gotesttools/gotestfmt-action@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up specific Python version + uses: actions/setup-python@v5 + with: + python-version: '3.9' + cache: 'pip' # caching pip dependencies + + - name: Setup and start KinD cluster + uses: ./common/github-actions/kind + + - name: Deploy CodeFlare stack + id: deploy + run: | + cd codeflare-operator + echo Setting up CodeFlare stack + make setup-e2e + echo Deploying CodeFlare operator + make deploy -e IMG="${CODEFLARE_OPERATOR_IMG}" -e ENV="e2e" + kubectl wait --timeout=120s --for=condition=Available=true deployment -n openshift-operators codeflare-operator-manager + cd .. + + - name: Setup Additional demo notebooks execution + run: | + echo "Installing papermill and dependencies..." + pip install poetry papermill ipython ipykernel + # Disable virtualenv due to problems using packaged in virtualenv in papermill + poetry config virtualenvs.create false + + echo "Installing SDK..." + poetry install --with test,docs + + - name: Run ray_job_client.ipynb + run: | + set -euo pipefail + + # Remove login/logout cells, as KinD doesn't support authentication using token + jq -r 'del(.cells[] | select(.source[] | contains("Create authentication object for user permissions")))' ray_job_client.ipynb > ray_job_client.ipynb.tmp && mv ray_job_client.ipynb.tmp ray_job_client.ipynb + jq -r 'del(.cells[] | select(.source[] | contains("auth.logout()")))' ray_job_client.ipynb > ray_job_client.ipynb.tmp && mv ray_job_client.ipynb.tmp ray_job_client.ipynb + # Rewrite cluster_uri() to local_client_url() to retrieve client URL available out of cluster, as the test is executed outside of cluster + sed -i "s/cluster_uri()/local_client_url()/g" ray_job_client.ipynb + # Replace async logs with waiting for job to finish, async logs don't work properly in papermill + JOB_WAIT=$(jq -r '.' 
${GITHUB_WORKSPACE}/.github/resources/wait_for_job_cell.json) + jq --argjson job_wait "$JOB_WAIT" -r '(.cells[] | select(.source[] | contains("async for lines in client.tail_job_logs"))) |= $job_wait' ray_job_client.ipynb > ray_job_client.ipynb.tmp && mv ray_job_client.ipynb.tmp ray_job_client.ipynb + # Set explicit namespace as SDK need it (currently) to resolve local queues + sed -i "s/worker_cpu_requests=1,/worker_cpu_requests='250m', namespace='default',/" ray_job_client.ipynb + sed -i "s/worker_memory_requests=4,/worker_memory_requests=1,/" ray_job_client.ipynb + sed -i "s/worker_memory_limits=4,/worker_memory_limits=1,/" ray_job_client.ipynb + sed -i "s/'Authorization': .*/'Authorization': None\",/" ray_job_client.ipynb + sed -i "s/num_workers=2/num_workers=1/" ray_job_client.ipynb + sed -i "s/RayJobClient(address=ray_dashboard, headers=header, verify=True)/RayJobClient(address=ray_dashboard, verify=False)/" ray_job_client.ipynb + # Run notebook + poetry run papermill ray_job_client.ipynb hf_interactive_out.ipynb --log-output --execution-timeout 1200 + env: + GRPC_DNS_RESOLVER: "native" + working-directory: demo-notebooks/additional-demos + + - name: Print CodeFlare operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing CodeFlare operator logs" + kubectl logs -n openshift-operators --tail -1 -l app.kubernetes.io/name=codeflare-operator | tee ${TEMP_DIR}/codeflare-operator.log + + - name: Print Kueue operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing Kueue operator logs" + KUEUE_CONTROLLER_POD=$(kubectl get pods -n kueue-system | grep kueue-controller | awk '{print $1}') + kubectl logs -n kueue-system --tail -1 ${KUEUE_CONTROLLER_POD} | tee ${TEMP_DIR}/kueue.log + + - name: Print KubeRay operator logs + if: always() && steps.deploy.outcome == 'success' + run: | + echo "Printing KubeRay operator logs" + kubectl logs -n ray-system --tail -1 -l app.kubernetes.io/name=kuberay | tee ${TEMP_DIR}/kuberay.log + + - name: Export all KinD pod logs + uses: ./common/github-actions/kind-export-logs + if: always() && steps.deploy.outcome == 'success' + with: + output-directory: ${TEMP_DIR} + + - name: Upload logs + uses: actions/upload-artifact@v4 + if: always() && steps.deploy.outcome == 'success' + with: + name: logs-ray_job_client + retention-days: 10 + path: | + ${{ env.TEMP_DIR }}/**/*.log diff --git a/demo-notebooks/additional-demos/mnist.py b/demo-notebooks/additional-demos/mnist.py new file mode 100644 index 00000000..6eb663dc --- /dev/null +++ b/demo-notebooks/additional-demos/mnist.py @@ -0,0 +1,160 @@ +# Copyright 2022 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# In[] +import os + +import torch +from pytorch_lightning import LightningModule, Trainer +from pytorch_lightning.callbacks.progress import TQDMProgressBar +from pytorch_lightning.loggers import CSVLogger +from torch import nn +from torch.nn import functional as F +from torch.utils.data import DataLoader, random_split +from torchmetrics import Accuracy +from torchvision import transforms +from torchvision.datasets import MNIST + +PATH_DATASETS = os.environ.get("PATH_DATASETS", ".") +BATCH_SIZE = 256 if torch.cuda.is_available() else 64 +# %% + +print("prior to running the trainer") +print("MASTER_ADDR: is ", os.getenv("MASTER_ADDR")) +print("MASTER_PORT: is ", os.getenv("MASTER_PORT")) + + +class LitMNIST(LightningModule): + def __init__(self, data_dir=PATH_DATASETS, hidden_size=64, learning_rate=2e-4): + super().__init__() + + # Set our init args as class attributes + self.data_dir = data_dir + self.hidden_size = hidden_size + self.learning_rate = learning_rate + + # Hardcode some dataset specific attributes + self.num_classes = 10 + self.dims = (1, 28, 28) + channels, width, height = self.dims + self.transform = transforms.Compose( + [ + transforms.ToTensor(), + transforms.Normalize((0.1307,), (0.3081,)), + ] + ) + + # Define PyTorch model + self.model = nn.Sequential( + nn.Flatten(), + nn.Linear(channels * width * height, hidden_size), + nn.ReLU(), + nn.Dropout(0.1), + nn.Linear(hidden_size, hidden_size), + nn.ReLU(), + nn.Dropout(0.1), + nn.Linear(hidden_size, self.num_classes), + ) + + self.val_accuracy = Accuracy() + self.test_accuracy = Accuracy() + + def forward(self, x): + x = self.model(x) + return F.log_softmax(x, dim=1) + + def training_step(self, batch, batch_idx): + x, y = batch + logits = self(x) + loss = F.nll_loss(logits, y) + return loss + + def validation_step(self, batch, batch_idx): + x, y = batch + logits = self(x) + loss = F.nll_loss(logits, y) + preds = torch.argmax(logits, dim=1) + self.val_accuracy.update(preds, y) + + # Calling self.log will surface up scalars for you in TensorBoard + self.log("val_loss", loss, prog_bar=True) + self.log("val_acc", self.val_accuracy, prog_bar=True) + + def test_step(self, batch, batch_idx): + x, y = batch + logits = self(x) + loss = F.nll_loss(logits, y) + preds = torch.argmax(logits, dim=1) + self.test_accuracy.update(preds, y) + + # Calling self.log will surface up scalars for you in TensorBoard + self.log("test_loss", loss, prog_bar=True) + self.log("test_acc", self.test_accuracy, prog_bar=True) + + def configure_optimizers(self): + optimizer = torch.optim.Adam(self.parameters(), lr=self.learning_rate) + return optimizer + + #################### + # DATA RELATED HOOKS + #################### + + def prepare_data(self): + # download + print("Downloading MNIST dataset...") + MNIST(self.data_dir, train=True, download=True) + MNIST(self.data_dir, train=False, download=True) + + def setup(self, stage=None): + # Assign train/val datasets for use in dataloaders + if stage == "fit" or stage is None: + mnist_full = MNIST(self.data_dir, train=True, transform=self.transform) + self.mnist_train, self.mnist_val = random_split(mnist_full, [55000, 5000]) + + # Assign test dataset for use in dataloader(s) + if stage == "test" or stage is None: + self.mnist_test = MNIST( + self.data_dir, train=False, transform=self.transform + ) + + def train_dataloader(self): + return DataLoader(self.mnist_train, batch_size=BATCH_SIZE) + + def val_dataloader(self): + return DataLoader(self.mnist_val, batch_size=BATCH_SIZE) + + def test_dataloader(self): + 
return DataLoader(self.mnist_test, batch_size=BATCH_SIZE) + + +# Init DataLoader from MNIST Dataset + +model = LitMNIST() + +print("GROUP: ", int(os.environ.get("GROUP_WORLD_SIZE", 1))) +print("LOCAL: ", int(os.environ.get("LOCAL_WORLD_SIZE", 1))) + +# Initialize a trainer +trainer = Trainer( + accelerator="auto", + # devices=1 if torch.cuda.is_available() else None, # limiting got iPython runs + max_epochs=5, + callbacks=[TQDMProgressBar(refresh_rate=20)], + num_nodes=int(os.environ.get("GROUP_WORLD_SIZE", 1)), + devices=int(os.environ.get("LOCAL_WORLD_SIZE", 1)), + strategy="ddp", +) + +# Train the model ⚡ +trainer.fit(model) diff --git a/demo-notebooks/additional-demos/requirements.txt b/demo-notebooks/additional-demos/requirements.txt new file mode 100644 index 00000000..5f86ab53 --- /dev/null +++ b/demo-notebooks/additional-demos/requirements.txt @@ -0,0 +1,5 @@ +pytorch_lightning==1.9.5 +ray_lightning +torchmetrics==0.9.1 +torchvision==0.19.0 +minio From e666e0a183663d16846efacc673ea12caee9cb3d Mon Sep 17 00:00:00 2001 From: Srihari Date: Mon, 25 Nov 2024 13:29:37 +0530 Subject: [PATCH 429/496] Add test coverage to validate the functionality of the get_cluster method --- .../e2e/mnist_raycluster_sdk_aw_kind_test.py | 7 +-- tests/e2e/mnist_raycluster_sdk_kind_test.py | 10 ++--- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 11 ++--- tests/e2e/support.py | 43 +++++++++++++++++++ 4 files changed, 58 insertions(+), 13 deletions(-) diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py index 4623a9e5..b94fe252 100644 --- a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -2,7 +2,7 @@ from time import sleep -from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication +from codeflare_sdk import Cluster, ClusterConfiguration from codeflare_sdk.ray.client import RayJobClient import pytest @@ -68,6 +68,9 @@ def run_mnist_raycluster_sdk_kind( cluster.details() self.assert_jobsubmit_withoutlogin_kind(cluster, accelerator, number_of_gpus) + assert_get_cluster_and_jobsubmit( + self, "mnist", accelerator="gpu", number_of_gpus=1 + ) # Assertions @@ -106,8 +109,6 @@ def assert_jobsubmit_withoutlogin_kind(self, cluster, accelerator, number_of_gpu client.delete_job(submission_id) - cluster.down() - def assert_job_completion(self, status): if status == "SUCCEEDED": print(f"Job has completed: '{status}'") diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index 6bfb19af..42d0c46b 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -2,7 +2,7 @@ from time import sleep -from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication +from codeflare_sdk import Cluster, ClusterConfiguration from codeflare_sdk.ray.client import RayJobClient import pytest @@ -44,8 +44,6 @@ def run_mnist_raycluster_sdk_kind( num_workers=1, head_cpu_requests="500m", head_cpu_limits="500m", - head_memory_requests=2, - head_memory_limits=2, worker_cpu_requests="500m", worker_cpu_limits=1, worker_memory_requests=1, @@ -68,6 +66,10 @@ def run_mnist_raycluster_sdk_kind( self.assert_jobsubmit_withoutlogin_kind(cluster, accelerator, number_of_gpus) + assert_get_cluster_and_jobsubmit( + self, "mnist", accelerator="gpu", number_of_gpus=1 + ) + # Assertions def assert_jobsubmit_withoutlogin_kind(self, cluster, accelerator, number_of_gpus): @@ -105,8 +107,6 @@ def 
assert_jobsubmit_withoutlogin_kind(self, cluster, accelerator, number_of_gpu client.delete_job(submission_id) - cluster.down() - def assert_job_completion(self, status): if status == "SUCCEEDED": print(f"Job has completed: '{status}'") diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index d3e69868..212c9784 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -2,7 +2,11 @@ from time import sleep -from codeflare_sdk import Cluster, ClusterConfiguration, TokenAuthentication +from codeflare_sdk import ( + Cluster, + ClusterConfiguration, + TokenAuthentication, +) from codeflare_sdk.ray.client import RayJobClient import pytest @@ -44,8 +48,6 @@ def run_mnist_raycluster_sdk_oauth(self): num_workers=1, head_cpu_requests="500m", head_cpu_limits="500m", - head_memory_requests=4, - head_memory_limits=4, worker_cpu_requests=1, worker_cpu_limits=1, worker_memory_requests=1, @@ -68,6 +70,7 @@ def run_mnist_raycluster_sdk_oauth(self): self.assert_jobsubmit_withoutLogin(cluster) self.assert_jobsubmit_withlogin(cluster) + assert_get_cluster_and_jobsubmit(self, "mnist") # Assertions @@ -132,8 +135,6 @@ def assert_jobsubmit_withlogin(self, cluster): client.delete_job(submission_id) - cluster.down() - def assert_job_completion(self, status): if status == "SUCCEEDED": print(f"Job has completed: '{status}'") diff --git a/tests/e2e/support.py b/tests/e2e/support.py index d76b460c..2ff33e91 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -3,6 +3,7 @@ import random import string import subprocess +from codeflare_sdk import get_cluster from kubernetes import client, config import kubernetes.client from codeflare_sdk.common.kubernetes_cluster.kube_api_helpers import ( @@ -348,3 +349,45 @@ def get_nodes_by_label(self, node_labels): label_selector = ",".join(f"{k}={v}" for k, v in node_labels.items()) nodes = self.api_instance.list_node(label_selector=label_selector) return [node.metadata.name for node in nodes.items] + + +def assert_get_cluster_and_jobsubmit( + self, cluster_name, accelerator=None, number_of_gpus=None +): + # Retrieve the cluster + cluster = get_cluster(cluster_name, self.namespace, False) + + cluster.details() + + # Initialize the job client + client = cluster.job_client + + # Submit a job and get the submission ID + env_vars = ( + get_setup_env_variables(ACCELERATOR=accelerator) + if accelerator + else get_setup_env_variables() + ) + submission_id = client.submit_job( + entrypoint="python mnist.py", + runtime_env={ + "working_dir": "./tests/e2e/", + "pip": "./tests/e2e/mnist_pip_requirements.txt", + "env_vars": env_vars, + }, + entrypoint_num_cpus=1 if number_of_gpus is None else None, + entrypoint_num_gpus=number_of_gpus, + ) + print(f"Submitted job with ID: {submission_id}") + + # Fetch the list of jobs and validate + job_list = client.list_jobs() + print(f"List of Jobs: {job_list}") + + # Validate the number of jobs in the list + assert len(job_list) == 1 + + # Validate the submission ID matches + assert job_list[0].submission_id == submission_id + + cluster.down() From be9763ae046c21c0921a87faab34ef0f7633d79a Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Wed, 7 Aug 2024 15:20:02 +0100 Subject: [PATCH 430/496] fix: cluster configuration validation for bool type As bool is a subtype of int, True/False was considered as 1/0 --- src/codeflare_sdk/common/utils/unit_test_support.py | 2 +- src/codeflare_sdk/ray/cluster/config.py | 12 +++++++++--- 
src/codeflare_sdk/ray/cluster/test_config.py | 4 +++- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 9345fbc3..8e034378 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -55,7 +55,7 @@ def createClusterWrongType(): config = ClusterConfiguration( name="unit-test-cluster", namespace="ns", - num_workers=2, + num_workers=True, worker_cpu_requests=[], worker_cpu_limits=4, worker_memory_requests=5, diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index f321c278..b8b097f8 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -242,13 +242,15 @@ def _memory_to_resource(self): def _validate_types(self): """Validate the types of all fields in the ClusterConfiguration dataclass.""" + errors = [] for field_info in fields(self): value = getattr(self, field_info.name) expected_type = field_info.type if not self._is_type(value, expected_type): - raise TypeError( - f"'{field_info.name}' should be of type {expected_type}" - ) + errors.append(f"'{field_info.name}' should be of type {expected_type}.") + + if errors: + raise TypeError("Type validation failed:\n" + "\n".join(errors)) @staticmethod def _is_type(value, expected_type): @@ -268,6 +270,10 @@ def check_type(value, expected_type): ) if origin_type is tuple: return all(check_type(elem, etype) for elem, etype in zip(value, args)) + if expected_type is int: + return isinstance(value, int) and not isinstance(value, bool) + if expected_type is bool: + return isinstance(value, bool) return isinstance(value, expected_type) return check_type(value, expected_type) diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index 1423fc2b..3416fc28 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -108,9 +108,11 @@ def test_all_config_params_aw(mocker): def test_config_creation_wrong_type(): - with pytest.raises(TypeError): + with pytest.raises(TypeError) as error_info: createClusterWrongType() + assert len(str(error_info.value).splitlines()) == 4 + def test_cluster_config_deprecation_conversion(mocker): config = ClusterConfiguration( From 3e7a96031dd74034b7fe2245e47871926c8a1d15 Mon Sep 17 00:00:00 2001 From: Akram Ben Aissi Date: Thu, 12 Dec 2024 17:16:37 +0100 Subject: [PATCH 431/496] Exclude unit tests code from coverage --- .github/workflows/coverage-badge.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/coverage-badge.yaml b/.github/workflows/coverage-badge.yaml index bae1212d..ed1b08e4 100644 --- a/.github/workflows/coverage-badge.yaml +++ b/.github/workflows/coverage-badge.yaml @@ -26,7 +26,7 @@ jobs: poetry install --with test - name: Generate coverage report run: | - coverage run -m pytest + coverage run -m pytest --omit="src/**/test_*.py,src/codeflare_sdk/common/utils/unit_test_support.py" - name: Coverage Badge uses: tj-actions/coverage-badge-py@v2 From a6252722b93b040c4e7b5fa42df531e6e15b9d54 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Tue, 17 Dec 2024 16:43:53 +0000 Subject: [PATCH 432/496] build: upgrade kubernetes dependency --- poetry.lock | 24 ++++++++++++++++++------ pyproject.toml | 2 +- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 413122c1..2c9b713e 
100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" @@ -880,6 +880,17 @@ files = [ {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] +[[package]] +name = "durationpy" +version = "0.9" +description = "Module for converting between datetime.timedelta and Go's Duration strings." +optional = false +python-versions = "*" +files = [ + {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, + {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, +] + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -1749,23 +1760,24 @@ files = [ [[package]] name = "kubernetes" -version = "26.1.0" +version = "31.0.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" files = [ - {file = "kubernetes-26.1.0-py2.py3-none-any.whl", hash = "sha256:e3db6800abf7e36c38d2629b5cb6b74d10988ee0cba6fba45595a7cbe60c0042"}, - {file = "kubernetes-26.1.0.tar.gz", hash = "sha256:5854b0c508e8d217ca205591384ab58389abdae608576f9c9afc35a3c76a366c"}, + {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, + {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, ] [package.dependencies] certifi = ">=14.05.14" +durationpy = ">=0.7" google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" python-dateutil = ">=2.5.3" pyyaml = ">=5.4.1" requests = "*" requests-oauthlib = "*" -setuptools = ">=21.0.0" six = ">=1.9.0" urllib3 = ">=1.24.2" websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" @@ -4212,4 +4224,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "a8bc47ba5ed0d0d30a52b50bf986b85da8cdc8b85f9dbe47ebae32dab695e95f" +content-hash = "6e5e74183be00c02f89e0da419aec0b032025410f8d2ae8685263fb5bc38b911" diff --git a/pyproject.toml b/pyproject.toml index ef88b255..d31ea2e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ python = "^3.9" openshift-client = "1.0.18" rich = ">=12.5,<14.0" ray = {version = "2.35.0", extras = ["data", "default"]} -kubernetes = ">= 25.3.0, < 27" +kubernetes = ">= 27.2.0" cryptography = "43.0.3" executing = "1.2.0" pydantic = "< 2" From 3ca9f199b6dd8d476d18802c63a33288e669d8a0 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Wed, 18 Dec 2024 10:10:04 +0000 Subject: [PATCH 433/496] fix: ipywidget test to expect new error message --- ui-tests/tests/widget_notebook_example.test.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ui-tests/tests/widget_notebook_example.test.ts b/ui-tests/tests/widget_notebook_example.test.ts index d37c225c..3360ba83 100644 --- a/ui-tests/tests/widget_notebook_example.test.ts +++ b/ui-tests/tests/widget_notebook_example.test.ts @@ -81,8 +81,8 @@ test.describe("Visual Regression", () => { await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { await button.click(); - const clusterDownMessage = await page.waitForSelector('text=No instances found, nothing to be done.', { timeout: 5000 }); - expect(clusterDownMessage).not.toBeNull(); + 
const clusterDownMessage = await page.waitForSelector('text=The requested resource could not be located.', { timeout: 5000 }); + expect(await clusterDownMessage.innerText()).toContain('The requested resource could not be located.'); }); await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Up")', async (button) => { From 3aaaaa58a57263c97c769926195cc139e9c811d3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 10:59:52 +0000 Subject: [PATCH 434/496] build(deps): bump nanoid from 3.3.7 to 3.3.8 in /ui-tests Bumps [nanoid](https://github.com/ai/nanoid) from 3.3.7 to 3.3.8. - [Release notes](https://github.com/ai/nanoid/releases) - [Changelog](https://github.com/ai/nanoid/blob/main/CHANGELOG.md) - [Commits](https://github.com/ai/nanoid/compare/3.3.7...3.3.8) --- updated-dependencies: - dependency-name: nanoid dependency-type: indirect ... Signed-off-by: dependabot[bot] --- ui-tests/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ui-tests/yarn.lock b/ui-tests/yarn.lock index cf0b5b0c..bf9629eb 100644 --- a/ui-tests/yarn.lock +++ b/ui-tests/yarn.lock @@ -2059,9 +2059,9 @@ ms@2.0.0: integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== nanoid@^3.3.7: - version "3.3.7" - resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8" - integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g== + version "3.3.8" + resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz#b1be3030bee36aaff18bacb375e5cce521684baf" + integrity sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w== node-fetch@^2.6.7: version "2.7.0" From c8e77fca264b32620b16d7b461855ad4c33de166 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Wed, 18 Dec 2024 11:09:13 +0000 Subject: [PATCH 435/496] ci: fix coverage badge wf and omit test files from unit test coverage --- .github/workflows/coverage-badge.yaml | 2 +- .github/workflows/unit-tests.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/coverage-badge.yaml b/.github/workflows/coverage-badge.yaml index ed1b08e4..af273ce9 100644 --- a/.github/workflows/coverage-badge.yaml +++ b/.github/workflows/coverage-badge.yaml @@ -26,7 +26,7 @@ jobs: poetry install --with test - name: Generate coverage report run: | - coverage run -m pytest --omit="src/**/test_*.py,src/codeflare_sdk/common/utils/unit_test_support.py" + coverage run --omit="src/**/test_*.py,src/codeflare_sdk/common/utils/unit_test_support.py" -m pytest - name: Coverage Badge uses: tj-actions/coverage-badge-py@v2 diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 267e1a6c..51934628 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -26,7 +26,7 @@ jobs: poetry install --with test - name: Test with pytest and check coverage run: | - coverage run -m pytest + coverage run --omit="src/**/test_*.py,src/codeflare_sdk/common/utils/unit_test_support.py" -m pytest coverage=$(coverage report -m | tail -1 | tail -c 4 | head -c 2) if (( $coverage < 90 )); then echo "Coverage failed at ${coverage}%"; exit 1; else echo "Coverage passed, ${coverage}%"; fi - name: Upload to Codecov From 814efb4cf557458957ce01f81569cb9bedda10f2 Mon Sep 17 00:00:00 2001 
From: Ignas Baranauskas Date: Thu, 2 Jan 2025 10:30:57 +0000 Subject: [PATCH 436/496] Rename GPU GitHub runner to avoid version confusion (#787) --- .github/workflows/e2e_tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index fea42ab6..61781aed 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -21,7 +21,7 @@ env: jobs: kubernetes: - runs-on: ubuntu-20.04-4core-gpu + runs-on: gpu-t4-4-core steps: - name: Checkout code From 757a23c89eebf4868931a8b85982dfcbe0f764d1 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Thu, 19 Dec 2024 13:05:04 +0000 Subject: [PATCH 437/496] update GitHub workflows to use Python 3.11 by default --- .../workflows/additional_demo_notebook_tests.yaml | 4 ++-- .github/workflows/coverage-badge.yaml | 4 ++-- .github/workflows/e2e_tests.yaml | 2 +- .github/workflows/guided_notebook_tests.yaml | 6 +++--- .github/workflows/release.yaml | 2 +- .github/workflows/ui_notebooks_test.yaml | 2 +- .github/workflows/unit-tests.yml | 12 ++++++------ 7 files changed, 16 insertions(+), 16 deletions(-) diff --git a/.github/workflows/additional_demo_notebook_tests.yaml b/.github/workflows/additional_demo_notebook_tests.yaml index 03201e10..a081f9ee 100644 --- a/.github/workflows/additional_demo_notebook_tests.yaml +++ b/.github/workflows/additional_demo_notebook_tests.yaml @@ -50,7 +50,7 @@ jobs: - name: Set up specific Python version uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.11' cache: 'pip' # caching pip dependencies - name: Setup and start KinD cluster @@ -168,7 +168,7 @@ jobs: - name: Set up specific Python version uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.11' cache: 'pip' # caching pip dependencies - name: Setup and start KinD cluster diff --git a/.github/workflows/coverage-badge.yaml b/.github/workflows/coverage-badge.yaml index af273ce9..e2fdddae 100644 --- a/.github/workflows/coverage-badge.yaml +++ b/.github/workflows/coverage-badge.yaml @@ -13,10 +13,10 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Set up Python 3.9 + - name: Set up Python 3.11 uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: 3.11 - name: Install dependencies run: | python -m pip install --upgrade pip diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index 61781aed..fca6d6e7 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -56,7 +56,7 @@ jobs: - name: Set up specific Python version uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.11' cache: 'pip' # caching pip dependencies - name: Setup NVidia GPU environment for KinD diff --git a/.github/workflows/guided_notebook_tests.yaml b/.github/workflows/guided_notebook_tests.yaml index 7a77d5a3..71a82284 100644 --- a/.github/workflows/guided_notebook_tests.yaml +++ b/.github/workflows/guided_notebook_tests.yaml @@ -49,7 +49,7 @@ jobs: - name: Set up specific Python version uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.11' cache: 'pip' # caching pip dependencies - name: Setup and start KinD cluster @@ -160,7 +160,7 @@ jobs: - name: Set up specific Python version uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.11' cache: 'pip' # caching pip dependencies - name: Setup NVidia GPU environment for KinD @@ -282,7 +282,7 @@ jobs: - name: Set up specific Python 
version uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.11' cache: 'pip' # caching pip dependencies - name: Setup NVidia GPU environment for KinD diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index ddc23b5a..50e3f9e1 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -17,7 +17,7 @@ on: default: 'project-codeflare' python_version: type: string - default: "3.8" + default: "3.11" required: true poetry_version: type: string diff --git a/.github/workflows/ui_notebooks_test.yaml b/.github/workflows/ui_notebooks_test.yaml index 28f7e06c..6d82c49e 100644 --- a/.github/workflows/ui_notebooks_test.yaml +++ b/.github/workflows/ui_notebooks_test.yaml @@ -49,7 +49,7 @@ jobs: - name: Set up specific Python version uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.11" cache: "pip" # caching pip dependencies - name: Setup and start KinD cluster diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 51934628..cbe3603d 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -10,16 +10,16 @@ jobs: unit-tests: runs-on: ubuntu-latest - container: - image: quay.io/project-codeflare/codeflare-sdk-precommit:v0.0.3 steps: - uses: actions/checkout@v4 + + - name: Set up python + uses: actions/setup-python@v5 + with: + python-version: '3.11' - name: Install poetry run: pip install poetry - - uses: actions/setup-python@v5 - with: - python-version: '3.8' - - name: Install dependencies + - name: Install dependencies with poetry run: | poetry config virtualenvs.create false poetry lock --no-update From 0c30442931e70b64e67754f0fd8c0da19977018b Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Thu, 19 Dec 2024 17:45:14 +0000 Subject: [PATCH 438/496] update unit tests to work on both Python 3.9 and 3.11 --- src/codeflare_sdk/common/kueue/test_kueue.py | 34 +++++++++++-------- .../common/utils/unit_test_support.py | 28 +++++++++++++++ src/codeflare_sdk/ray/cluster/test_config.py | 19 ++++++----- tests/test_cluster_yamls/kueue/aw_kueue.yaml | 4 +-- .../kueue/ray_cluster_kueue.yaml | 4 +-- .../ray/default-appwrapper.yaml | 4 +-- .../ray/default-ray-cluster.yaml | 4 +-- 7 files changed, 67 insertions(+), 30 deletions(-) diff --git a/src/codeflare_sdk/common/kueue/test_kueue.py b/src/codeflare_sdk/common/kueue/test_kueue.py index 77095d4d..0093058c 100644 --- a/src/codeflare_sdk/common/kueue/test_kueue.py +++ b/src/codeflare_sdk/common/kueue/test_kueue.py @@ -11,7 +11,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-from ..utils.unit_test_support import get_local_queue, createClusterConfig +from ..utils.unit_test_support import ( + apply_template, + get_local_queue, + createClusterConfig, + get_template_variables, +) from unittest.mock import patch from codeflare_sdk.ray.cluster.cluster import Cluster, ClusterConfiguration import yaml @@ -52,21 +57,21 @@ def test_cluster_creation_no_aw_local_queue(mocker): config.local_queue = "local-queue-default" cluster = Cluster(config) assert cluster.resource_yaml == f"{aw_dir}unit-test-cluster-kueue.yaml" - assert filecmp.cmp( - f"{aw_dir}unit-test-cluster-kueue.yaml", + expected_rc = apply_template( f"{parent}/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml", - shallow=True, + get_template_variables(), ) + with open(f"{aw_dir}unit-test-cluster-kueue.yaml", "r") as f: + cluster_kueue = yaml.load(f, Loader=yaml.FullLoader) + assert cluster_kueue == expected_rc + # With resources loaded in memory, no Local Queue specified. config = createClusterConfig() config.name = "unit-test-cluster-kueue" config.write_to_file = False cluster = Cluster(config) - - with open(f"{parent}/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml") as f: - expected_rc = yaml.load(f, Loader=yaml.FullLoader) - assert cluster.resource_yaml == expected_rc + assert cluster.resource_yaml == expected_rc def test_aw_creation_local_queue(mocker): @@ -86,12 +91,15 @@ def test_aw_creation_local_queue(mocker): config.local_queue = "local-queue-default" cluster = Cluster(config) assert cluster.resource_yaml == f"{aw_dir}unit-test-aw-kueue.yaml" - assert filecmp.cmp( - f"{aw_dir}unit-test-aw-kueue.yaml", + expected_rc = apply_template( f"{parent}/tests/test_cluster_yamls/kueue/aw_kueue.yaml", - shallow=True, + get_template_variables(), ) + with open(f"{aw_dir}unit-test-aw-kueue.yaml", "r") as f: + aw_kueue = yaml.load(f, Loader=yaml.FullLoader) + assert aw_kueue == expected_rc + # With resources loaded in memory, no Local Queue specified. config = createClusterConfig() config.name = "unit-test-aw-kueue" @@ -99,9 +107,7 @@ def test_aw_creation_local_queue(mocker): config.write_to_file = False cluster = Cluster(config) - with open(f"{parent}/tests/test_cluster_yamls/kueue/aw_kueue.yaml") as f: - expected_rc = yaml.load(f, Loader=yaml.FullLoader) - assert cluster.resource_yaml == expected_rc + assert cluster.resource_yaml == expected_rc def test_get_local_queue_exists_fail(mocker): diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 8e034378..4182fc93 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import string +import sys from codeflare_sdk.ray.cluster.cluster import ( Cluster, ClusterConfiguration, @@ -255,6 +257,32 @@ def arg_check_del_effect(group, version, namespace, plural, name, *args): assert name == "ray-dashboard-unit-test-cluster-ray" +def apply_template(yaml_file_path, variables): + with open(yaml_file_path, "r") as file: + yaml_content = file.read() + + # Create a Template instance and substitute the variables + template = string.Template(yaml_content) + filled_yaml = template.substitute(variables) + + # Now load the filled YAML into a Python object + return yaml.load(filled_yaml, Loader=yaml.FullLoader) + + +def get_expected_image(): + python_version = sys.version_info + if python_version.major == 3 and python_version.minor == 9: + return "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06" + else: + return "quay.io/modh/ray@sha256:db667df1bc437a7b0965e8031e905d3ab04b86390d764d120e05ea5a5c18d1b4" + + +def get_template_variables(): + return { + "image": get_expected_image(), + } + + def arg_check_apply_effect(group, version, namespace, plural, body, *args): assert namespace == "ns" assert args == tuple() diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index 3416fc28..b16cbaf2 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -13,15 +13,15 @@ # limitations under the License. from codeflare_sdk.common.utils.unit_test_support import ( + apply_template, createClusterWrongType, - get_local_queue, create_cluster_all_config_params, + get_template_variables, ) from codeflare_sdk.ray.cluster.cluster import ClusterConfiguration, Cluster from pathlib import Path import filecmp import pytest -import yaml import os parent = Path(__file__).resolve().parents[4] # project directory @@ -36,9 +36,11 @@ def test_default_cluster_creation(mocker): cluster = Cluster(ClusterConfiguration(name="default-cluster", namespace="ns")) - with open(f"{expected_clusters_dir}/ray/default-ray-cluster.yaml") as f: - expected_rc = yaml.load(f, Loader=yaml.FullLoader) - assert cluster.resource_yaml == expected_rc + expected_rc = apply_template( + f"{expected_clusters_dir}/ray/default-ray-cluster.yaml", + get_template_variables(), + ) + assert cluster.resource_yaml == expected_rc def test_default_appwrapper_creation(mocker): @@ -50,9 +52,10 @@ def test_default_appwrapper_creation(mocker): ClusterConfiguration(name="default-appwrapper", namespace="ns", appwrapper=True) ) - with open(f"{expected_clusters_dir}/ray/default-appwrapper.yaml") as f: - expected_aw = yaml.load(f, Loader=yaml.FullLoader) - assert cluster.resource_yaml == expected_aw + expected_aw = apply_template( + f"{expected_clusters_dir}/ray/default-appwrapper.yaml", get_template_variables() + ) + assert cluster.resource_yaml == expected_aw def test_config_creation_all_parameters(mocker): diff --git a/tests/test_cluster_yamls/kueue/aw_kueue.yaml b/tests/test_cluster_yamls/kueue/aw_kueue.yaml index 402ffb6a..b5b5ae3f 100644 --- a/tests/test_cluster_yamls/kueue/aw_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/aw_kueue.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: "${image}" imagePullPolicy: Always lifecycle: preStop: @@ -103,7 +103,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - 
image: "${image}" imagePullPolicy: Always lifecycle: preStop: diff --git a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml index a5cb3616..ad179a0b 100644 --- a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: "${image}" imagePullPolicy: Always lifecycle: preStop: @@ -103,7 +103,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: "${image}" imagePullPolicy: Always lifecycle: preStop: diff --git a/tests/test_cluster_yamls/ray/default-appwrapper.yaml b/tests/test_cluster_yamls/ray/default-appwrapper.yaml index 3e97474d..eadfeaa0 100644 --- a/tests/test_cluster_yamls/ray/default-appwrapper.yaml +++ b/tests/test_cluster_yamls/ray/default-appwrapper.yaml @@ -36,7 +36,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: "${image}" imagePullPolicy: Always lifecycle: preStop: @@ -101,7 +101,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: "${image}" imagePullPolicy: Always lifecycle: preStop: diff --git a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml index 34de53d2..056479e6 100644 --- a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml +++ b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml @@ -28,7 +28,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: "${image}" imagePullPolicy: Always lifecycle: preStop: @@ -93,7 +93,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: "${image}" imagePullPolicy: Always lifecycle: preStop: From 36f578d006f45fd21d6c3fd5c536c49087be72f3 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Thu, 19 Dec 2024 19:21:02 +0000 Subject: [PATCH 439/496] update contributing guide and docs to recommend Python 3.11 --- CONTRIBUTING.md | 2 +- docs/sphinx/user-docs/e2e.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 884632da..8a87bad4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -6,7 +6,7 @@ Thank you for your interest in contributing to the CodeFlare SDK! ### Prerequisites -- Python 3.9 +- Python 3.11 - [Poetry](https://python-poetry.org/) ### Setting Up Your Development Environment diff --git a/docs/sphinx/user-docs/e2e.rst b/docs/sphinx/user-docs/e2e.rst index 846536f1..6f3d1462 100644 --- a/docs/sphinx/user-docs/e2e.rst +++ b/docs/sphinx/user-docs/e2e.rst @@ -4,7 +4,7 @@ Running e2e tests locally Pre-requisites ^^^^^^^^^^^^^^ -- We recommend using Python 3.9, along with Poetry. +- We recommend using Python 3.11, along with Poetry. 
On KinD clusters ---------------- From 7cb6631ffa77c5cdbb7a55eb6f3b0c309ae02aed Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 2 Jan 2025 11:21:43 +0000 Subject: [PATCH 440/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index 59d64b37..c1490035 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 93% - 93% + 90% + 90% From 1225d3888f1090acb17270d92fd8118cca4ab63a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 Jan 2025 12:11:44 +0000 Subject: [PATCH 441/496] build(deps): bump jinja2 from 3.1.4 to 3.1.5 Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.4 to 3.1.5. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.4...3.1.5) --- updated-dependencies: - dependency-name: jinja2 dependency-type: indirect ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2c9b713e..88224764 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1457,13 +1457,13 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] [package.dependencies] From 28ef1380d75feede4e19030ebe6145c32908be7b Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Thu, 2 Jan 2025 13:42:50 +0100 Subject: [PATCH 442/496] Rename GPU GitHub runner to avoid version confusion --- .github/workflows/guided_notebook_tests.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/guided_notebook_tests.yaml b/.github/workflows/guided_notebook_tests.yaml index 71a82284..dc44a4bf 100644 --- a/.github/workflows/guided_notebook_tests.yaml +++ b/.github/workflows/guided_notebook_tests.yaml @@ -125,7 +125,7 @@ jobs: verify-1_cluster_job_client: if: ${{ contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') }} - runs-on: ubuntu-20.04-4core-gpu + runs-on: gpu-t4-4-core steps: - name: Checkout code @@ -247,7 +247,7 @@ jobs: verify-2_basic_interactive: if: ${{ contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') }} - runs-on: ubuntu-20.04-4core-gpu + runs-on: gpu-t4-4-core steps: - name: Checkout code From 6936c2ccfbd7e3fd1aa300e834c89307112101dc Mon Sep 17 00:00:00 2001 From: ChristianZaccaria Date: Thu, 2 Jan 2025 12:55:20 +0000 Subject: [PATCH 443/496] Run workflow on labeling a pull request --- .github/workflows/guided_notebook_tests.yaml | 1 + .github/workflows/ui_notebooks_test.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/guided_notebook_tests.yaml 
b/.github/workflows/guided_notebook_tests.yaml index dc44a4bf..5c241615 100644 --- a/.github/workflows/guided_notebook_tests.yaml +++ b/.github/workflows/guided_notebook_tests.yaml @@ -3,6 +3,7 @@ name: Guided notebooks tests on: pull_request: branches: [ main ] + types: [ labeled ] concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} diff --git a/.github/workflows/ui_notebooks_test.yaml b/.github/workflows/ui_notebooks_test.yaml index 6d82c49e..d18d0e61 100644 --- a/.github/workflows/ui_notebooks_test.yaml +++ b/.github/workflows/ui_notebooks_test.yaml @@ -3,6 +3,7 @@ name: UI notebooks tests on: pull_request: branches: [ main ] + types: [ labeled ] concurrency: group: ${{ github.head_ref }}-${{ github.workflow }} From 77e79524930296e007e7e11e10114331046d6eff Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 19 Dec 2024 12:03:44 +0000 Subject: [PATCH 444/496] feat: add annotations parameter to cluster configuration --- src/codeflare_sdk/ray/cluster/build_ray_cluster.py | 7 +++---- src/codeflare_sdk/ray/cluster/config.py | 3 +++ 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index e590d483..11171338 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -191,7 +191,7 @@ def get_metadata(cluster: "codeflare_sdk.ray.cluster.Cluster"): ) # Get the NB annotation if it exists - could be useful in future for a "annotations" parameter. - annotations = get_nb_annotations() + annotations = with_nb_annotations(cluster.config.annotations) if annotations != {}: object_meta.annotations = annotations # As annotations are not a guarantee they are appended to the metadata after creation. return object_meta @@ -213,11 +213,10 @@ def get_labels(cluster: "codeflare_sdk.ray.cluster.Cluster"): return labels -def get_nb_annotations(): +def with_nb_annotations(annotations: dict): """ - The get_nb_annotations() function generates the annotation for NB Prefix if the SDK is running in a notebook + The with_nb_annotations() function generates the annotation for NB Prefix if the SDK is running in a notebook and appends any user set annotations """ - annotations = {} # Notebook annotation nb_prefix = os.environ.get("NB_PREFIX") diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index b8b097f8..e3069029 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -89,6 +89,8 @@ class ClusterConfiguration: A dictionary of custom resource mappings to map extended resource requests to RayCluster resource names overwrite_default_resource_mapping: A boolean indicating whether to overwrite the default resource mapping. + annotations: + A dictionary of annotations to apply to the cluster. 
""" name: str @@ -126,6 +128,7 @@ class ClusterConfiguration: extended_resource_mapping: Dict[str, str] = field(default_factory=dict) overwrite_default_resource_mapping: bool = False local_queue: Optional[str] = None + annotations: Dict[str, str] = field(default_factory=dict) def __post_init__(self): if not self.verify_tls: From 2fc94881c9970362bd3b12d968fac31ec5902729 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 19 Dec 2024 12:05:16 +0000 Subject: [PATCH 445/496] test: add unit test for annotations parameter --- src/codeflare_sdk/common/utils/unit_test_support.py | 1 + src/codeflare_sdk/ray/cluster/test_config.py | 5 +++++ .../test_cluster_yamls/appwrapper/unit-test-all-params.yaml | 2 ++ tests/test_cluster_yamls/ray/unit-test-all-params.yaml | 2 ++ 4 files changed, 10 insertions(+) diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 4182fc93..388b6b4e 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -442,5 +442,6 @@ def create_cluster_all_config_params(mocker, cluster_name, is_appwrapper) -> Clu extended_resource_mapping={"example.com/gpu": "GPU", "intel.com/gpu": "TPU"}, overwrite_default_resource_mapping=True, local_queue="local-queue-default", + annotations={"key1": "value1", "key2": "value2"}, ) return Cluster(config) diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index b16cbaf2..aafb23dd 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -93,6 +93,11 @@ def test_config_creation_all_parameters(mocker): ) assert cluster.config.overwrite_default_resource_mapping == True assert cluster.config.local_queue == "local-queue-default" + assert cluster.config.annotations == { + "app.kubernetes.io/managed-by": "test-prefix", + "key1": "value1", + "key2": "value2", + } assert filecmp.cmp( f"{aw_dir}test-all-params.yaml", diff --git a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml index 6d2c5440..36d186d1 100644 --- a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml @@ -13,6 +13,8 @@ spec: metadata: annotations: app.kubernetes.io/managed-by: test-prefix + key1: value1 + key2: value2 labels: controller-tools.k8s.io: '1.0' key1: value1 diff --git a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml index 8426eede..a6514bd8 100644 --- a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml @@ -3,6 +3,8 @@ kind: RayCluster metadata: annotations: app.kubernetes.io/managed-by: test-prefix + key1: value1 + key2: value2 labels: controller-tools.k8s.io: '1.0' key1: value1 From 22a24d8769421c1130740289a3998a880ce1f48b Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 19 Dec 2024 12:07:26 +0000 Subject: [PATCH 446/496] docs: update user docs with annotations parameter example --- docs/sphinx/user-docs/cluster-configuration.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index dc3f2cf4..411760f8 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -26,6 +26,7 @@ requirements for creating 
the Ray Cluster. worker_memory_limits=2, # Default 2 # image="", # Optional Field labels={"exampleLabel": "example", "secondLabel": "example"}, + annotations={"key1":"value1", "key2":"value2"}, )) .. note:: From 7fef14b83469fd9e65070aca705b29b094543e53 Mon Sep 17 00:00:00 2001 From: Karel Suta Date: Fri, 3 Jan 2025 12:12:33 +0100 Subject: [PATCH 447/496] Migrate GitHub runners to newest Ubuntu --- .github/workflows/additional_demo_notebook_tests.yaml | 4 ++-- .github/workflows/guided_notebook_tests.yaml | 2 +- .github/workflows/odh-notebooks-sync.yml | 2 +- .github/workflows/ui_notebooks_test.yaml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/additional_demo_notebook_tests.yaml b/.github/workflows/additional_demo_notebook_tests.yaml index a081f9ee..096cb509 100644 --- a/.github/workflows/additional_demo_notebook_tests.yaml +++ b/.github/workflows/additional_demo_notebook_tests.yaml @@ -15,7 +15,7 @@ env: jobs: verify-local_interactive: if: ${{ github.event.label.name == 'test-additional-notebooks' }} - runs-on: ubuntu-20.04-4core + runs-on: ubuntu-latest-4core steps: - name: Checkout code @@ -133,7 +133,7 @@ jobs: verify-ray_job_client: if: ${{ github.event.label.name == 'test-additional-notebooks' }} - runs-on: ubuntu-20.04-4core + runs-on: ubuntu-latest-4core steps: - name: Checkout code diff --git a/.github/workflows/guided_notebook_tests.yaml b/.github/workflows/guided_notebook_tests.yaml index 5c241615..3309c6a1 100644 --- a/.github/workflows/guided_notebook_tests.yaml +++ b/.github/workflows/guided_notebook_tests.yaml @@ -15,7 +15,7 @@ env: jobs: verify-0_basic_ray: if: ${{ contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') }} - runs-on: ubuntu-20.04-4core + runs-on: ubuntu-latest-4core steps: - name: Checkout code diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index f0853bff..c2ae6d3a 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -37,7 +37,7 @@ env: jobs: build: - runs-on: ubuntu-22.04-8core + runs-on: ubuntu-latest-8core steps: - name: Clone repository and Sync run: | diff --git a/.github/workflows/ui_notebooks_test.yaml b/.github/workflows/ui_notebooks_test.yaml index d18d0e61..1b5ad524 100644 --- a/.github/workflows/ui_notebooks_test.yaml +++ b/.github/workflows/ui_notebooks_test.yaml @@ -15,7 +15,7 @@ env: jobs: verify-3_widget_example: if: ${{ contains(github.event.pull_request.labels.*.name, 'test-guided-notebooks') || contains(github.event.pull_request.labels.*.name, 'test-ui-notebooks') }} - runs-on: ubuntu-20.04-4core + runs-on: ubuntu-latest-4core steps: - name: Checkout code From 6b0a3ccee5acdaf2541ea8f034c58dbcab559a70 Mon Sep 17 00:00:00 2001 From: Ignas Baranauskas Date: Mon, 6 Jan 2025 10:05:49 +0000 Subject: [PATCH 448/496] fix: remove --no-update as it is the default in poetry 2.0 --- .github/workflows/coverage-badge.yaml | 2 +- .github/workflows/unit-tests.yml | 2 +- tests/e2e/install-codeflare-sdk.sh | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/coverage-badge.yaml b/.github/workflows/coverage-badge.yaml index e2fdddae..0d2d41f6 100644 --- a/.github/workflows/coverage-badge.yaml +++ b/.github/workflows/coverage-badge.yaml @@ -22,7 +22,7 @@ jobs: python -m pip install --upgrade pip pip install poetry poetry config virtualenvs.create false - poetry lock --no-update + poetry lock poetry install --with test - name: Generate coverage report run: | diff --git 
a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index cbe3603d..6697fc80 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -22,7 +22,7 @@ jobs: - name: Install dependencies with poetry run: | poetry config virtualenvs.create false - poetry lock --no-update + poetry lock poetry install --with test - name: Test with pytest and check coverage run: | diff --git a/tests/e2e/install-codeflare-sdk.sh b/tests/e2e/install-codeflare-sdk.sh index e7808582..8ec5e1e6 100644 --- a/tests/e2e/install-codeflare-sdk.sh +++ b/tests/e2e/install-codeflare-sdk.sh @@ -9,7 +9,7 @@ poetry config virtualenvs.create false cd codeflare-sdk # Lock dependencies and install them -poetry lock --no-update +poetry lock poetry install --with test,docs # Return to the workdir From ca834c7f7dcf61d4a6acb495a1aea20128b2d5bd Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 21 Nov 2024 14:17:07 +0000 Subject: [PATCH 449/496] feat: add custom volumes/volume mounts for ray clusters --- .../ray/cluster/build_ray_cluster.py | 26 ++++++++++++++++--- src/codeflare_sdk/ray/cluster/config.py | 7 +++++ 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index 11171338..a08f3f73 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -249,7 +249,7 @@ def get_pod_spec(cluster: "codeflare_sdk.ray.cluster.Cluster", containers): """ pod_spec = V1PodSpec( containers=containers, - volumes=VOLUMES, + volumes=generate_custom_storage(cluster.config.volumes, VOLUMES), ) if cluster.config.image_pull_secrets != []: pod_spec.image_pull_secrets = generate_image_pull_secrets(cluster) @@ -295,7 +295,9 @@ def get_head_container_spec( cluster.config.head_memory_limits, cluster.config.head_extended_resource_requests, ), - volume_mounts=VOLUME_MOUNTS, + volume_mounts=generate_custom_storage( + cluster.config.volume_mounts, VOLUME_MOUNTS + ), ) if cluster.config.envs != {}: head_container.env = generate_env_vars(cluster) @@ -337,7 +339,9 @@ def get_worker_container_spec( cluster.config.worker_memory_limits, cluster.config.worker_extended_resource_requests, ), - volume_mounts=VOLUME_MOUNTS, + volume_mounts=generate_custom_storage( + cluster.config.volume_mounts, VOLUME_MOUNTS + ), ) if cluster.config.envs != {}: @@ -521,6 +525,22 @@ def wrap_cluster( # Etc. +def generate_custom_storage(provided_storage: list, default_storage: list): + """ + The generate_custom_storage function updates the volumes/volume mounts configs with the default volumes/volume mounts. 
+ """ + storage_list = provided_storage.copy() + + if storage_list == []: + storage_list = default_storage + else: + # We append the list of volumes/volume mounts with the defaults and return the full list + for storage in default_storage: + storage_list.append(storage) + + return storage_list + + def write_to_file(cluster: "codeflare_sdk.ray.cluster.Cluster", resource: dict): """ The write_to_file function writes the built Ray Cluster/AppWrapper dict as a yaml file in the .codeflare folder diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index e3069029..7a78e730 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -22,6 +22,7 @@ import warnings from dataclasses import dataclass, field, fields from typing import Dict, List, Optional, Union, get_args, get_origin +from kubernetes.client import V1Volume, V1VolumeMount dir = pathlib.Path(__file__).parent.parent.resolve() @@ -91,6 +92,10 @@ class ClusterConfiguration: A boolean indicating whether to overwrite the default resource mapping. annotations: A dictionary of annotations to apply to the cluster. + volumes: + A list of V1Volume objects to add to the Cluster + volume_mounts: + A list of V1VolumeMount objects to add to the Cluster """ name: str @@ -129,6 +134,8 @@ class ClusterConfiguration: overwrite_default_resource_mapping: bool = False local_queue: Optional[str] = None annotations: Dict[str, str] = field(default_factory=dict) + volumes: list[V1Volume] = field(default_factory=list) + volume_mounts: list[V1VolumeMount] = field(default_factory=list) def __post_init__(self): if not self.verify_tls: From 7e5f896681611335c4e791d83474c4e8696ffca6 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 21 Nov 2024 14:50:03 +0000 Subject: [PATCH 450/496] test: add volumes/volume mounts tests --- .../common/utils/unit_test_support.py | 42 +++++++++++++++++++ src/codeflare_sdk/ray/cluster/test_config.py | 4 ++ .../appwrapper/unit-test-all-params.yaml | 36 ++++++++++++++++ .../ray/unit-test-all-params.yaml | 36 ++++++++++++++++ 4 files changed, 118 insertions(+) diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 388b6b4e..82f301a2 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -417,6 +417,7 @@ def create_cluster_all_config_params(mocker, cluster_name, is_appwrapper) -> Clu "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) + volumes, volume_mounts = get_example_extended_storage_opts() config = ClusterConfiguration( name=cluster_name, @@ -443,5 +444,46 @@ def create_cluster_all_config_params(mocker, cluster_name, is_appwrapper) -> Clu overwrite_default_resource_mapping=True, local_queue="local-queue-default", annotations={"key1": "value1", "key2": "value2"}, + volumes=volumes, + volume_mounts=volume_mounts, ) return Cluster(config) + + +def get_example_extended_storage_opts(): + from kubernetes.client import ( + V1Volume, + V1VolumeMount, + V1EmptyDirVolumeSource, + V1ConfigMapVolumeSource, + V1KeyToPath, + V1SecretVolumeSource, + ) + + volume_mounts = [ + V1VolumeMount(mount_path="/home/ray/test1", name="test"), + V1VolumeMount( + mount_path="/home/ray/test2", + name="test2", + ), + V1VolumeMount( + mount_path="/home/ray/test2", + name="test3", + ), + ] + + volumes = [ + V1Volume( + name="test", + 
empty_dir=V1EmptyDirVolumeSource(size_limit="500Gi"), + ), + V1Volume( + name="test2", + config_map=V1ConfigMapVolumeSource( + name="config-map-test", + items=[V1KeyToPath(key="test", path="/home/ray/test2/data.txt")], + ), + ), + V1Volume(name="test3", secret=V1SecretVolumeSource(secret_name="test-secret")), + ] + return volumes, volume_mounts diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index aafb23dd..5302e0eb 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -15,6 +15,7 @@ from codeflare_sdk.common.utils.unit_test_support import ( apply_template, createClusterWrongType, + get_example_extended_storage_opts, create_cluster_all_config_params, get_template_variables, ) @@ -64,6 +65,7 @@ def test_config_creation_all_parameters(mocker): expected_extended_resource_mapping = DEFAULT_RESOURCE_MAPPING expected_extended_resource_mapping.update({"example.com/gpu": "GPU"}) expected_extended_resource_mapping["intel.com/gpu"] = "TPU" + volumes, volume_mounts = get_example_extended_storage_opts() cluster = create_cluster_all_config_params(mocker, "test-all-params", False) assert cluster.config.name == "test-all-params" and cluster.config.namespace == "ns" @@ -98,6 +100,8 @@ def test_config_creation_all_parameters(mocker): "key1": "value1", "key2": "value2", } + assert cluster.config.volumes == volumes + assert cluster.config.volume_mounts == volume_mounts assert filecmp.cmp( f"{aw_dir}test-all-params.yaml", diff --git a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml index 36d186d1..e0ecc75d 100644 --- a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml @@ -78,6 +78,12 @@ spec: memory: 12G nvidia.com/gpu: 1 volumeMounts: + - mountPath: /home/ray/test1 + name: test + - mountPath: /home/ray/test2 + name: test2 + - mountPath: /home/ray/test2 + name: test3 - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -94,6 +100,18 @@ spec: - name: secret1 - name: secret2 volumes: + - emptyDir: + sizeLimit: 500Gi + name: test + - configMap: + items: + - key: test + path: /home/ray/test2/data.txt + name: config-map-test + name: test2 + - name: test3 + secret: + secretName: test-secret - configMap: items: - key: ca-bundle.crt @@ -146,6 +164,12 @@ spec: memory: 12G nvidia.com/gpu: 1 volumeMounts: + - mountPath: /home/ray/test1 + name: test + - mountPath: /home/ray/test2 + name: test2 + - mountPath: /home/ray/test2 + name: test3 - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -162,6 +186,18 @@ spec: - name: secret1 - name: secret2 volumes: + - emptyDir: + sizeLimit: 500Gi + name: test + - configMap: + items: + - key: test + path: /home/ray/test2/data.txt + name: config-map-test + name: test2 + - name: test3 + secret: + secretName: test-secret - configMap: items: - key: ca-bundle.crt diff --git a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml index a6514bd8..e743e9fe 100644 --- a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml @@ -69,6 +69,12 @@ spec: memory: 12G nvidia.com/gpu: 1 volumeMounts: + - mountPath: /home/ray/test1 + name: test + - mountPath: /home/ray/test2 + 
name: test2 + - mountPath: /home/ray/test2 + name: test3 - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -85,6 +91,18 @@ spec: - name: secret1 - name: secret2 volumes: + - emptyDir: + sizeLimit: 500Gi + name: test + - configMap: + items: + - key: test + path: /home/ray/test2/data.txt + name: config-map-test + name: test2 + - name: test3 + secret: + secretName: test-secret - configMap: items: - key: ca-bundle.crt @@ -137,6 +155,12 @@ spec: memory: 12G nvidia.com/gpu: 1 volumeMounts: + - mountPath: /home/ray/test1 + name: test + - mountPath: /home/ray/test2 + name: test2 + - mountPath: /home/ray/test2 + name: test3 - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert subPath: odh-trusted-ca-bundle.crt @@ -153,6 +177,18 @@ spec: - name: secret1 - name: secret2 volumes: + - emptyDir: + sizeLimit: 500Gi + name: test + - configMap: + items: + - key: test + path: /home/ray/test2/data.txt + name: config-map-test + name: test2 + - name: test3 + secret: + secretName: test-secret - configMap: items: - key: ca-bundle.crt From 051ee3c470e95d87c88829b81166409a410ece09 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 21 Nov 2024 15:20:54 +0000 Subject: [PATCH 451/496] docs: add user docs for custom volumes/volume mounts --- .../user-docs/cluster-configuration.rst | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index 411760f8..9f9fdddb 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -27,6 +27,8 @@ requirements for creating the Ray Cluster. # image="", # Optional Field labels={"exampleLabel": "example", "secondLabel": "example"}, annotations={"key1":"value1", "key2":"value2"}, + volumes=[], # See Custom Volumes/Volume Mounts + volume_mounts=[], # See Custom Volumes/Volume Mounts )) .. note:: @@ -49,6 +51,53 @@ apply additional labels to the RayCluster resource. After creating their ``cluster``, a user can call ``cluster.up()`` and ``cluster.down()`` to respectively create or remove the Ray Cluster. +Custom Volumes/Volume Mounts +---------------------------- +| To add custom Volumes and Volume Mounts to your Ray Cluster you need to create two lists ``volumes`` and ``volume_mounts``. The lists consist of ``V1Volume`` and ``V1VolumeMount`` objects respectively. +| Populating these parameters will create Volumes and Volume Mounts for the head and each worker pod. + +.. 
code:: python + + from kubernetes.client import V1Volume, V1VolumeMount, V1EmptyDirVolumeSource, V1ConfigMapVolumeSource, V1KeyToPath, V1SecretVolumeSource + # In this example we are using the Config Map, EmptyDir and Secret Volume types + volume_mounts_list = [ + V1VolumeMount( + mount_path="/home/ray/test1", + name = "test" + ), + V1VolumeMount( + mount_path = "/home/ray/test2", + name = "test2", + ), + V1VolumeMount( + mount_path = "/home/ray/test3", + name = "test3", + ) + ] + + volumes_list = [ + V1Volume( + name="test", + empty_dir=V1EmptyDirVolumeSource(size_limit="2Gi"), + ), + V1Volume( + name="test2", + config_map=V1ConfigMapVolumeSource( + name="test-config-map", + items=[V1KeyToPath(key="test", path="data.txt")] + ) + ), + V1Volume( + name="test3", + secret=V1SecretVolumeSource( + secret_name="test-secret" + ) + ) + ] + +| For more information on creating Volumes and Volume Mounts with Python check out the Python Kubernetes docs (`Volumes `__, `Volume Mounts `__). +| You can also find further information on Volumes and Volume Mounts by visiting the Kubernetes `documentation `__. + Deprecating Parameters ---------------------- From d89ee5a5320056c131da258e3c61d6de71bb9549 Mon Sep 17 00:00:00 2001 From: Jiri Petrlik Date: Mon, 6 Jan 2025 14:13:45 +0100 Subject: [PATCH 452/496] RHOAIENG-8098 - ClusterConfiguration should support tolerations --- .../common/utils/unit_test_support.py | 11 +++++++++ .../ray/cluster/build_ray_cluster.py | 24 +++++++++++++++---- src/codeflare_sdk/ray/cluster/config.py | 13 ++++++++-- .../appwrapper/unit-test-all-params.yaml | 10 ++++++++ .../ray/unit-test-all-params.yaml | 10 ++++++++ 5 files changed, 62 insertions(+), 6 deletions(-) diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 82f301a2..373283b8 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -22,6 +22,7 @@ import yaml from pathlib import Path from kubernetes import client +from kubernetes.client import V1Toleration from unittest.mock import patch parent = Path(__file__).resolve().parents[4] # project directory @@ -427,8 +428,18 @@ def create_cluster_all_config_params(mocker, cluster_name, is_appwrapper) -> Clu head_memory_requests=12, head_memory_limits=16, head_extended_resource_requests={"nvidia.com/gpu": 1, "intel.com/gpu": 2}, + head_tolerations=[ + V1Toleration( + key="key1", operator="Equal", value="value1", effect="NoSchedule" + ) + ], worker_cpu_requests=4, worker_cpu_limits=8, + worker_tolerations=[ + V1Toleration( + key="key2", operator="Equal", value="value2", effect="NoSchedule" + ) + ], num_workers=10, worker_memory_requests=12, worker_memory_limits=16, diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index a08f3f73..215ac32e 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -16,7 +16,7 @@ This sub-module exists primarily to be used internally by the Cluster object (in the cluster sub-module) for RayCluster/AppWrapper generation. 
""" -from typing import Union, Tuple, Dict +from typing import List, Union, Tuple, Dict from ...common import _kube_api_error_handling from ...common.kubernetes_cluster import get_api_client, config_check from kubernetes.client.exceptions import ApiException @@ -40,6 +40,7 @@ V1PodTemplateSpec, V1PodSpec, V1LocalObjectReference, + V1Toleration, ) import yaml @@ -139,7 +140,11 @@ def build_ray_cluster(cluster: "codeflare_sdk.ray.cluster.Cluster"): "resources": head_resources, }, "template": { - "spec": get_pod_spec(cluster, [get_head_container_spec(cluster)]) + "spec": get_pod_spec( + cluster, + [get_head_container_spec(cluster)], + cluster.config.head_tolerations, + ) }, }, "workerGroupSpecs": [ @@ -154,7 +159,11 @@ def build_ray_cluster(cluster: "codeflare_sdk.ray.cluster.Cluster"): "resources": worker_resources, }, "template": V1PodTemplateSpec( - spec=get_pod_spec(cluster, [get_worker_container_spec(cluster)]) + spec=get_pod_spec( + cluster, + [get_worker_container_spec(cluster)], + cluster.config.worker_tolerations, + ) ), } ], @@ -243,14 +252,21 @@ def update_image(image) -> str: return image -def get_pod_spec(cluster: "codeflare_sdk.ray.cluster.Cluster", containers): +def get_pod_spec( + cluster: "codeflare_sdk.ray.cluster.Cluster", + containers: List, + tolerations: List[V1Toleration], +) -> V1PodSpec: """ The get_pod_spec() function generates a V1PodSpec for the head/worker containers """ + pod_spec = V1PodSpec( containers=containers, volumes=generate_custom_storage(cluster.config.volumes, VOLUMES), + tolerations=tolerations or None, ) + if cluster.config.image_pull_secrets != []: pod_spec.image_pull_secrets = generate_image_pull_secrets(cluster) diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index 7a78e730..ab64be83 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -22,7 +22,7 @@ import warnings from dataclasses import dataclass, field, fields from typing import Dict, List, Optional, Union, get_args, get_origin -from kubernetes.client import V1Volume, V1VolumeMount +from kubernetes.client import V1Toleration, V1Volume, V1VolumeMount dir = pathlib.Path(__file__).parent.parent.resolve() @@ -58,6 +58,8 @@ class ClusterConfiguration: The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests) head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} + head_tolerations: + List of tolerations for head nodes. min_cpus: The minimum number of CPUs to allocate to each worker. max_cpus: @@ -70,6 +72,8 @@ class ClusterConfiguration: The maximum amount of memory to allocate to each worker. num_gpus: The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests) + worker_tolerations: + List of tolerations for worker nodes. appwrapper: A boolean indicating whether to use an AppWrapper. 
envs: @@ -110,6 +114,7 @@ class ClusterConfiguration: head_extended_resource_requests: Dict[str, Union[str, int]] = field( default_factory=dict ) + head_tolerations: Optional[List[V1Toleration]] = None worker_cpu_requests: Union[int, str] = 1 worker_cpu_limits: Union[int, str] = 1 min_cpus: Optional[Union[int, str]] = None # Deprecating @@ -120,6 +125,7 @@ class ClusterConfiguration: min_memory: Optional[Union[int, str]] = None # Deprecating max_memory: Optional[Union[int, str]] = None # Deprecating num_gpus: Optional[int] = None # Deprecating + worker_tolerations: Optional[List[V1Toleration]] = None appwrapper: bool = False envs: Dict[str, str] = field(default_factory=dict) image: str = "" @@ -272,7 +278,10 @@ def check_type(value, expected_type): if origin_type is Union: return any(check_type(value, union_type) for union_type in args) if origin_type is list: - return all(check_type(elem, args[0]) for elem in value) + if value is not None: + return all(check_type(elem, args[0]) for elem in (value or [])) + else: + return True if origin_type is dict: return all( check_type(k, args[0]) and check_type(v, args[1]) diff --git a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml index e0ecc75d..0977d659 100644 --- a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml @@ -99,6 +99,11 @@ spec: imagePullSecrets: - name: secret1 - name: secret2 + tolerations: + - effect: NoSchedule + key: key1 + operator: Equal + value: value1 volumes: - emptyDir: sizeLimit: 500Gi @@ -185,6 +190,11 @@ spec: imagePullSecrets: - name: secret1 - name: secret2 + tolerations: + - effect: NoSchedule + key: key2 + operator: Equal + value: value2 volumes: - emptyDir: sizeLimit: 500Gi diff --git a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml index e743e9fe..188319ab 100644 --- a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml @@ -90,6 +90,11 @@ spec: imagePullSecrets: - name: secret1 - name: secret2 + tolerations: + - effect: NoSchedule + key: key1 + operator: Equal + value: value1 volumes: - emptyDir: sizeLimit: 500Gi @@ -176,6 +181,11 @@ spec: imagePullSecrets: - name: secret1 - name: secret2 + tolerations: + - effect: NoSchedule + key: key2 + operator: Equal + value: value2 volumes: - emptyDir: sizeLimit: 500Gi From 000d9c370fa7bc5c44681a91371de1c2d8fee637 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 10 Jan 2025 10:17:57 +0000 Subject: [PATCH 453/496] ci: remove intel from image updates in odh sync workflow --- .github/workflows/odh-notebooks-sync.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index c2ae6d3a..0f83eee5 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -56,10 +56,8 @@ jobs: python-version: ${{ env.PYTHON_VERSION }} cache: 'pipenv' - # Sync fails with pipenv 2024.1.0 (current latest version) - # TODO: We should retry with later versions of pipenv once they are available. 
- name: Install pipenv and pip-versions - run: pip install pipenv==2024.0.3 pip-versions + run: pip install pipenv==2024.4.0 pip-versions - name: Update Pipfiles in accordance with Codeflare-SDK latest release run: | @@ -74,7 +72,7 @@ jobs: # replace existing version of cf-sdk with new version in Pipfile sed -i "s/codeflare-sdk = .*$/codeflare-sdk = \"~=$CODEFLARE_RELEASE_VERSION\"/g" Pipfile # Lock dependencies, ensuring pre-release are included and clear previous state - if ! pipenv lock --pre --clear ; then + if ! pipenv lock --verbose --pre --clear ; then echo "Failed to lock dependencies" exit 1 fi @@ -98,7 +96,8 @@ jobs: echo "Version ${CODEFLARE_RELEASE_VERSION} is available for $package_name" # list all Pipfile paths having Codeflare-SDK listed # Extracting only directories from file paths, excluding a `.gitworkflow` and `.git` directory - directories+=($(grep --exclude-dir=.git --exclude-dir=.github --include="Pipfile*" -rl "${package_name} = \"~=.*\"" | xargs dirname | sort | uniq)) + # Extracting Intel directories as they are not supported in RHOAI + directories+=($(grep --exclude-dir=.git --exclude-dir=.github --exclude-dir=intel --exclude-dir=jupyter/intel --include="Pipfile*" -rl "${package_name} = \"~=.*\"" | xargs dirname | sort | uniq)) counter=0 total=${#directories[@]} for dir in "${directories[@]}"; do From b5c13dc205654ef86bcb9538275e976e33e46f92 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Fri, 10 Jan 2025 13:16:47 +0000 Subject: [PATCH 454/496] ci: remove tensorflow from image updates in odh sync workflow --- .github/workflows/odh-notebooks-sync.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index 0f83eee5..0b55890b 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -97,7 +97,8 @@ jobs: # list all Pipfile paths having Codeflare-SDK listed # Extracting only directories from file paths, excluding a `.gitworkflow` and `.git` directory # Extracting Intel directories as they are not supported in RHOAI - directories+=($(grep --exclude-dir=.git --exclude-dir=.github --exclude-dir=intel --exclude-dir=jupyter/intel --include="Pipfile*" -rl "${package_name} = \"~=.*\"" | xargs dirname | sort | uniq)) + # Removing tensorflow image TEMPORARILY until solution has been made for the tf2onnx package dependency resolution + directories+=($(grep --exclude-dir=.git --exclude-dir=.github --exclude-dir=intel --exclude-dir=tensorflow --exclude-dir=rocm-tensorflow --include="Pipfile*" -rl "${package_name} = \"~=.*\"" | xargs dirname | sort | uniq)) counter=0 total=${#directories[@]} for dir in "${directories[@]}"; do From 372345e5687517c5a0ee4f1c750d86b77091fcc7 Mon Sep 17 00:00:00 2001 From: Akram Ben Aissi Date: Tue, 28 Jan 2025 18:14:42 +0100 Subject: [PATCH 455/496] Adds RayCluster.apply() (#778) - Adds RayCluster.apply() implementation - Adds e2e tests for apply - Adds unit tests for apply --- CONTRIBUTING.md | 2 +- .../user-docs/ray-cluster-interaction.rst | 6 + src/codeflare_sdk/common/kueue/test_kueue.py | 133 +++++++++- .../common/utils/unit_test_support.py | 66 ++++- .../common/widgets/test_widgets.py | 4 +- src/codeflare_sdk/ray/cluster/cluster.py | 97 ++++++- src/codeflare_sdk/ray/cluster/test_cluster.py | 238 +++++++++++++++++- src/codeflare_sdk/ray/cluster/test_config.py | 7 +- tests/e2e/cluster_apply_kind_test.py | 156 ++++++++++++ tests/e2e/support.py | 16 ++ 10 files changed, 687 insertions(+), 38 deletions(-) 
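The patch summarised above adds a create-or-update entry point, `cluster.apply()`, alongside `cluster.up()`. A minimal usage sketch of that flow, mirroring the pattern in the new e2e test: the name and namespace are placeholders, and it assumes an authenticated Kubernetes/OpenShift context with the RayCluster CRD (and, if Kueue is in use, a LocalQueue) available.

```python
from codeflare_sdk import Cluster, ClusterConfiguration


def demo_config(workers: int) -> ClusterConfiguration:
    # "demo-raycluster" / "demo-ns" are placeholder values for this sketch.
    return ClusterConfiguration(
        name="demo-raycluster",
        namespace="demo-ns",
        num_workers=workers,
        worker_cpu_requests="500m",
        worker_cpu_limits=1,
        worker_memory_requests="1Gi",
        worker_memory_limits="2Gi",
    )


Cluster(demo_config(1)).apply()            # first apply creates the RayCluster
Cluster(demo_config(2)).apply(force=True)  # re-applying a new spec updates it in place;
                                           # force=True takes ownership of conflicting fields
Cluster(demo_config(1)).down()             # tear the cluster down when finished
```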
create mode 100644 tests/e2e/cluster_apply_kind_test.py diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8a87bad4..1d6371db 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -76,7 +76,7 @@ pytest -v src/codeflare_sdk ### Local e2e Testing -- Please follow the [e2e documentation](https://github.com/project-codeflare/codeflare-sdk/blob/main/docs/e2e.md) +- Please follow the [e2e documentation](https://github.com/project-codeflare/codeflare-sdk/blob/main/docs/sphinx/user-docs/e2e.rst) #### Code Coverage diff --git a/docs/sphinx/user-docs/ray-cluster-interaction.rst b/docs/sphinx/user-docs/ray-cluster-interaction.rst index 8e7929b4..717f8067 100644 --- a/docs/sphinx/user-docs/ray-cluster-interaction.rst +++ b/docs/sphinx/user-docs/ray-cluster-interaction.rst @@ -66,6 +66,12 @@ cluster.up() | The ``cluster.up()`` function creates a Ray Cluster in the given namespace. +cluster.apply() +------------ + +| The ``cluster.apply()`` function applies a Ray Cluster in the given namespace. If the cluster already exists, it is updated. +| If it does not exist it is created. + cluster.down() -------------- diff --git a/src/codeflare_sdk/common/kueue/test_kueue.py b/src/codeflare_sdk/common/kueue/test_kueue.py index 0093058c..bbc54e9e 100644 --- a/src/codeflare_sdk/common/kueue/test_kueue.py +++ b/src/codeflare_sdk/common/kueue/test_kueue.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. from ..utils.unit_test_support import ( - apply_template, get_local_queue, - createClusterConfig, + create_cluster_config, get_template_variables, + apply_template, ) from unittest.mock import patch from codeflare_sdk.ray.cluster.cluster import Cluster, ClusterConfiguration @@ -23,7 +23,7 @@ import os import filecmp from pathlib import Path -from .kueue import list_local_queues +from .kueue import list_local_queues, local_queue_exists, add_queue_label parent = Path(__file__).resolve().parents[4] # project directory aw_dir = os.path.expanduser("~/.codeflare/resources/") @@ -51,7 +51,7 @@ def test_cluster_creation_no_aw_local_queue(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - config = createClusterConfig() + config = create_cluster_config() config.name = "unit-test-cluster-kueue" config.write_to_file = True config.local_queue = "local-queue-default" @@ -67,7 +67,7 @@ def test_cluster_creation_no_aw_local_queue(mocker): assert cluster_kueue == expected_rc # With resources loaded in memory, no Local Queue specified. - config = createClusterConfig() + config = create_cluster_config() config.name = "unit-test-cluster-kueue" config.write_to_file = False cluster = Cluster(config) @@ -84,7 +84,7 @@ def test_aw_creation_local_queue(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - config = createClusterConfig() + config = create_cluster_config() config.name = "unit-test-aw-kueue" config.appwrapper = True config.write_to_file = True @@ -101,7 +101,7 @@ def test_aw_creation_local_queue(mocker): assert aw_kueue == expected_rc # With resources loaded in memory, no Local Queue specified. 
- config = createClusterConfig() + config = create_cluster_config() config.name = "unit-test-aw-kueue" config.appwrapper = True config.write_to_file = False @@ -120,7 +120,7 @@ def test_get_local_queue_exists_fail(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - config = createClusterConfig() + config = create_cluster_config() config.name = "unit-test-aw-kueue" config.appwrapper = True config.write_to_file = True @@ -175,6 +175,123 @@ def test_list_local_queues(mocker): assert lqs == [] +def test_local_queue_exists_found(mocker): + # Mock Kubernetes client and list_namespaced_custom_object method + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mock_api_instance = mocker.Mock() + mocker.patch("kubernetes.client.CustomObjectsApi", return_value=mock_api_instance) + mocker.patch("codeflare_sdk.ray.cluster.cluster.config_check") + + # Mock return value for list_namespaced_custom_object + mock_api_instance.list_namespaced_custom_object.return_value = { + "items": [ + {"metadata": {"name": "existing-queue"}}, + {"metadata": {"name": "another-queue"}}, + ] + } + + # Call the function + namespace = "test-namespace" + local_queue_name = "existing-queue" + result = local_queue_exists(namespace, local_queue_name) + + # Assertions + assert result is True + mock_api_instance.list_namespaced_custom_object.assert_called_once_with( + group="kueue.x-k8s.io", + version="v1beta1", + namespace=namespace, + plural="localqueues", + ) + + +def test_local_queue_exists_not_found(mocker): + # Mock Kubernetes client and list_namespaced_custom_object method + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mock_api_instance = mocker.Mock() + mocker.patch("kubernetes.client.CustomObjectsApi", return_value=mock_api_instance) + mocker.patch("codeflare_sdk.ray.cluster.cluster.config_check") + + # Mock return value for list_namespaced_custom_object + mock_api_instance.list_namespaced_custom_object.return_value = { + "items": [ + {"metadata": {"name": "another-queue"}}, + {"metadata": {"name": "different-queue"}}, + ] + } + + # Call the function + namespace = "test-namespace" + local_queue_name = "non-existent-queue" + result = local_queue_exists(namespace, local_queue_name) + + # Assertions + assert result is False + mock_api_instance.list_namespaced_custom_object.assert_called_once_with( + group="kueue.x-k8s.io", + version="v1beta1", + namespace=namespace, + plural="localqueues", + ) + + +import pytest +from unittest import mock # If you're also using mocker from pytest-mock + + +def test_add_queue_label_with_valid_local_queue(mocker): + # Mock the kubernetes.client.CustomObjectsApi and its response + mock_api_instance = mocker.patch("kubernetes.client.CustomObjectsApi") + mock_api_instance.return_value.list_namespaced_custom_object.return_value = { + "items": [ + {"metadata": {"name": "valid-queue"}}, + ] + } + + # Mock other dependencies + mocker.patch("codeflare_sdk.common.kueue.local_queue_exists", return_value=True) + mocker.patch( + "codeflare_sdk.common.kueue.get_default_kueue_name", + return_value="default-queue", + ) + + # Define input item and parameters + item = {"metadata": {}} + namespace = "test-namespace" + local_queue = "valid-queue" + + # Call the function + add_queue_label(item, namespace, local_queue) + + # Assert that the label is added to the item + assert item["metadata"]["labels"] == {"kueue.x-k8s.io/queue-name": "valid-queue"} + + 
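The tests above and below pin down the contract of `add_queue_label`: it validates the named LocalQueue against the cluster and then writes the `kueue.x-k8s.io/queue-name` label into the resource's metadata, raising `ValueError` when the queue is missing. A small sketch of that behaviour outside the test harness, assuming the helpers are importable from `codeflare_sdk.common.kueue` (the path the mocks above patch), an authenticated context, and a LocalQueue that really exists in the namespace:

```python
from codeflare_sdk.common.kueue import add_queue_label, local_queue_exists

namespace = "demo-ns"          # placeholder namespace
queue = "team-a-local-queue"   # placeholder LocalQueue; must already exist in the namespace

resource = {"metadata": {}}    # any resource dict carrying a metadata block

if local_queue_exists(namespace, queue):
    add_queue_label(resource, namespace, queue)
    # resource["metadata"]["labels"] is now {"kueue.x-k8s.io/queue-name": "team-a-local-queue"}
```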
+def test_add_queue_label_with_invalid_local_queue(mocker): + # Mock the kubernetes.client.CustomObjectsApi and its response + mock_api_instance = mocker.patch("kubernetes.client.CustomObjectsApi") + mock_api_instance.return_value.list_namespaced_custom_object.return_value = { + "items": [ + {"metadata": {"name": "valid-queue"}}, + ] + } + + # Mock the local_queue_exists function to return False + mocker.patch("codeflare_sdk.common.kueue.local_queue_exists", return_value=False) + + # Define input item and parameters + item = {"metadata": {}} + namespace = "test-namespace" + local_queue = "invalid-queue" + + # Call the function and expect a ValueError + with pytest.raises( + ValueError, + match="local_queue provided does not exist or is not in this namespace", + ): + add_queue_label(item, namespace, local_queue) + + # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}unit-test-cluster-kueue.yaml") diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 373283b8..28a30381 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -29,32 +29,34 @@ aw_dir = os.path.expanduser("~/.codeflare/resources/") -def createClusterConfig(): +def create_cluster_config(num_workers=2, write_to_file=False): config = ClusterConfiguration( name="unit-test-cluster", namespace="ns", - num_workers=2, + num_workers=num_workers, worker_cpu_requests=3, worker_cpu_limits=4, worker_memory_requests=5, worker_memory_limits=6, appwrapper=True, - write_to_file=False, + write_to_file=write_to_file, ) return config -def createClusterWithConfig(mocker): - mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") - mocker.patch( - "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", - return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, - ) - cluster = Cluster(createClusterConfig()) +def create_cluster(mocker, num_workers=2, write_to_file=False): + cluster = Cluster(create_cluster_config(num_workers, write_to_file)) return cluster -def createClusterWrongType(): +def patch_cluster_with_dynamic_client(mocker, cluster, dynamic_client=None): + mocker.patch.object(cluster, "get_dynamic_client", return_value=dynamic_client) + mocker.patch.object(cluster, "down", return_value=None) + mocker.patch.object(cluster, "config_check", return_value=None) + # mocker.patch.object(cluster, "_throw_for_no_raycluster", return_value=None) + + +def create_cluster_wrong_type(): config = ClusterConfiguration( name="unit-test-cluster", namespace="ns", @@ -412,6 +414,48 @@ def mocked_ingress(port, cluster_name="unit-test-cluster", annotations: dict = N return mock_ingress +# Global dictionary to maintain state in the mock +cluster_state = {} + + +# The mock side_effect function for server_side_apply +def mock_server_side_apply(resource, body=None, name=None, namespace=None, **kwargs): + # Simulate the behavior of server_side_apply: + # Update a mock state that represents the cluster's current configuration. + # Stores the state in a global dictionary for simplicity. 
+ + global cluster_state + + if not resource or not body or not name or not namespace: + raise ValueError("Missing required parameters for server_side_apply") + + # Extract worker count from the body if it exists + try: + worker_count = ( + body["spec"]["workerGroupSpecs"][0]["replicas"] + if "spec" in body and "workerGroupSpecs" in body["spec"] + else None + ) + except KeyError: + worker_count = None + + # Apply changes to the cluster_state mock + cluster_state[name] = { + "namespace": namespace, + "worker_count": worker_count, + "body": body, + } + + # Return a response that mimics the behavior of a successful apply + return { + "status": "success", + "applied": True, + "name": name, + "namespace": namespace, + "worker_count": worker_count, + } + + @patch.dict("os.environ", {"NB_PREFIX": "test-prefix"}) def create_cluster_all_config_params(mocker, cluster_name, is_appwrapper) -> Cluster: mocker.patch( diff --git a/src/codeflare_sdk/common/widgets/test_widgets.py b/src/codeflare_sdk/common/widgets/test_widgets.py index 12c23854..a7d3de92 100644 --- a/src/codeflare_sdk/common/widgets/test_widgets.py +++ b/src/codeflare_sdk/common/widgets/test_widgets.py @@ -15,7 +15,7 @@ import codeflare_sdk.common.widgets.widgets as cf_widgets import pandas as pd from unittest.mock import MagicMock, patch -from ..utils.unit_test_support import get_local_queue, createClusterConfig +from ..utils.unit_test_support import get_local_queue, create_cluster_config from codeflare_sdk.ray.cluster.cluster import Cluster from codeflare_sdk.ray.cluster.status import ( RayCluster, @@ -38,7 +38,7 @@ def test_cluster_up_down_buttons(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - cluster = Cluster(createClusterConfig()) + cluster = Cluster(create_cluster_config()) with patch("ipywidgets.Button") as MockButton, patch( "ipywidgets.Checkbox" diff --git a/src/codeflare_sdk/ray/cluster/cluster.py b/src/codeflare_sdk/ray/cluster/cluster.py index a3f34554..b42efd87 100644 --- a/src/codeflare_sdk/ray/cluster/cluster.py +++ b/src/codeflare_sdk/ray/cluster/cluster.py @@ -52,9 +52,15 @@ import requests from kubernetes import config +from kubernetes.dynamic import DynamicClient +from kubernetes import client as k8s_client +from kubernetes.client.rest import ApiException + from kubernetes.client.rest import ApiException import warnings +CF_SDK_FIELD_MANAGER = "codeflare-sdk" + class Cluster: """ @@ -84,6 +90,12 @@ def __init__(self, config: ClusterConfiguration): if is_notebook(): cluster_up_down_buttons(self) + def get_dynamic_client(self): # pragma: no cover + return DynamicClient(get_api_client()) + + def config_check(self): + return config_check() + @property def _client_headers(self): k8_client = get_api_client() @@ -95,9 +107,7 @@ def _client_headers(self): @property def _client_verify_tls(self): - if not _is_openshift_cluster or not self.config.verify_tls: - return False - return True + return _is_openshift_cluster and self.config.verify_tls @property def job_client(self): @@ -121,7 +131,6 @@ def create_resource(self): Called upon cluster object creation, creates an AppWrapper yaml based on the specifications of the ClusterConfiguration. """ - if self.config.namespace is None: self.config.namespace = get_current_namespace() if self.config.namespace is None: @@ -130,7 +139,6 @@ def create_resource(self): raise TypeError( f"Namespace {self.config.namespace} is of type {type(self.config.namespace)}. 
Check your Kubernetes Authentication." ) - return build_ray_cluster(self) # creates a new cluster with the provided or default spec @@ -139,10 +147,11 @@ def up(self): Applies the Cluster yaml, pushing the resource request onto the Kueue localqueue. """ - + print( + "WARNING: The up() function is planned for deprecation in favor of apply()." + ) # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError self._throw_for_no_raycluster() - namespace = self.config.namespace try: @@ -176,6 +185,54 @@ def up(self): except Exception as e: # pragma: no cover return _kube_api_error_handling(e) + # Applies a new cluster with the provided or default spec + def apply(self, force=False): + """ + Applies the Cluster yaml using server-side apply. + If 'force' is set to True, conflicts will be forced. + """ + # check if RayCluster CustomResourceDefinition exists if not throw RuntimeError + self._throw_for_no_raycluster() + namespace = self.config.namespace + name = self.config.name + try: + self.config_check() + api_instance = client.CustomObjectsApi(get_api_client()) + crds = self.get_dynamic_client().resources + if self.config.appwrapper: + api_version = "workload.codeflare.dev/v1beta2" + api_instance = crds.get(api_version=api_version, kind="AppWrapper") + # defaulting body to resource_yaml + body = self.resource_yaml + if self.config.write_to_file: + # if write_to_file is True, load the file from AppWrapper yaml and update body + with open(self.resource_yaml) as f: + aw = yaml.load(f, Loader=yaml.FullLoader) + body = aw + api_instance.server_side_apply( + field_manager=CF_SDK_FIELD_MANAGER, + group="workload.codeflare.dev", + version="v1beta2", + namespace=namespace, + plural="appwrappers", + body=body, + force_conflicts=force, + ) + print( + f"AppWrapper: '{name}' configuration has successfully been applied" + ) + else: + api_version = "ray.io/v1" + api_instance = crds.get(api_version=api_version, kind="RayCluster") + self._component_resources_apply( + namespace=namespace, api_instance=api_instance + ) + print(f"Ray Cluster: '{name}' has successfully been applied") + except AttributeError as e: + raise RuntimeError(f"Failed to initialize DynamicClient: {e}") + except Exception as e: # pragma: no cover + return _kube_api_error_handling(e) + def _throw_for_no_raycluster(self): api_instance = client.CustomObjectsApi(get_api_client()) try: @@ -204,7 +261,7 @@ def down(self): resource_name = self.config.name self._throw_for_no_raycluster() try: - config_check() + self.config_check() api_instance = client.CustomObjectsApi(get_api_client()) if self.config.appwrapper: api_instance.delete_namespaced_custom_object( @@ -507,6 +564,16 @@ def _component_resources_up( else: _create_resources(self.resource_yaml, namespace, api_instance) + def _component_resources_apply( + self, namespace: str, api_instance: client.CustomObjectsApi + ): + if self.config.write_to_file: + with open(self.resource_yaml) as f: + ray_cluster = yaml.safe_load(f) + _apply_ray_cluster(ray_cluster, namespace, api_instance) + else: + _apply_ray_cluster(self.resource_yaml, namespace, api_instance) + def _component_resources_down( self, namespace: str, api_instance: client.CustomObjectsApi ): @@ -744,6 +811,20 @@ def _create_resources(yamls, namespace: str, api_instance: client.CustomObjectsA ) +def _apply_ray_cluster( + yamls, namespace: str, api_instance: client.CustomObjectsApi, force=False +): + api_instance.server_side_apply( + field_manager=CF_SDK_FIELD_MANAGER, + group="ray.io", + version="v1", + namespace=namespace, 
+ plural="rayclusters", + body=yamls, + force_conflicts=force, # Allow forcing conflicts if needed + ) + + def _check_aw_exists(name: str, namespace: str) -> bool: try: config_check() diff --git a/src/codeflare_sdk/ray/cluster/test_cluster.py b/src/codeflare_sdk/ray/cluster/test_cluster.py index 5e83c82a..298c416e 100644 --- a/src/codeflare_sdk/ray/cluster/test_cluster.py +++ b/src/codeflare_sdk/ray/cluster/test_cluster.py @@ -19,16 +19,18 @@ list_all_queued, ) from codeflare_sdk.common.utils.unit_test_support import ( - createClusterWithConfig, + create_cluster, arg_check_del_effect, ingress_retrieval, arg_check_apply_effect, get_local_queue, - createClusterConfig, + create_cluster_config, get_ray_obj, get_obj_none, get_ray_obj_with_status, get_aw_obj_with_status, + patch_cluster_with_dynamic_client, + route_list_retrieval, ) from codeflare_sdk.ray.cluster.cluster import _is_openshift_cluster from pathlib import Path @@ -67,11 +69,189 @@ def test_cluster_up_down(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - cluster = cluster = createClusterWithConfig(mocker) + cluster = create_cluster(mocker) cluster.up() cluster.down() +def test_cluster_apply_scale_up_scale_down(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mock_dynamic_client = mocker.Mock() + mocker.patch( + "kubernetes.dynamic.DynamicClient.resources", new_callable=mocker.PropertyMock + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.create_resource", + return_value="./tests/test_cluster_yamls/ray/default-ray-cluster.yaml", + ) + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": {"domain": "apps.cluster.awsroute.org"}}, + ) + + # Initialize test + initial_num_workers = 1 + scaled_up_num_workers = 2 + + # Step 1: Create cluster with initial workers + cluster = create_cluster(mocker, initial_num_workers) + patch_cluster_with_dynamic_client(mocker, cluster, mock_dynamic_client) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_obj_none("ray.io", "v1", "ns", "rayclusters"), + ) + cluster.apply() + + # Step 2: Scale up the cluster + cluster = create_cluster(mocker, scaled_up_num_workers) + patch_cluster_with_dynamic_client(mocker, cluster, mock_dynamic_client) + cluster.apply() + + # Step 3: Scale down the cluster + cluster = create_cluster(mocker, initial_num_workers) + patch_cluster_with_dynamic_client(mocker, cluster, mock_dynamic_client) + cluster.apply() + + # Tear down + cluster.down() + + +def test_cluster_apply_with_file(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mock_dynamic_client = mocker.Mock() + mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") + mocker.patch( + "kubernetes.dynamic.DynamicClient.resources", new_callable=mocker.PropertyMock + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.create_resource", + return_value="./tests/test_cluster_yamls/ray/default-ray-cluster.yaml", + ) + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", + return_value={"spec": 
{"domain": "apps.cluster.awsroute.org"}}, + ) + + # Step 1: Create cluster with initial workers + cluster = create_cluster(mocker, 1, write_to_file=True) + patch_cluster_with_dynamic_client(mocker, cluster, mock_dynamic_client) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_obj_none("ray.io", "v1", "ns", "rayclusters"), + ) + cluster.apply() + # Tear down + cluster.down() + + +def test_cluster_apply_with_appwrapper(mocker): + # Mock Kubernetes client and dynamic client methods + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._check_aw_exists", + return_value=True, + ) + mock_dynamic_client = mocker.Mock() + mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") + mocker.patch( + "kubernetes.dynamic.DynamicClient.resources", new_callable=mocker.PropertyMock + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.create_resource", + return_value="./tests/test_cluster_yamls/ray/default-ray-cluster.yaml", + ) + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + + # Create a cluster configuration with appwrapper set to False + cluster = create_cluster(mocker, 1, write_to_file=False) + patch_cluster_with_dynamic_client(mocker, cluster, mock_dynamic_client) + + # Mock listing RayCluster to simulate it doesn't exist + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_obj_none("ray.io", "v1", "ns", "rayclusters"), + ) + # Call the apply method + cluster.apply() + + # Assertions + print("Cluster applied without AppWrapper.") + + +def test_cluster_apply_without_appwrapper_write_to_file(mocker): + # Mock Kubernetes client and dynamic client methods + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._check_aw_exists", + return_value=True, + ) + mock_dynamic_client = mocker.Mock() + mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") + mocker.patch( + "kubernetes.dynamic.DynamicClient.resources", new_callable=mocker.PropertyMock + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.create_resource", + return_value="./tests/test_cluster_yamls/ray/default-ray-cluster.yaml", + ) + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + + # Create a cluster configuration with appwrapper set to False + cluster = create_cluster(mocker, 1, write_to_file=True) + patch_cluster_with_dynamic_client(mocker, cluster, mock_dynamic_client) + cluster.config.appwrapper = False + + # Mock listing RayCluster to simulate it doesn't exist + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_obj_none("ray.io", "v1", "ns", "rayclusters"), + ) + # Call the apply method + cluster.apply() + + # Assertions + print("Cluster applied without AppWrapper.") + + +def test_cluster_apply_without_appwrapper(mocker): + # Mock Kubernetes client and dynamic client methods + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mock_dynamic_client = mocker.Mock() + mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") + mocker.patch( + 
"kubernetes.dynamic.DynamicClient.resources", new_callable=mocker.PropertyMock + ) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster.Cluster.create_resource", + return_value="./tests/test_cluster_yamls/ray/default-ray-cluster.yaml", + ) + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + + # Create a cluster configuration with appwrapper set to False + cluster = create_cluster(mocker, 1, write_to_file=False) + cluster.config.appwrapper = None + patch_cluster_with_dynamic_client(mocker, cluster, mock_dynamic_client) + + # Mock listing RayCluster to simulate it doesn't exist + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value=get_obj_none("ray.io", "v1", "ns", "rayclusters"), + ) + + # Call the apply method + cluster.apply() + + # Assertions + print("Cluster applied without AppWrapper.") + + def test_cluster_up_down_no_mcad(mocker): mocker.patch("codeflare_sdk.ray.cluster.cluster.Cluster._throw_for_no_raycluster") mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") @@ -98,7 +278,7 @@ def test_cluster_up_down_no_mcad(mocker): "kubernetes.client.CustomObjectsApi.list_cluster_custom_object", return_value={"items": []}, ) - config = createClusterConfig() + config = create_cluster_config() config.name = "unit-test-cluster-ray" config.appwrapper = False cluster = Cluster(config) @@ -117,7 +297,7 @@ def test_cluster_uris(mocker): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - cluster = cluster = createClusterWithConfig(mocker) + cluster = create_cluster(mocker) mocker.patch( "kubernetes.client.NetworkingV1Api.list_namespaced_ingress", return_value=ingress_retrieval( @@ -147,6 +327,52 @@ def test_cluster_uris(mocker): == "Dashboard not available yet, have you run cluster.up()?" 
) + mocker.patch( + "codeflare_sdk.ray.cluster.cluster._is_openshift_cluster", return_value=True + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value={ + "items": [ + { + "metadata": { + "name": "ray-dashboard-unit-test-cluster", + }, + "spec": { + "host": "ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org", + "tls": {}, # Indicating HTTPS + }, + } + ] + }, + ) + cluster = create_cluster(mocker) + assert ( + cluster.cluster_dashboard_uri() + == "http://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" + ) + mocker.patch( + "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", + return_value={ + "items": [ + { + "metadata": { + "name": "ray-dashboard-unit-test-cluster", + }, + "spec": { + "host": "ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org", + "tls": {"termination": "passthrough"}, # Indicating HTTPS + }, + } + ] + }, + ) + cluster = create_cluster(mocker) + assert ( + cluster.cluster_dashboard_uri() + == "https://ray-dashboard-unit-test-cluster-ns.apps.cluster.awsroute.org" + ) + def test_ray_job_wrapping(mocker): import ray @@ -159,7 +385,7 @@ def ray_addr(self, *args): "kubernetes.client.CustomObjectsApi.list_namespaced_custom_object", return_value=get_local_queue("kueue.x-k8s.io", "v1beta1", "ns", "localqueues"), ) - cluster = cluster = createClusterWithConfig(mocker) + cluster = create_cluster(mocker) mocker.patch( "ray.job_submission.JobSubmissionClient._check_connection_and_version_with_url", return_value="None", diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index 5302e0eb..34cc4237 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -14,8 +14,8 @@ from codeflare_sdk.common.utils.unit_test_support import ( apply_template, - createClusterWrongType, get_example_extended_storage_opts, + create_cluster_wrong_type, create_cluster_all_config_params, get_template_variables, ) @@ -59,6 +59,7 @@ def test_default_appwrapper_creation(mocker): assert cluster.resource_yaml == expected_aw +@pytest.mark.filterwarnings("ignore::UserWarning") def test_config_creation_all_parameters(mocker): from codeflare_sdk.ray.cluster.config import DEFAULT_RESOURCE_MAPPING @@ -110,6 +111,7 @@ def test_config_creation_all_parameters(mocker): ) +@pytest.mark.filterwarnings("ignore::UserWarning") def test_all_config_params_aw(mocker): create_cluster_all_config_params(mocker, "aw-all-params", True) assert filecmp.cmp( @@ -121,11 +123,12 @@ def test_all_config_params_aw(mocker): def test_config_creation_wrong_type(): with pytest.raises(TypeError) as error_info: - createClusterWrongType() + create_cluster_wrong_type() assert len(str(error_info.value).splitlines()) == 4 +@pytest.mark.filterwarnings("ignore::UserWarning") def test_cluster_config_deprecation_conversion(mocker): config = ClusterConfiguration( name="test", diff --git a/tests/e2e/cluster_apply_kind_test.py b/tests/e2e/cluster_apply_kind_test.py new file mode 100644 index 00000000..398bf73b --- /dev/null +++ b/tests/e2e/cluster_apply_kind_test.py @@ -0,0 +1,156 @@ +from codeflare_sdk import Cluster, ClusterConfiguration +import pytest +from kubernetes import client + +from support import ( + initialize_kubernetes_client, + create_namespace, + delete_namespace, + get_ray_cluster, +) + + +@pytest.mark.kind +class TestRayClusterApply: + def setup_method(self): + initialize_kubernetes_client(self) + + def teardown_method(self): + 
delete_namespace(self) + + def test_cluster_apply(self): + self.setup_method() + create_namespace(self) + + cluster_name = "test-cluster-apply" + namespace = self.namespace + + # Initial configuration with 1 worker + initial_config = ClusterConfiguration( + name=cluster_name, + namespace=namespace, + num_workers=1, + head_cpu_requests="500m", + head_cpu_limits="1", + head_memory_requests="1Gi", + head_memory_limits="2Gi", + worker_cpu_requests="500m", + worker_cpu_limits="1", + worker_memory_requests="1Gi", + worker_memory_limits="2Gi", + write_to_file=True, + verify_tls=False, + ) + + # Create the cluster + cluster = Cluster(initial_config) + cluster.apply() + + # Wait for the cluster to be ready + cluster.wait_ready() + status = cluster.status() + assert status["ready"], f"Cluster {cluster_name} is not ready: {status}" + + # Verify the cluster is created + ray_cluster = get_ray_cluster(cluster_name, namespace) + assert ray_cluster is not None, "Cluster was not created successfully" + assert ( + ray_cluster["spec"]["workerGroupSpecs"][0]["replicas"] == 1 + ), "Initial worker count does not match" + + # Update configuration with 3 workers + updated_config = ClusterConfiguration( + name=cluster_name, + namespace=namespace, + num_workers=2, + head_cpu_requests="500m", + head_cpu_limits="1", + head_memory_requests="1Gi", + head_memory_limits="2Gi", + worker_cpu_requests="500m", + worker_cpu_limits="1", + worker_memory_requests="1Gi", + worker_memory_limits="2Gi", + write_to_file=True, + verify_tls=False, + ) + + # Apply the updated configuration + cluster.config = updated_config + cluster.apply() + + # Wait for the updated cluster to be ready + cluster.wait_ready() + updated_status = cluster.status() + assert updated_status[ + "ready" + ], f"Cluster {cluster_name} is not ready after update: {updated_status}" + + # Verify the cluster is updated + updated_ray_cluster = get_ray_cluster(cluster_name, namespace) + assert ( + updated_ray_cluster["spec"]["workerGroupSpecs"][0]["replicas"] == 2 + ), "Worker count was not updated" + + # Clean up + cluster.down() + ray_cluster = get_ray_cluster(cluster_name, namespace) + assert ray_cluster is None, "Cluster was not deleted successfully" + + def test_apply_invalid_update(self): + self.setup_method() + create_namespace(self) + + cluster_name = "test-cluster-apply-invalid" + namespace = self.namespace + + # Initial configuration + initial_config = ClusterConfiguration( + name=cluster_name, + namespace=namespace, + num_workers=1, + head_cpu_requests="500m", + head_cpu_limits="1", + head_memory_requests="1Gi", + head_memory_limits="2Gi", + worker_cpu_requests="500m", + worker_cpu_limits="1", + worker_memory_requests="1Gi", + worker_memory_limits="2Gi", + write_to_file=True, + verify_tls=False, + ) + + # Create the cluster + cluster = Cluster(initial_config) + cluster.apply() + + # Wait for the cluster to be ready + cluster.wait_ready() + status = cluster.status() + assert status["ready"], f"Cluster {cluster_name} is not ready: {status}" + + # Update with an invalid configuration (e.g., immutable field change) + invalid_config = ClusterConfiguration( + name=cluster_name, + namespace=namespace, + num_workers=2, + head_cpu_requests="1", + head_cpu_limits="2", # Changing CPU limits (immutable) + head_memory_requests="1Gi", + head_memory_limits="2Gi", + worker_cpu_requests="500m", + worker_cpu_limits="1", + worker_memory_requests="1Gi", + worker_memory_limits="2Gi", + write_to_file=True, + verify_tls=False, + ) + + # Try to apply the invalid configuration and 
expect failure + cluster.config = invalid_config + with pytest.raises(RuntimeError, match="Immutable fields detected"): + cluster.apply() + + # Clean up + cluster.down() diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 2ff33e91..5e4ddbdf 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -11,6 +11,22 @@ ) +def get_ray_cluster(cluster_name, namespace): + api = client.CustomObjectsApi() + try: + return api.get_namespaced_custom_object( + group="ray.io", + version="v1", + namespace=namespace, + plural="rayclusters", + name=cluster_name, + ) + except client.exceptions.ApiException as e: + if e.status == 404: + return None + raise + + def get_ray_image(): default_ray_image = "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06" return os.getenv("RAY_IMAGE", default_ray_image) From c31166558d4527d2cb824eff40d75e8c0b1373c5 Mon Sep 17 00:00:00 2001 From: Bobbins228 Date: Thu, 20 Feb 2025 09:21:47 +0000 Subject: [PATCH 456/496] add ray team to OWNERS file --- OWNERS | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/OWNERS b/OWNERS index f375f431..31e4619c 100644 --- a/OWNERS +++ b/OWNERS @@ -1,23 +1,25 @@ approvers: - astefanutti - Bobbins228 + - chipspeak - ChristianZaccaria - dimakis - Fiona-Waters - franciscojavierarceo - kpostoffice - - maxusmusti - - MichaelClifford + - pmccarthy + - szaher - varshaprasad96 reviewers: - astefanutti - Bobbins228 + - chipspeak - ChristianZaccaria - dimakis - Fiona-Waters - franciscojavierarceo - kpostoffice - - maxusmusti - - MichaelClifford + - pmccarthy + - szaher - varshaprasad96 - Ygnas From d109bb7fd84dfb208758d8355b001343ec18bac6 Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Mon, 14 Apr 2025 16:26:36 +0100 Subject: [PATCH 457/496] task(RHOAIENG-22446): Updated OWNERS with Ray Team Signed-off-by: Pat O'Connor --- OWNERS | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/OWNERS b/OWNERS index 31e4619c..e26dd545 100644 --- a/OWNERS +++ b/OWNERS @@ -7,6 +7,9 @@ approvers: - Fiona-Waters - franciscojavierarceo - kpostoffice + - kryanbeane + - laurafitzgerald + - pawelpaszki - pmccarthy - szaher - varshaprasad96 @@ -19,6 +22,9 @@ reviewers: - Fiona-Waters - franciscojavierarceo - kpostoffice + - kryanbeane + - laurafitzgerald + - pawelpaszki - pmccarthy - szaher - varshaprasad96 From 769c04b1a511e85174cae005e066ac6cfdd1d491 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 15 Apr 2025 10:50:56 +0000 Subject: [PATCH 458/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index c1490035..a8c7e72a 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 90% - 90% + 92% + 92% From f45e02180bb5a7f47ffbf30619773ea0a292eb8e Mon Sep 17 00:00:00 2001 From: "openshift-merge-bot[bot]" Date: Tue, 15 Apr 2025 10:50:57 +0000 Subject: [PATCH 459/496] [create-pull-request] automated change --- poetry.lock | 269 ++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 227 insertions(+), 42 deletions(-) diff --git a/poetry.lock b/poetry.lock index 88224764..34340a62 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -6,6 +6,7 @@ version = "2.4.3" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, @@ -17,6 +18,7 @@ version = "3.10.11" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiohttp-3.10.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5077b1a5f40ffa3ba1f40d537d3bec4383988ee51fbba6b74aa8fb1bc466599e"}, {file = "aiohttp-3.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d6a14a4d93b5b3c2891fca94fa9d41b2322a68194422bef0dd5ec1e57d7d298"}, @@ -121,7 +123,7 @@ multidict = ">=4.5,<7.0" yarl = ">=1.12.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiohttp-cors" @@ -129,6 +131,7 @@ version = "0.7.0" description = "CORS support for aiohttp" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "aiohttp-cors-0.7.0.tar.gz", hash = "sha256:4d39c6d7100fd9764ed1caf8cebf0eb01bf5e3f24e2e073fda6234bc48b19f5d"}, {file = "aiohttp_cors-0.7.0-py3-none-any.whl", hash = "sha256:0451ba59fdf6909d0e2cd21e4c0a43752bc0703d33fc78ae94d9d9321710193e"}, @@ -143,6 +146,7 @@ version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, @@ -157,6 +161,7 @@ version = "0.7.16" description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, @@ -168,6 +173,7 @@ version = "4.6.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, @@ -181,7 +187,7 @@ typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", 
"uvloop (>=0.21.0b1) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -190,6 +196,8 @@ version = "0.1.4" description = "Disable App Nap on macOS >= 10.9" optional = false python-versions = ">=3.6" +groups = ["test"] +markers = "platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, @@ -201,6 +209,7 @@ version = "23.1.0" description = "Argon2 for Python" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, @@ -221,6 +230,7 @@ version = "21.2.0" description = "Low-level CFFI bindings for Argon2" optional = false python-versions = ">=3.6" +groups = ["test"] files = [ {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, @@ -258,6 +268,7 @@ version = "1.3.0" description = "Better dates & times for Python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, @@ -277,6 +288,7 @@ version = "2.4.1" description = "Annotate AST trees with source code positions" optional = false python-versions = "*" +groups = ["main", "test"] files = [ {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, @@ -286,8 +298,8 @@ files = [ six = ">=1.12.0" [package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] +astroid = ["astroid (>=1,<2) ; python_version < \"3\"", "astroid (>=2,<4) ; python_version >= \"3\""] +test = ["astroid (>=1,<2) ; python_version < \"3\"", "astroid (>=2,<4) ; python_version >= \"3\"", "pytest"] [[package]] name = "async-lru" @@ -295,6 +307,7 @@ version = "2.0.4" description = "Simple LRU cache for asyncio" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, @@ -309,6 +322,8 @@ version = "4.0.3" description = "Timeout context manager for asyncio programs" optional = false python-versions = ">=3.7" +groups = ["main"] +markers = "python_version < \"3.11\"" files = [ {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, @@ -320,18 +335,19 @@ 
version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" +groups = ["main", "test"] files = [ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\""] [[package]] name = "babel" @@ -339,6 +355,7 @@ version = "2.16.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" +groups = ["docs", "test"] files = [ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, @@ -353,6 +370,7 @@ version = "4.2.0" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" 
+groups = ["main"] files = [ {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, @@ -393,6 +411,7 @@ version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" +groups = ["test"] files = [ {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, @@ -414,6 +433,7 @@ version = "6.1.0" description = "An easy safelist-based HTML-sanitizing tool." optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, @@ -432,6 +452,7 @@ version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, @@ -443,6 +464,7 @@ version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "docs", "test"] files = [ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, @@ -454,6 +476,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -533,6 +556,7 @@ version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" +groups = ["main", "docs", "test"] files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, @@ -632,6 +656,7 @@ version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, @@ -646,10 +671,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "docs", "test"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\" or sys_platform == \"win32\"", docs = "sys_platform == \"win32\"", test = "sys_platform == \"win32\""} [[package]] name = "colorful" @@ -657,6 +684,7 @@ version = "0.5.6" description = "Terminal string styling done right, in Python." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "colorful-0.5.6-py2.py3-none-any.whl", hash = "sha256:eab8c1c809f5025ad2b5238a50bd691e26850da8cac8f90d660ede6ea1af9f1e"}, {file = "colorful-0.5.6.tar.gz", hash = "sha256:b56d5c01db1dac4898308ea889edcb113fbee3e6ec5df4bacffd61d5241b5b8d"}, @@ -671,6 +699,7 @@ version = "0.2.2" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, @@ -688,6 +717,7 @@ version = "7.6.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" +groups = ["test"] files = [ {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, @@ -754,7 +784,7 @@ files = [ ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" @@ -762,6 +792,7 @@ version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, @@ -811,6 +842,7 @@ version = "1.8.6" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "debugpy-1.8.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:30f467c5345d9dfdcc0afdb10e018e47f092e383447500f125b4e013236bf14b"}, {file = "debugpy-1.8.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d73d8c52614432f4215d0fe79a7e595d0dd162b5c15233762565be2f014803b"}, @@ -842,6 +874,7 @@ version = "5.1.1" description = "Decorators for Humans" optional = false python-versions = ">=3.5" +groups = ["main", "test"] files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, @@ -853,6 +886,7 @@ version = "0.7.1" description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["test"] files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -864,6 +898,7 @@ version = "0.3.8" description = "Distribution utilities" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, @@ -875,6 +910,7 @@ version = "0.20.1" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.7" +groups = ["docs"] files = [ {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, @@ -886,6 +922,7 @@ version = "0.9" description = "Module for converting between datetime.timedelta and Go's Duration strings." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, @@ -897,6 +934,8 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "test"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -911,13 +950,14 @@ version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" optional = false python-versions = "*" +groups = ["main", "test"] files = [ {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, ] [package.extras] -tests = ["asttokens", "littleutils", "pytest", "rich"] +tests = ["asttokens", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] [[package]] name = "fastjsonschema" @@ -925,6 +965,7 @@ version = "2.20.0" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" +groups = ["test"] files = [ {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, @@ -939,6 +980,7 @@ version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, @@ -947,7 +989,7 @@ files = [ [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] -typing = ["typing-extensions (>=4.12.2)"] +typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] [[package]] name = "fqdn" @@ -955,6 +997,7 @@ version = "1.5.1" description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +groups = ["test"] files = [ {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, @@ -966,6 +1009,7 @@ version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, @@ -1052,6 +1096,7 @@ version = "2024.9.0" description = "File-system specification" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b"}, {file = "fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8"}, @@ -1091,6 +1136,7 @@ version = "2.20.0" description = "Google API client core library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google_api_core-2.20.0-py3-none-any.whl", hash = "sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a"}, {file = "google_api_core-2.20.0.tar.gz", hash = "sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f"}, @@ -1104,7 +1150,7 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 requests = ">=2.18.0,<3.0.0.dev0" [package.extras] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0) ; python_version >= \"3.11\""] grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] @@ -1114,6 +1160,7 @@ version = "2.35.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, {file = "google_auth-2.35.0.tar.gz", hash 
= "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, @@ -1137,6 +1184,7 @@ version = "1.65.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, @@ -1154,6 +1202,7 @@ version = "1.66.2" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, @@ -1221,6 +1270,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -1232,6 +1282,7 @@ version = "1.0.5" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, @@ -1253,6 +1304,7 @@ version = "0.27.2" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, @@ -1266,7 +1318,7 @@ idna = "*" sniffio = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -1278,6 +1330,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "docs", "test"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -1292,6 +1345,7 @@ version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["docs"] files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, @@ -1303,6 +1357,8 @@ version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" +groups = ["docs", "test"] +markers = "python_version == \"3.9\"" files = [ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, @@ -1312,12 +1368,12 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] @@ -1326,6 +1382,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1337,6 +1394,7 @@ version = "6.29.5" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, {file = "ipykernel-6.29.5.tar.gz", hash = 
"sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, @@ -1370,6 +1428,7 @@ version = "8.18.1" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" +groups = ["main", "test"] files = [ {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, @@ -1407,6 +1466,7 @@ version = "8.1.2" description = "Jupyter interactive widgets" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "ipywidgets-8.1.2-py3-none-any.whl", hash = "sha256:bbe43850d79fb5e906b14801d6c01402857996864d1e5b6fa62dd2ee35559f60"}, {file = "ipywidgets-8.1.2.tar.gz", hash = "sha256:d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9"}, @@ -1428,6 +1488,7 @@ version = "20.11.0" description = "Operations with ISO 8601 durations" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, @@ -1442,6 +1503,7 @@ version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." optional = false python-versions = ">=3.6" +groups = ["main", "test"] files = [ {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, @@ -1461,10 +1523,12 @@ version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["main", "docs", "test"] files = [ {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] +markers = {main = "sys_platform != \"win32\""} [package.dependencies] MarkupSafe = ">=2.0" @@ -1478,6 +1542,7 @@ version = "0.9.25" description = "A Python implementation of the JSON5 data format." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, @@ -1489,6 +1554,7 @@ version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, @@ -1500,6 +1566,7 @@ version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, @@ -1529,6 +1596,7 @@ version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, @@ -1543,6 +1611,7 @@ version = "8.6.3" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, @@ -1558,7 +1627,7 @@ traitlets = ">=5.3" [package.extras] docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko ; sys_platform == \"win32\"", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-core" @@ -1566,6 +1635,7 @@ version = "5.7.2" description = "Jupyter core package. A base package on which Jupyter projects rely." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, @@ -1586,6 +1656,7 @@ version = "0.10.0" description = "Jupyter Event System library" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, @@ -1611,6 +1682,7 @@ version = "2.2.5" description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, @@ -1626,6 +1698,7 @@ version = "2.14.2" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "jupyter_server-2.14.2-py3-none-any.whl", hash = "sha256:47ff506127c2f7851a17bf4713434208fc490955d0e8632e95014a9a9afbeefd"}, {file = "jupyter_server-2.14.2.tar.gz", hash = "sha256:66095021aa9638ced276c248b1d81862e4c50f292d575920bbe960de1c56b12b"}, @@ -1662,6 +1735,7 @@ version = "0.5.3" description = "A Jupyter Server Extension Providing Terminals." optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, @@ -1681,6 +1755,7 @@ version = "4.3.1" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "jupyterlab-4.3.1-py3-none-any.whl", hash = "sha256:2d9a1c305bc748e277819a17a5d5e22452e533e835f4237b2f30f3b0e491e01f"}, {file = "jupyterlab-4.3.1.tar.gz", hash = "sha256:a4a338327556443521731d82f2a6ccf926df478914ca029616621704d47c3c65"}, @@ -1716,6 +1791,7 @@ version = "0.3.0" description = "Pygments theme using JupyterLab CSS variables" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, @@ -1727,6 +1803,7 @@ version = "2.27.3" description = "A set of server components for JupyterLab and JupyterLab like applications." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"}, {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"}, @@ -1753,6 +1830,7 @@ version = "3.0.13" description = "Jupyter interactive widgets for JupyterLab" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"}, {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"}, @@ -1764,6 +1842,7 @@ version = "31.0.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, @@ -1791,6 +1870,7 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -1815,6 +1895,7 @@ version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" +groups = ["main", "docs", "test"] files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, @@ -1877,6 +1958,7 @@ files = [ {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +markers = {main = "sys_platform != \"win32\""} [[package]] name = "matplotlib-inline" @@ -1884,6 +1966,7 @@ version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, @@ -1898,6 +1981,7 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -1909,6 +1993,8 @@ version = "1.10.0" description = "A memory profiler for Python applications" optional = false python-versions = ">=3.7.0" +groups = ["main"] +markers = "sys_platform != \"win32\"" files = [ {file = 
"memray-1.10.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:843a688877691746f9d1835cfa8a65139948471bdd78720435808d20bc30a1cc"}, {file = "memray-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6937d7ef67d18ccc01c3250cdf3b4ef1445b859ee8756f09e3d11bd3ff0c7d67"}, @@ -1953,10 +2039,10 @@ rich = ">=11.2.0" [package.extras] benchmark = ["asv"] -dev = ["Cython", "IPython", "asv", "black", "bump2version", "check-manifest", "flake8", "furo", "greenlet", "ipython", "isort", "mypy", "pytest", "pytest-cov", "setuptools", "sphinx", "sphinx-argparse", "towncrier"] +dev = ["Cython", "IPython", "asv", "black", "bump2version", "check-manifest", "flake8", "furo", "greenlet ; python_version < \"3.12\"", "ipython", "isort", "mypy", "pytest", "pytest-cov", "setuptools ; python_version >= \"3.12\"", "sphinx", "sphinx-argparse", "towncrier"] docs = ["IPython", "bump2version", "furo", "sphinx", "sphinx-argparse", "towncrier"] lint = ["black", "check-manifest", "flake8", "isort", "mypy"] -test = ["Cython", "greenlet", "ipython", "pytest", "pytest-cov", "setuptools"] +test = ["Cython", "greenlet ; python_version < \"3.12\"", "ipython", "pytest", "pytest-cov", "setuptools ; python_version >= \"3.12\""] [[package]] name = "mistune" @@ -1964,6 +2050,7 @@ version = "3.0.2" description = "A sane and fast Markdown parser with useful plugins and renderers" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, @@ -1975,6 +2062,7 @@ version = "1.1.0" description = "MessagePack serializer" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, @@ -2048,6 +2136,7 @@ version = "6.1.0" description = "multidict implementation" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, @@ -2152,6 +2241,7 @@ version = "0.10.0" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." optional = false python-versions = ">=3.8.0" +groups = ["test"] files = [ {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, @@ -2174,6 +2264,7 @@ version = "7.16.4" description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, @@ -2212,6 +2303,7 @@ version = "5.10.4" description = "The Jupyter Notebook format" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, @@ -2233,6 +2325,7 @@ version = "1.6.0" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" +groups = ["test"] files = [ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, @@ -2244,6 +2337,7 @@ version = "0.2.4" description = "A shim layer for notebook traits and config" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, @@ -2261,6 +2355,7 @@ version = "2.0.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, @@ -2315,6 +2410,7 @@ version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, @@ -2331,6 +2427,7 @@ version = "0.11.4" description = "A stats collection and distributed tracing framework" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "opencensus-0.11.4-py2.py3-none-any.whl", hash = "sha256:a18487ce68bc19900336e0ff4655c5a116daf10c1b3685ece8d971bddad6a864"}, {file = "opencensus-0.11.4.tar.gz", hash = "sha256:cbef87d8b8773064ab60e5c2a1ced58bbaa38a6d052c41aec224958ce544eff2"}, @@ -2347,6 +2444,7 @@ version = "0.1.3" description = "OpenCensus Runtime Context" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "opencensus-context-0.1.3.tar.gz", hash = "sha256:a03108c3c10d8c80bb5ddf5c8a1f033161fa61972a9917f9b9b3a18517f0088c"}, {file = "opencensus_context-0.1.3-py2.py3-none-any.whl", hash = "sha256:073bb0590007af276853009fac7e4bab1d523c3f03baf4cb4511ca38967c6039"}, @@ -2358,6 +2456,7 @@ version = "1.0.18" description = "OpenShift python client" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "openshift-client-1.0.18.tar.gz", hash = 
"sha256:be3979440cfd96788146a3a1650dabe939d4d516eea0b39f87e66d2ab39495b1"}, {file = "openshift_client-1.0.18-py2.py3-none-any.whl", hash = "sha256:d8a84080307ccd9556f6c62a3707a3e6507baedee36fa425754f67db9ded528b"}, @@ -2374,6 +2473,7 @@ version = "7.7.0" description = "A decorator to automatically detect mismatch when overriding a method." optional = false python-versions = ">=3.6" +groups = ["test"] files = [ {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, @@ -2385,6 +2485,7 @@ version = "24.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "docs", "test"] files = [ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, @@ -2396,6 +2497,7 @@ version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, @@ -2482,6 +2584,7 @@ version = "1.5.1" description = "Utilities for writing pandoc filters in python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["test"] files = [ {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, @@ -2493,6 +2596,7 @@ version = "3.5.0" description = "SSH2 protocol library" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "paramiko-3.5.0-py3-none-any.whl", hash = "sha256:1fedf06b085359051cd7d0d270cebe19e755a8a921cc2ddbfa647fb0cd7d68f9"}, {file = "paramiko-3.5.0.tar.gz", hash = "sha256:ad11e540da4f55cedda52931f1a3f812a8238a7af7f62a60de538cd80bb28124"}, @@ -2504,8 +2608,8 @@ cryptography = ">=3.3" pynacl = ">=1.5" [package.extras] -all = ["gssapi (>=1.4.1)", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] -gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"] +all = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] +gssapi = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] invoke = ["invoke (>=2.0)"] [[package]] @@ -2514,6 +2618,7 @@ version = "0.8.4" description = "A Python Parser" optional = false python-versions = ">=3.6" +groups = ["main", "test"] files = [ {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, @@ -2529,6 +2634,8 @@ version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." 
optional = false python-versions = "*" +groups = ["main", "test"] +markers = "sys_platform != \"win32\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -2543,6 +2650,7 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -2559,6 +2667,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -2574,6 +2683,7 @@ version = "0.21.0" description = "Python client for the Prometheus monitoring system." optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"}, {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"}, @@ -2588,6 +2698,7 @@ version = "3.0.48" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" +groups = ["main", "test"] files = [ {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, @@ -2602,6 +2713,7 @@ version = "1.24.0" description = "Beautiful, Pythonic protocol buffers." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, @@ -2619,6 +2731,7 @@ version = "5.28.2" description = "" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"}, {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"}, @@ -2639,6 +2752,7 @@ version = "6.0.0" description = "Cross-platform lib for process and system monitoring in Python." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["test"] files = [ {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, @@ -2660,7 +2774,7 @@ files = [ ] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +test = ["enum34 ; python_version <= \"3.4\"", "ipaddress ; python_version < \"3.0\"", "mock ; python_version < \"3.0\"", "pywin32 ; sys_platform == \"win32\"", "wmi ; sys_platform == \"win32\""] [[package]] name = "ptyprocess" @@ -2668,10 +2782,12 @@ version = "0.7.0" description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" +groups = ["main", "test"] files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] +markers = {main = "sys_platform != \"win32\"", test = "sys_platform != \"win32\" or os_name != \"nt\""} [[package]] name = "pure-eval" @@ -2679,6 +2795,7 @@ version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" +groups = ["main", "test"] files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -2693,6 +2810,7 @@ version = "0.3.14" description = "Sampling profiler for Python programs" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "py_spy-0.3.14-py2.py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5b342cc5feb8d160d57a7ff308de153f6be68dcf506ad02b4d67065f2bae7f45"}, {file = "py_spy-0.3.14-py2.py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fe7efe6c91f723442259d428bf1f9ddb9c1679828866b353d539345ca40d9dd2"}, @@ -2709,6 +2827,7 @@ version = "17.0.0" description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, @@ -2760,6 +2879,7 @@ version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -2771,6 +2891,7 @@ version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, @@ -2785,6 +2906,7 @@ 
version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -2796,6 +2918,7 @@ version = "1.10.18" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pydantic-1.10.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e405ffcc1254d76bb0e760db101ee8916b620893e6edfbfee563b3c6f7a67c02"}, {file = "pydantic-1.10.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e306e280ebebc65040034bff1a0a81fd86b2f4f05daac0131f29541cafd80b80"}, @@ -2855,6 +2978,7 @@ version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["main", "docs", "test"] files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, @@ -2869,6 +2993,7 @@ version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, @@ -2895,6 +3020,7 @@ version = "7.4.0" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, @@ -2917,6 +3043,7 @@ version = "3.11.1" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, @@ -2934,6 +3061,7 @@ version = "2.3.1" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, @@ -2948,6 +3076,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "test"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = 
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2962,6 +3091,7 @@ version = "2.0.7" description = "A python library adding a json log formatter" optional = false python-versions = ">=3.6" +groups = ["test"] files = [ {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, @@ -2973,6 +3103,7 @@ version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, @@ -2984,6 +3115,8 @@ version = "306" description = "Python for Window Extensions" optional = false python-versions = "*" +groups = ["test"] +markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"" files = [ {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, @@ -3007,6 +3140,8 @@ version = "2.0.13" description = "Pseudo terminal support for Windows from Python." optional = false python-versions = ">=3.8" +groups = ["test"] +markers = "os_name == \"nt\"" files = [ {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, @@ -3022,6 +3157,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -3084,6 +3220,7 @@ version = "26.2.0" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, @@ -3205,6 +3342,7 @@ version = "2.35.0" description = "Ray provides a simple, universal API for building distributed applications." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "ray-2.35.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1e7e2d2e987be728a81821b6fd2bccb23e4d8a6cca8417db08b24f06a08d8476"}, {file = "ray-2.35.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bd48be4c362004d31e5df072fd58b929efc67adfefc0adece41483b15f84539"}, @@ -3259,18 +3397,18 @@ smart-open = {version = "*", optional = true, markers = "extra == \"default\""} virtualenv = {version = ">=20.0.24,<20.21.1 || >20.21.1", optional = true, markers = "extra == \"default\""} [package.extras] -adag = ["cupy-cuda12x"] -air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "memray", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "gymnasium (==0.28.1)", "lz4", "memray", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.35.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +adag = ["cupy-cuda12x ; sys_platform != \"darwin\""] +air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==0.28.1)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "rich", "scikit-image", "scipy", 
"smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==0.28.1)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.35.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] client = ["grpcio (!=1.56.0)"] cpp = ["ray-cpp (==2.35.0)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] -default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] +default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "memray ; sys_platform != \"win32\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] rllib = ["dm-tree", "fsspec", "gymnasium (==0.28.1)", "lz4", "pandas", "pyarrow (>=6.0.1)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "tensorboardX (>=1.9)", "typer"] -serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0)", "grpcio (>=1.42.0)", "memray", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "memray ; sys_platform != \"win32\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "memray ; sys_platform != \"win32\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] train = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", 
"tensorboardX (>=1.9)"] tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] @@ -3280,6 +3418,7 @@ version = "0.35.1" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, @@ -3295,6 +3434,7 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["main", "docs", "test"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -3316,6 +3456,7 @@ version = "2.0.0" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=3.4" +groups = ["main"] files = [ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, @@ -3334,6 +3475,7 @@ version = "0.1.4" description = "A pure python RFC3339 validator" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["test"] files = [ {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, @@ -3348,6 +3490,7 @@ version = "0.1.1" description = "Pure python rfc3986 validator" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["test"] files = [ {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, @@ -3359,6 +3502,7 @@ version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, @@ -3378,6 +3522,7 @@ version = "0.20.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, @@ -3490,6 +3635,7 @@ version = "4.9" description = "Pure-Python RSA implementation" optional = false python-versions = ">=3.6,<4" +groups = ["main"] files = [ {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, {file = 
"rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, @@ -3504,15 +3650,16 @@ version = "1.8.3" description = "Send file to trash natively under Mac OS X, Windows and Linux" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["test"] files = [ {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, ] [package.extras] -nativelib = ["pyobjc-framework-Cocoa", "pywin32"] -objc = ["pyobjc-framework-Cocoa"] -win32 = ["pywin32"] +nativelib = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\"", "pywin32 ; sys_platform == \"win32\""] +objc = ["pyobjc-framework-Cocoa ; sys_platform == \"darwin\""] +win32 = ["pywin32 ; sys_platform == \"win32\""] [[package]] name = "setuptools" @@ -3520,19 +3667,20 @@ version = "75.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.5.2) ; sys_platform != \"cygwin\""] +core = ["importlib-metadata (>=6) ; python_version < \"3.10\"", "importlib-resources (>=5.10.2) ; python_version < \"3.9\"", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", 
"pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" @@ -3540,6 +3688,7 @@ version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main", "test"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -3551,6 +3700,7 @@ version = "7.0.4" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" optional = false python-versions = "<4.0,>=3.7" +groups = ["main"] files = [ {file = "smart_open-7.0.4-py3-none-any.whl", hash = "sha256:4e98489932b3372595cddc075e6033194775165702887216b65eba760dfd8d47"}, {file = "smart_open-7.0.4.tar.gz", hash = "sha256:62b65852bdd1d1d516839fcb1f6bc50cd0f16e05b4ec44b52f43d38bcb838524"}, @@ -3576,6 +3726,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -3587,6 +3738,7 @@ version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." optional = false python-versions = "*" +groups = ["docs"] files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -3598,6 +3750,7 @@ version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, @@ -3609,6 +3762,7 @@ version = "7.4.7" description = "Python documentation generator" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, @@ -3645,6 +3799,7 @@ version = "3.0.1" description = "Read the Docs theme for Sphinx" optional = false python-versions = ">=3.8" +groups = ["docs"] files = [ {file = "sphinx_rtd_theme-3.0.1-py2.py3-none-any.whl", hash = "sha256:921c0ece75e90633ee876bd7b148cfaad136b481907ad154ac3669b6fc957916"}, {file = "sphinx_rtd_theme-3.0.1.tar.gz", hash = "sha256:a4c5745d1b06dfcb80b7704fe532eb765b44065a8fad9851e4258c8804140703"}, @@ -3664,6 +3819,7 @@ version = "2.0.0" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -3680,6 +3836,7 @@ version = "2.0.0" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -3696,6 +3853,7 @@ version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -3712,6 +3870,7 @@ version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" optional = false python-versions = ">=2.7" +groups = ["docs"] files = [ {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, @@ -3726,6 +3885,7 @@ version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" +groups = ["docs"] files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -3740,6 +3900,7 @@ version = "2.0.0" description = "sphinxcontrib-qthelp is a sphinx 
extension which outputs QtHelp documents" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -3756,6 +3917,7 @@ version = "2.0.0" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false python-versions = ">=3.9" +groups = ["docs"] files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -3772,6 +3934,7 @@ version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" optional = false python-versions = "*" +groups = ["main", "test"] files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -3791,6 +3954,7 @@ version = "0.18.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, @@ -3812,6 +3976,7 @@ version = "1.3.0" description = "A tiny CSS parser" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, @@ -3830,6 +3995,8 @@ version = "2.0.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.7" +groups = ["docs", "test"] +markers = "python_version < \"3.11\"" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, @@ -3841,6 +4008,7 @@ version = "6.4.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, @@ -3861,6 +4029,7 @@ version = "5.14.3" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -3876,6 +4045,7 @@ version = "2.9.0.20240906" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "types-python-dateutil-2.9.0.20240906.tar.gz", hash = "sha256:9706c3b68284c25adffc47319ecc7947e5bb86b3773f843c73906fd598bc176e"}, {file = "types_python_dateutil-2.9.0.20240906-py3-none-any.whl", hash = "sha256:27c8cc2d058ccb14946eebcaaa503088f4f6dbc4fb6093d3d456a49aef2753f6"}, @@ -3887,10 +4057,12 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +markers = {test = "python_version < \"3.11\""} [[package]] name = "tzdata" @@ -3898,6 +4070,7 @@ version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] files = [ {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, @@ -3909,6 +4082,7 @@ version = "1.3.0" description = "RFC 6570 URI Template Processor" optional = false python-versions = ">=3.7" +groups = ["test"] files = [ {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, @@ -3923,13 +4097,14 @@ version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" +groups = ["main", "docs", "test"] files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -3940,6 +4115,7 @@ version = "20.26.6" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, @@ -3952,7 +4128,7 @@ platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] [[package]] name = "wcwidth" @@ -3960,6 +4136,7 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["main", "test"] files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -3971,6 +4148,7 @@ version = "24.8.0" description = "A library for working with the color formats defined by HTML and CSS." 
optional = false python-versions = ">=3.8" +groups = ["test"] files = [ {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, @@ -3986,6 +4164,7 @@ version = "0.5.1" description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" +groups = ["test"] files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -3997,6 +4176,7 @@ version = "1.8.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" +groups = ["main", "test"] files = [ {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, @@ -4013,6 +4193,7 @@ version = "4.0.13" description = "Jupyter interactive widgets for Jupyter Notebook" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"}, {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, @@ -4024,6 +4205,7 @@ version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, @@ -4103,6 +4285,7 @@ version = "1.13.1" description = "Yet another URL library" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:82e692fb325013a18a5b73a4fed5a1edaa7c58144dc67ad9ef3d604eccd451ad"}, {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df4e82e68f43a07735ae70a2d84c0353e58e20add20ec0af611f32cd5ba43fb4"}, @@ -4208,20 +4391,22 @@ version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" +groups = ["docs", "test"] +markers = "python_version == \"3.9\"" files = [ {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +test = ["big-O", 
"importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.9" content-hash = "6e5e74183be00c02f89e0da419aec0b032025410f8d2ae8685263fb5bc38b911" From 9b96745d5311312f22931fc2d6e20d6823739e38 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Thu, 6 Mar 2025 16:01:31 +0000 Subject: [PATCH 460/496] Update reference to Ray ROCm image --- docs/sphinx/user-docs/cluster-configuration.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index 9f9fdddb..82156b2f 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -41,7 +41,7 @@ requirements for creating the Ray Cluster. If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default. If you are using ROCm compatible GPUs you - can use `quay.io/modh/ray:2.35.0-py39-rocm61`. You can also find + can use `quay.io/modh/ray:2.35.0-py311-rocm62` or `quay.io/modh/ray:2.35.0-py39-rocm62`. You can also find documentation on building a custom image `here `__. From ba4f6317c3a5ebc077f6688aba0e1dd217059fb9 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Fri, 2 May 2025 10:10:13 +0100 Subject: [PATCH 461/496] chore(RHOAIENG-24672): Update Ray to 2.44.1 and Py311 Runtime image refs --- .../additional-demos/local_interactive.ipynb | 2 +- .../additional-demos/ray_job_client.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- .../guided-demos/1_cluster_job_client.ipynb | 2 +- .../guided-demos/2_basic_interactive.ipynb | 2 +- .../guided-demos/3_widget_example.ipynb | 2 +- .../notebook-ex-outputs/0_basic_ray.ipynb | 2 +- .../1_cluster_job_client.ipynb | 2 +- .../2_basic_interactive.ipynb | 2 +- .../preview_nbs/0_basic_ray.ipynb | 2 +- .../preview_nbs/1_cluster_job_client.ipynb | 2 +- .../preview_nbs/2_basic_interactive.ipynb | 2 +- .../user-docs/cluster-configuration.rst | 2 +- poetry.lock | 3297 ++++++++++------- pyproject.toml | 2 +- .../common/utils/unit_test_support.py | 2 +- .../ray/cluster/build_ray_cluster.py | 4 +- .../appwrapper/test-case-bad.yaml | 2 +- .../appwrapper/unit-test-all-params.yaml | 2 +- tests/test_cluster_yamls/kueue/aw_kueue.yaml | 2 +- .../kueue/ray_cluster_kueue.yaml | 2 +- .../ray/default-appwrapper.yaml | 2 +- .../ray/default-ray-cluster.yaml | 2 +- .../ray/unit-test-all-params.yaml | 2 +- .../support_clusters/test-aw-a.yaml | 2 +- .../support_clusters/test-aw-b.yaml | 2 +- .../support_clusters/test-rc-a.yaml | 2 +- .../support_clusters/test-rc-b.yaml | 2 +- 28 files changed, 1888 insertions(+), 1465 deletions(-) diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index 09cb9b89..ed105b45 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -38,7 +38,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image 
that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 31c5793e..4b129190 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -44,7 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index be05130e..603a52a2 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -50,7 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 8db650dd..f33fa2ec 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -44,7 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 5528d04b..fccca9d3 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -47,7 +47,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/3_widget_example.ipynb b/demo-notebooks/guided-demos/3_widget_example.ipynb index 243c75ec..ea5ad08a 100644 --- a/demo-notebooks/guided-demos/3_widget_example.ipynb +++ b/demo-notebooks/guided-demos/3_widget_example.ipynb @@ -50,7 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 3e6dc193..fb1a8d61 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -50,7 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index e79d47e7..7e62f52a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -44,7 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index f6417521..fe8f3274 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -47,7 +47,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 3e6dc193..fb1a8d61 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -50,7 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index 40195d64..34defbf9 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -44,7 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 8838a5ba..7f50437f 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -47,7 +47,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index 82156b2f..d60e9c92 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -36,7 +36,7 @@ requirements for creating the Ray Cluster. a RayCluster resource depend on the installed Python version: - For Python 3.9: `quay.io/modh/ray:2.35.0-py39-cu121` - - For Python 3.11: `quay.io/modh/ray:2.35.0-py311-cu121` + - For Python 3.11: `quay.io/modh/ray:2.44.1-py311-cu121 ` If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default. diff --git a/poetry.lock b/poetry.lock index 34340a62..1e0d12ab 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,116 +1,106 @@ -# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" -version = "2.4.3" +version = "2.6.1" description = "Happy Eyeballs for asyncio" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, - {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, ] [[package]] name = "aiohttp" -version = "3.10.11" +version = "3.11.18" description = "Async http client/server framework (asyncio)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "aiohttp-3.10.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5077b1a5f40ffa3ba1f40d537d3bec4383988ee51fbba6b74aa8fb1bc466599e"}, - {file = "aiohttp-3.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d6a14a4d93b5b3c2891fca94fa9d41b2322a68194422bef0dd5ec1e57d7d298"}, - {file = "aiohttp-3.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffbfde2443696345e23a3c597049b1dd43049bb65337837574205e7368472177"}, - {file = "aiohttp-3.10.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20b3d9e416774d41813bc02fdc0663379c01817b0874b932b81c7f777f67b217"}, - {file = "aiohttp-3.10.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b943011b45ee6bf74b22245c6faab736363678e910504dd7531a58c76c9015a"}, - {file = "aiohttp-3.10.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48bc1d924490f0d0b3658fe5c4b081a4d56ebb58af80a6729d4bd13ea569797a"}, - {file = "aiohttp-3.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e12eb3f4b1f72aaaf6acd27d045753b18101524f72ae071ae1c91c1cd44ef115"}, - {file = "aiohttp-3.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f14ebc419a568c2eff3c1ed35f634435c24ead2fe19c07426af41e7adb68713a"}, - {file = "aiohttp-3.10.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:72b191cdf35a518bfc7ca87d770d30941decc5aaf897ec8b484eb5cc8c7706f3"}, - {file = "aiohttp-3.10.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5ab2328a61fdc86424ee540d0aeb8b73bbcad7351fb7cf7a6546fc0bcffa0038"}, - {file = "aiohttp-3.10.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa93063d4af05c49276cf14e419550a3f45258b6b9d1f16403e777f1addf4519"}, - {file = "aiohttp-3.10.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:30283f9d0ce420363c24c5c2421e71a738a2155f10adbb1a11a4d4d6d2715cfc"}, - {file = "aiohttp-3.10.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e5358addc8044ee49143c546d2182c15b4ac3a60be01c3209374ace05af5733d"}, - {file = "aiohttp-3.10.11-cp310-cp310-win32.whl", hash = "sha256:e1ffa713d3ea7cdcd4aea9cddccab41edf6882fa9552940344c44e59652e1120"}, - {file = "aiohttp-3.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:778cbd01f18ff78b5dd23c77eb82987ee4ba23408cbed233009fd570dda7e674"}, - {file = "aiohttp-3.10.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:80ff08556c7f59a7972b1e8919f62e9c069c33566a6d28586771711e0eea4f07"}, - {file = "aiohttp-3.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:2c8f96e9ee19f04c4914e4e7a42a60861066d3e1abf05c726f38d9d0a466e695"}, - {file = "aiohttp-3.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fb8601394d537da9221947b5d6e62b064c9a43e88a1ecd7414d21a1a6fba9c24"}, - {file = "aiohttp-3.10.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea224cf7bc2d8856d6971cea73b1d50c9c51d36971faf1abc169a0d5f85a382"}, - {file = "aiohttp-3.10.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db9503f79e12d5d80b3efd4d01312853565c05367493379df76d2674af881caa"}, - {file = "aiohttp-3.10.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0f449a50cc33f0384f633894d8d3cd020e3ccef81879c6e6245c3c375c448625"}, - {file = "aiohttp-3.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82052be3e6d9e0c123499127782a01a2b224b8af8c62ab46b3f6197035ad94e9"}, - {file = "aiohttp-3.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20063c7acf1eec550c8eb098deb5ed9e1bb0521613b03bb93644b810986027ac"}, - {file = "aiohttp-3.10.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:489cced07a4c11488f47aab1f00d0c572506883f877af100a38f1fedaa884c3a"}, - {file = "aiohttp-3.10.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ea9b3bab329aeaa603ed3bf605f1e2a6f36496ad7e0e1aa42025f368ee2dc07b"}, - {file = "aiohttp-3.10.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ca117819d8ad113413016cb29774b3f6d99ad23c220069789fc050267b786c16"}, - {file = "aiohttp-3.10.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2dfb612dcbe70fb7cdcf3499e8d483079b89749c857a8f6e80263b021745c730"}, - {file = "aiohttp-3.10.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9b615d3da0d60e7d53c62e22b4fd1c70f4ae5993a44687b011ea3a2e49051b8"}, - {file = "aiohttp-3.10.11-cp311-cp311-win32.whl", hash = "sha256:29103f9099b6068bbdf44d6a3d090e0a0b2be6d3c9f16a070dd9d0d910ec08f9"}, - {file = "aiohttp-3.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:236b28ceb79532da85d59aa9b9bf873b364e27a0acb2ceaba475dc61cffb6f3f"}, - {file = "aiohttp-3.10.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7480519f70e32bfb101d71fb9a1f330fbd291655a4c1c922232a48c458c52710"}, - {file = "aiohttp-3.10.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f65267266c9aeb2287a6622ee2bb39490292552f9fbf851baabc04c9f84e048d"}, - {file = "aiohttp-3.10.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7400a93d629a0608dc1d6c55f1e3d6e07f7375745aaa8bd7f085571e4d1cee97"}, - {file = "aiohttp-3.10.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f34b97e4b11b8d4eb2c3a4f975be626cc8af99ff479da7de49ac2c6d02d35725"}, - {file = "aiohttp-3.10.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e7b825da878464a252ccff2958838f9caa82f32a8dbc334eb9b34a026e2c636"}, - {file = "aiohttp-3.10.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f92a344c50b9667827da308473005f34767b6a2a60d9acff56ae94f895f385"}, - {file = "aiohttp-3.10.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc6f1ab987a27b83c5268a17218463c2ec08dbb754195113867a27b166cd6087"}, - {file = "aiohttp-3.10.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1dc0f4ca54842173d03322793ebcf2c8cc2d34ae91cc762478e295d8e361e03f"}, - {file = "aiohttp-3.10.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:7ce6a51469bfaacff146e59e7fb61c9c23006495d11cc24c514a455032bcfa03"}, - {file = "aiohttp-3.10.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aad3cd91d484d065ede16f3cf15408254e2469e3f613b241a1db552c5eb7ab7d"}, - {file = "aiohttp-3.10.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f4df4b8ca97f658c880fb4b90b1d1ec528315d4030af1ec763247ebfd33d8b9a"}, - {file = "aiohttp-3.10.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2e4e18a0a2d03531edbc06c366954e40a3f8d2a88d2b936bbe78a0c75a3aab3e"}, - {file = "aiohttp-3.10.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6ce66780fa1a20e45bc753cda2a149daa6dbf1561fc1289fa0c308391c7bc0a4"}, - {file = "aiohttp-3.10.11-cp312-cp312-win32.whl", hash = "sha256:a919c8957695ea4c0e7a3e8d16494e3477b86f33067478f43106921c2fef15bb"}, - {file = "aiohttp-3.10.11-cp312-cp312-win_amd64.whl", hash = "sha256:b5e29706e6389a2283a91611c91bf24f218962717c8f3b4e528ef529d112ee27"}, - {file = "aiohttp-3.10.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:703938e22434d7d14ec22f9f310559331f455018389222eed132808cd8f44127"}, - {file = "aiohttp-3.10.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9bc50b63648840854e00084c2b43035a62e033cb9b06d8c22b409d56eb098413"}, - {file = "aiohttp-3.10.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f0463bf8b0754bc744e1feb61590706823795041e63edf30118a6f0bf577461"}, - {file = "aiohttp-3.10.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6c6dec398ac5a87cb3a407b068e1106b20ef001c344e34154616183fe684288"}, - {file = "aiohttp-3.10.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcaf2d79104d53d4dcf934f7ce76d3d155302d07dae24dff6c9fffd217568067"}, - {file = "aiohttp-3.10.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25fd5470922091b5a9aeeb7e75be609e16b4fba81cdeaf12981393fb240dd10e"}, - {file = "aiohttp-3.10.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbde2ca67230923a42161b1f408c3992ae6e0be782dca0c44cb3206bf330dee1"}, - {file = "aiohttp-3.10.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:249c8ff8d26a8b41a0f12f9df804e7c685ca35a207e2410adbd3e924217b9006"}, - {file = "aiohttp-3.10.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:878ca6a931ee8c486a8f7b432b65431d095c522cbeb34892bee5be97b3481d0f"}, - {file = "aiohttp-3.10.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8663f7777ce775f0413324be0d96d9730959b2ca73d9b7e2c2c90539139cbdd6"}, - {file = "aiohttp-3.10.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6cd3f10b01f0c31481fba8d302b61603a2acb37b9d30e1d14e0f5a58b7b18a31"}, - {file = "aiohttp-3.10.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e8d8aad9402d3aa02fdc5ca2fe68bcb9fdfe1f77b40b10410a94c7f408b664d"}, - {file = "aiohttp-3.10.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:38e3c4f80196b4f6c3a85d134a534a56f52da9cb8d8e7af1b79a32eefee73a00"}, - {file = "aiohttp-3.10.11-cp313-cp313-win32.whl", hash = "sha256:fc31820cfc3b2863c6e95e14fcf815dc7afe52480b4dc03393c4873bb5599f71"}, - {file = "aiohttp-3.10.11-cp313-cp313-win_amd64.whl", hash = "sha256:4996ff1345704ffdd6d75fb06ed175938c133425af616142e7187f28dc75f14e"}, - {file = "aiohttp-3.10.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:74baf1a7d948b3d640badeac333af581a367ab916b37e44cf90a0334157cdfd2"}, - {file = "aiohttp-3.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:473aebc3b871646e1940c05268d451f2543a1d209f47035b594b9d4e91ce8339"}, - {file = "aiohttp-3.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c2f746a6968c54ab2186574e15c3f14f3e7f67aef12b761e043b33b89c5b5f95"}, - {file = "aiohttp-3.10.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d110cabad8360ffa0dec8f6ec60e43286e9d251e77db4763a87dcfe55b4adb92"}, - {file = "aiohttp-3.10.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0099c7d5d7afff4202a0c670e5b723f7718810000b4abcbc96b064129e64bc7"}, - {file = "aiohttp-3.10.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0316e624b754dbbf8c872b62fe6dcb395ef20c70e59890dfa0de9eafccd2849d"}, - {file = "aiohttp-3.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a5f7ab8baf13314e6b2485965cbacb94afff1e93466ac4d06a47a81c50f9cca"}, - {file = "aiohttp-3.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c891011e76041e6508cbfc469dd1a8ea09bc24e87e4c204e05f150c4c455a5fa"}, - {file = "aiohttp-3.10.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9208299251370ee815473270c52cd3f7069ee9ed348d941d574d1457d2c73e8b"}, - {file = "aiohttp-3.10.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:459f0f32c8356e8125f45eeff0ecf2b1cb6db1551304972702f34cd9e6c44658"}, - {file = "aiohttp-3.10.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:14cdc8c1810bbd4b4b9f142eeee23cda528ae4e57ea0923551a9af4820980e39"}, - {file = "aiohttp-3.10.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:971aa438a29701d4b34e4943e91b5e984c3ae6ccbf80dd9efaffb01bd0b243a9"}, - {file = "aiohttp-3.10.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9a309c5de392dfe0f32ee57fa43ed8fc6ddf9985425e84bd51ed66bb16bce3a7"}, - {file = "aiohttp-3.10.11-cp38-cp38-win32.whl", hash = "sha256:9ec1628180241d906a0840b38f162a3215114b14541f1a8711c368a8739a9be4"}, - {file = "aiohttp-3.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:9c6e0ffd52c929f985c7258f83185d17c76d4275ad22e90aa29f38e211aacbec"}, - {file = "aiohttp-3.10.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc493a2e5d8dc79b2df5bec9558425bcd39aff59fc949810cbd0832e294b106"}, - {file = "aiohttp-3.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3e70f24e7d0405be2348da9d5a7836936bf3a9b4fd210f8c37e8d48bc32eca6"}, - {file = "aiohttp-3.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968b8fb2a5eee2770eda9c7b5581587ef9b96fbdf8dcabc6b446d35ccc69df01"}, - {file = "aiohttp-3.10.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deef4362af9493d1382ef86732ee2e4cbc0d7c005947bd54ad1a9a16dd59298e"}, - {file = "aiohttp-3.10.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:686b03196976e327412a1b094f4120778c7c4b9cff9bce8d2fdfeca386b89829"}, - {file = "aiohttp-3.10.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3bf6d027d9d1d34e1c2e1645f18a6498c98d634f8e373395221121f1c258ace8"}, - {file = "aiohttp-3.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:099fd126bf960f96d34a760e747a629c27fb3634da5d05c7ef4d35ef4ea519fc"}, - {file = "aiohttp-3.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c73c4d3dae0b4644bc21e3de546530531d6cdc88659cdeb6579cd627d3c206aa"}, - {file = "aiohttp-3.10.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:0c5580f3c51eea91559db3facd45d72e7ec970b04528b4709b1f9c2555bd6d0b"}, - {file = "aiohttp-3.10.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fdf6429f0caabfd8a30c4e2eaecb547b3c340e4730ebfe25139779b9815ba138"}, - {file = "aiohttp-3.10.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d97187de3c276263db3564bb9d9fad9e15b51ea10a371ffa5947a5ba93ad6777"}, - {file = "aiohttp-3.10.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:0acafb350cfb2eba70eb5d271f55e08bd4502ec35e964e18ad3e7d34d71f7261"}, - {file = "aiohttp-3.10.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c13ed0c779911c7998a58e7848954bd4d63df3e3575f591e321b19a2aec8df9f"}, - {file = "aiohttp-3.10.11-cp39-cp39-win32.whl", hash = "sha256:22b7c540c55909140f63ab4f54ec2c20d2635c0289cdd8006da46f3327f971b9"}, - {file = "aiohttp-3.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:7b26b1551e481012575dab8e3727b16fe7dd27eb2711d2e63ced7368756268fb"}, - {file = "aiohttp-3.10.11.tar.gz", hash = "sha256:9dc2b8f3dcab2e39e0fa309c8da50c3b55e6f34ab25f1a71d3288f24924d33a7"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:96264854fedbea933a9ca4b7e0c745728f01380691687b7365d18d9e977179c4"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9602044ff047043430452bc3a2089743fa85da829e6fc9ee0025351d66c332b6"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5691dc38750fcb96a33ceef89642f139aa315c8a193bbd42a0c33476fd4a1609"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554c918ec43f8480b47a5ca758e10e793bd7410b83701676a4782672d670da55"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a4076a2b3ba5b004b8cffca6afe18a3b2c5c9ef679b4d1e9859cf76295f8d4f"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:767a97e6900edd11c762be96d82d13a1d7c4fc4b329f054e88b57cdc21fded94"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ddc9337a0fb0e727785ad4f41163cc314376e82b31846d3835673786420ef1"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f414f37b244f2a97e79b98d48c5ff0789a0b4b4609b17d64fa81771ad780e415"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fdb239f47328581e2ec7744ab5911f97afb10752332a6dd3d98e14e429e1a9e7"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f2c50bad73ed629cc326cc0f75aed8ecfb013f88c5af116f33df556ed47143eb"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a8d8f20c39d3fa84d1c28cdb97f3111387e48209e224408e75f29c6f8e0861d"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:106032eaf9e62fd6bc6578c8b9e6dc4f5ed9a5c1c7fb2231010a1b4304393421"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b491e42183e8fcc9901d8dcd8ae644ff785590f1727f76ca86e731c61bfe6643"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad8c745ff9460a16b710e58e06a9dec11ebc0d8f4dd82091cefb579844d69868"}, + {file = "aiohttp-3.11.18-cp310-cp310-win32.whl", hash = "sha256:8e57da93e24303a883146510a434f0faf2f1e7e659f3041abc4e3fb3f6702a9f"}, + {file = "aiohttp-3.11.18-cp310-cp310-win_amd64.whl", hash = "sha256:cc93a4121d87d9f12739fc8fab0a95f78444e571ed63e40bfc78cd5abe700ac9"}, + {file 
= "aiohttp-3.11.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:427fdc56ccb6901ff8088544bde47084845ea81591deb16f957897f0f0ba1be9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c828b6d23b984255b85b9b04a5b963a74278b7356a7de84fda5e3b76866597b"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c2eaa145bb36b33af1ff2860820ba0589e165be4ab63a49aebfd0981c173b66"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d518ce32179f7e2096bf4e3e8438cf445f05fedd597f252de9f54c728574756"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0700055a6e05c2f4711011a44364020d7a10fbbcd02fbf3e30e8f7e7fddc8717"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8bd1cde83e4684324e6ee19adfc25fd649d04078179890be7b29f76b501de8e4"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73b8870fe1c9a201b8c0d12c94fe781b918664766728783241a79e0468427e4f"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25557982dd36b9e32c0a3357f30804e80790ec2c4d20ac6bcc598533e04c6361"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e889c9df381a2433802991288a61e5a19ceb4f61bd14f5c9fa165655dcb1fd1"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9ea345fda05bae217b6cce2acf3682ce3b13d0d16dd47d0de7080e5e21362421"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9f26545b9940c4b46f0a9388fd04ee3ad7064c4017b5a334dd450f616396590e"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3a621d85e85dccabd700294494d7179ed1590b6d07a35709bb9bd608c7f5dd1d"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9c23fd8d08eb9c2af3faeedc8c56e134acdaf36e2117ee059d7defa655130e5f"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9e6b0e519067caa4fd7fb72e3e8002d16a68e84e62e7291092a5433763dc0dd"}, + {file = "aiohttp-3.11.18-cp311-cp311-win32.whl", hash = "sha256:122f3e739f6607e5e4c6a2f8562a6f476192a682a52bda8b4c6d4254e1138f4d"}, + {file = "aiohttp-3.11.18-cp311-cp311-win_amd64.whl", hash = "sha256:e6f3c0a3a1e73e88af384b2e8a0b9f4fb73245afd47589df2afcab6b638fa0e6"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea"}, + {file = "aiohttp-3.11.18-cp312-cp312-win32.whl", hash = "sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8"}, + {file = "aiohttp-3.11.18-cp312-cp312-win_amd64.whl", hash = "sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261"}, + {file = 
"aiohttp-3.11.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7"}, + {file = "aiohttp-3.11.18-cp313-cp313-win32.whl", hash = "sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78"}, + {file = "aiohttp-3.11.18-cp313-cp313-win_amd64.whl", hash = "sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:469ac32375d9a716da49817cd26f1916ec787fc82b151c1c832f58420e6d3533"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3cec21dd68924179258ae14af9f5418c1ebdbba60b98c667815891293902e5e0"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b426495fb9140e75719b3ae70a5e8dd3a79def0ae3c6c27e012fc59f16544a4a"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2f41203e2808616292db5d7170cccf0c9f9c982d02544443c7eb0296e8b0c7"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc0ae0a5e9939e423e065a3e5b00b24b8379f1db46046d7ab71753dfc7dd0e1"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe7cdd3f7d1df43200e1c80f1aed86bb36033bf65e3c7cf46a2b97a253ef8798"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5199be2a2f01ffdfa8c3a6f5981205242986b9e63eb8ae03fd18f736e4840721"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ccec9e72660b10f8e283e91aa0295975c7bd85c204011d9f5eb69310555cf30"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1596ebf17e42e293cbacc7a24c3e0dc0f8f755b40aff0402cb74c1ff6baec1d3"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:eab7b040a8a873020113ba814b7db7fa935235e4cbaf8f3da17671baa1024863"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5d61df4a05476ff891cff0030329fee4088d40e4dc9b013fac01bc3c745542c2"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:46533e6792e1410f9801d09fd40cbbff3f3518d1b501d6c3c5b218f427f6ff08"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c1b90407ced992331dd6d4f1355819ea1c274cc1ee4d5b7046c6761f9ec11829"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a2fd04ae4971b914e54fe459dd7edbbd3f2ba875d69e057d5e3c8e8cac094935"}, + {file = "aiohttp-3.11.18-cp39-cp39-win32.whl", hash = "sha256:b2f317d1678002eee6fe85670039fb34a757972284614638f82b903a03feacdc"}, + {file = "aiohttp-3.11.18-cp39-cp39-win_amd64.whl", hash = "sha256:5e7007b8d1d09bce37b54111f593d173691c530b80f27c6493b928dabed9e6ef"}, + {file = "aiohttp-3.11.18.tar.gz", hash = "sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a"}, ] [package.dependencies] @@ -120,36 +110,37 @@ async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -yarl = ">=1.12.0,<2.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" [package.extras] speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiohttp-cors" -version = "0.7.0" +version = 
"0.8.1" description = "CORS support for aiohttp" optional = false -python-versions = "*" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "aiohttp-cors-0.7.0.tar.gz", hash = "sha256:4d39c6d7100fd9764ed1caf8cebf0eb01bf5e3f24e2e073fda6234bc48b19f5d"}, - {file = "aiohttp_cors-0.7.0-py3-none-any.whl", hash = "sha256:0451ba59fdf6909d0e2cd21e4c0a43752bc0703d33fc78ae94d9d9321710193e"}, + {file = "aiohttp_cors-0.8.1-py3-none-any.whl", hash = "sha256:3180cf304c5c712d626b9162b195b1db7ddf976a2a25172b35bb2448b890a80d"}, + {file = "aiohttp_cors-0.8.1.tar.gz", hash = "sha256:ccacf9cb84b64939ea15f859a146af1f662a6b1d68175754a07315e305fb1403"}, ] [package.dependencies] -aiohttp = ">=1.1" +aiohttp = ">=3.9" [[package]] name = "aiosignal" -version = "1.3.1" +version = "1.3.2" description = "aiosignal: a list of registered asynchronous callbacks" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [package.dependencies] @@ -169,25 +160,25 @@ files = [ [[package]] name = "anyio" -version = "4.6.0" +version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" groups = ["test"] files = [ - {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, - {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\""] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -284,121 +275,142 @@ test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock [[package]] name = "asttokens" 
-version = "2.4.1" +version = "3.0.0" description = "Annotate AST trees with source code positions" optional = false -python-versions = "*" +python-versions = ">=3.8" groups = ["main", "test"] files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, + {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, + {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, ] -[package.dependencies] -six = ">=1.12.0" - [package.extras] -astroid = ["astroid (>=1,<2) ; python_version < \"3\"", "astroid (>=2,<4) ; python_version >= \"3\""] -test = ["astroid (>=1,<2) ; python_version < \"3\"", "astroid (>=2,<4) ; python_version >= \"3\"", "pytest"] +astroid = ["astroid (>=2,<4)"] +test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "async-lru" -version = "2.0.4" +version = "2.0.5" description = "Simple LRU cache for asyncio" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["test"] files = [ - {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, - {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, + {file = "async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943"}, + {file = "async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb"}, ] [package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "async-timeout" -version = "4.0.3" +version = "5.0.1" description = "Timeout context manager for asyncio programs" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["main"] markers = "python_version < \"3.11\"" files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, ] [[package]] name = "attrs" -version = "24.2.0" +version = "25.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["main", "test"] files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, ] [package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == 
\"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.9\" and python_version < \"3.13\""] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; 
platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "babel" -version = "2.16.0" +version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" groups = ["docs", "test"] files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] [[package]] name = "bcrypt" -version = "4.2.0" +version = "4.3.0" description = "Modern password hashing for your software and your servers" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, - {file = "bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, - {file = "bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, - {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, - {file = 
"bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, - {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, - {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, - {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, - {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, - {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, - {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, - {file = "bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, + {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd"}, + {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f"}, + {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d"}, + 
{file = "bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4"}, + {file = "bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669"}, + {file = "bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb"}, + {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef"}, + {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304"}, + {file = "bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51"}, + {file = "bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62"}, + {file = "bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe"}, + {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe"}, + {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505"}, + {file = "bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a"}, + {file = "bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492"}, + {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8"}, + {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938"}, + {file = "bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18"}, ] [package.extras] @@ -407,18 +419,19 @@ typecheck = ["mypy"] [[package]] name = "beautifulsoup4" -version = "4.12.3" +version = "4.13.4" description = "Screen-scraping library" optional = false -python-versions = ">=3.6.0" +python-versions = ">=3.7.0" groups = ["test"] files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, + {file = "beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b"}, + {file = "beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195"}, ] [package.dependencies] soupsieve = ">1.2" +typing-extensions = ">=4.0.0" [package.extras] cchardet = ["cchardet"] @@ -429,45 +442,45 @@ lxml = ["lxml"] [[package]] name = "bleach" -version = "6.1.0" +version = "6.2.0" description = "An easy safelist-based HTML-sanitizing tool." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["test"] files = [ - {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, - {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, + {file = "bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e"}, + {file = "bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f"}, ] [package.dependencies] -six = ">=1.9.0" +tinycss2 = {version = ">=1.1.0,<1.5", optional = true, markers = "extra == \"css\""} webencodings = "*" [package.extras] -css = ["tinycss2 (>=1.1.0,<1.3)"] +css = ["tinycss2 (>=1.1.0,<1.5)"] [[package]] name = "cachetools" -version = "5.5.0" +version = "5.5.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] [[package]] name = "certifi" -version = "2024.8.30" +version = "2025.4.26" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" groups = ["main", "docs", "test"] files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, ] [[package]] @@ -552,114 +565,133 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.7" groups = ["main", "docs", "test"] files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, + {file = 
"charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, + {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, + {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" groups = ["main"] +markers = "python_version == \"3.9\"" files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click" +version = "8.2.0" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "click-8.2.0-py3-none-any.whl", hash = "sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c"}, + {file = "click-8.2.0.tar.gz", hash = "sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d"}, ] [package.dependencies] @@ -838,46 +870,50 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "debugpy" -version = "1.8.6" +version = "1.8.14" description = "An implementation of the Debug Adapter 
Protocol for Python" optional = false python-versions = ">=3.8" groups = ["test"] files = [ - {file = "debugpy-1.8.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:30f467c5345d9dfdcc0afdb10e018e47f092e383447500f125b4e013236bf14b"}, - {file = "debugpy-1.8.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d73d8c52614432f4215d0fe79a7e595d0dd162b5c15233762565be2f014803b"}, - {file = "debugpy-1.8.6-cp310-cp310-win32.whl", hash = "sha256:e3e182cd98eac20ee23a00653503315085b29ab44ed66269482349d307b08df9"}, - {file = "debugpy-1.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:e3a82da039cfe717b6fb1886cbbe5c4a3f15d7df4765af857f4307585121c2dd"}, - {file = "debugpy-1.8.6-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:67479a94cf5fd2c2d88f9615e087fcb4fec169ec780464a3f2ba4a9a2bb79955"}, - {file = "debugpy-1.8.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fb8653f6cbf1dd0a305ac1aa66ec246002145074ea57933978346ea5afdf70b"}, - {file = "debugpy-1.8.6-cp311-cp311-win32.whl", hash = "sha256:cdaf0b9691879da2d13fa39b61c01887c34558d1ff6e5c30e2eb698f5384cd43"}, - {file = "debugpy-1.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:43996632bee7435583952155c06881074b9a742a86cee74e701d87ca532fe833"}, - {file = "debugpy-1.8.6-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:db891b141fc6ee4b5fc6d1cc8035ec329cabc64bdd2ae672b4550c87d4ecb128"}, - {file = "debugpy-1.8.6-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:567419081ff67da766c898ccf21e79f1adad0e321381b0dfc7a9c8f7a9347972"}, - {file = "debugpy-1.8.6-cp312-cp312-win32.whl", hash = "sha256:c9834dfd701a1f6bf0f7f0b8b1573970ae99ebbeee68314116e0ccc5c78eea3c"}, - {file = "debugpy-1.8.6-cp312-cp312-win_amd64.whl", hash = "sha256:e4ce0570aa4aca87137890d23b86faeadf184924ad892d20c54237bcaab75d8f"}, - {file = "debugpy-1.8.6-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:df5dc9eb4ca050273b8e374a4cd967c43be1327eeb42bfe2f58b3cdfe7c68dcb"}, - {file = "debugpy-1.8.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a85707c6a84b0c5b3db92a2df685b5230dd8fb8c108298ba4f11dba157a615a"}, - {file = "debugpy-1.8.6-cp38-cp38-win32.whl", hash = "sha256:538c6cdcdcdad310bbefd96d7850be1cd46e703079cc9e67d42a9ca776cdc8a8"}, - {file = "debugpy-1.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:22140bc02c66cda6053b6eb56dfe01bbe22a4447846581ba1dd6df2c9f97982d"}, - {file = "debugpy-1.8.6-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:c1cef65cffbc96e7b392d9178dbfd524ab0750da6c0023c027ddcac968fd1caa"}, - {file = "debugpy-1.8.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e60bd06bb3cc5c0e957df748d1fab501e01416c43a7bdc756d2a992ea1b881"}, - {file = "debugpy-1.8.6-cp39-cp39-win32.whl", hash = "sha256:f7158252803d0752ed5398d291dee4c553bb12d14547c0e1843ab74ee9c31123"}, - {file = "debugpy-1.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3358aa619a073b620cd0d51d8a6176590af24abcc3fe2e479929a154bf591b51"}, - {file = "debugpy-1.8.6-py2.py3-none-any.whl", hash = "sha256:b48892df4d810eff21d3ef37274f4c60d32cdcafc462ad5647239036b0f0649f"}, - {file = "debugpy-1.8.6.zip", hash = "sha256:c931a9371a86784cee25dec8d65bc2dc7a21f3f1552e3833d9ef8f919d22280a"}, + {file = "debugpy-1.8.14-cp310-cp310-macosx_14_0_x86_64.whl", hash = 
"sha256:93fee753097e85623cab1c0e6a68c76308cd9f13ffdf44127e6fab4fbf024339"}, + {file = "debugpy-1.8.14-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d937d93ae4fa51cdc94d3e865f535f185d5f9748efb41d0d49e33bf3365bd79"}, + {file = "debugpy-1.8.14-cp310-cp310-win32.whl", hash = "sha256:c442f20577b38cc7a9aafecffe1094f78f07fb8423c3dddb384e6b8f49fd2987"}, + {file = "debugpy-1.8.14-cp310-cp310-win_amd64.whl", hash = "sha256:f117dedda6d969c5c9483e23f573b38f4e39412845c7bc487b6f2648df30fe84"}, + {file = "debugpy-1.8.14-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:1b2ac8c13b2645e0b1eaf30e816404990fbdb168e193322be8f545e8c01644a9"}, + {file = "debugpy-1.8.14-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf431c343a99384ac7eab2f763980724834f933a271e90496944195318c619e2"}, + {file = "debugpy-1.8.14-cp311-cp311-win32.whl", hash = "sha256:c99295c76161ad8d507b413cd33422d7c542889fbb73035889420ac1fad354f2"}, + {file = "debugpy-1.8.14-cp311-cp311-win_amd64.whl", hash = "sha256:7816acea4a46d7e4e50ad8d09d963a680ecc814ae31cdef3622eb05ccacf7b01"}, + {file = "debugpy-1.8.14-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:8899c17920d089cfa23e6005ad9f22582fd86f144b23acb9feeda59e84405b84"}, + {file = "debugpy-1.8.14-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6bb5c0dcf80ad5dbc7b7d6eac484e2af34bdacdf81df09b6a3e62792b722826"}, + {file = "debugpy-1.8.14-cp312-cp312-win32.whl", hash = "sha256:281d44d248a0e1791ad0eafdbbd2912ff0de9eec48022a5bfbc332957487ed3f"}, + {file = "debugpy-1.8.14-cp312-cp312-win_amd64.whl", hash = "sha256:5aa56ef8538893e4502a7d79047fe39b1dae08d9ae257074c6464a7b290b806f"}, + {file = "debugpy-1.8.14-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:329a15d0660ee09fec6786acdb6e0443d595f64f5d096fc3e3ccf09a4259033f"}, + {file = "debugpy-1.8.14-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f920c7f9af409d90f5fd26e313e119d908b0dd2952c2393cd3247a462331f15"}, + {file = "debugpy-1.8.14-cp313-cp313-win32.whl", hash = "sha256:3784ec6e8600c66cbdd4ca2726c72d8ca781e94bce2f396cc606d458146f8f4e"}, + {file = "debugpy-1.8.14-cp313-cp313-win_amd64.whl", hash = "sha256:684eaf43c95a3ec39a96f1f5195a7ff3d4144e4a18d69bb66beeb1a6de605d6e"}, + {file = "debugpy-1.8.14-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:d5582bcbe42917bc6bbe5c12db1bffdf21f6bfc28d4554b738bf08d50dc0c8c3"}, + {file = "debugpy-1.8.14-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5349b7c3735b766a281873fbe32ca9cca343d4cc11ba4a743f84cb854339ff35"}, + {file = "debugpy-1.8.14-cp38-cp38-win32.whl", hash = "sha256:7118d462fe9724c887d355eef395fae68bc764fd862cdca94e70dcb9ade8a23d"}, + {file = "debugpy-1.8.14-cp38-cp38-win_amd64.whl", hash = "sha256:d235e4fa78af2de4e5609073972700523e372cf5601742449970110d565ca28c"}, + {file = "debugpy-1.8.14-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:413512d35ff52c2fb0fd2d65e69f373ffd24f0ecb1fac514c04a668599c5ce7f"}, + {file = "debugpy-1.8.14-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c9156f7524a0d70b7a7e22b2e311d8ba76a15496fb00730e46dcdeedb9e1eea"}, + {file = "debugpy-1.8.14-cp39-cp39-win32.whl", hash = "sha256:b44985f97cc3dd9d52c42eb59ee9d7ee0c4e7ecd62bca704891f997de4cef23d"}, + {file = 
"debugpy-1.8.14-cp39-cp39-win_amd64.whl", hash = "sha256:b1528cfee6c1b1c698eb10b6b096c598738a8238822d218173d21c3086de8123"}, + {file = "debugpy-1.8.14-py2.py3-none-any.whl", hash = "sha256:5cd9a579d553b6cb9759a7908a41988ee6280b961f24f63336835d9418216a20"}, + {file = "debugpy-1.8.14.tar.gz", hash = "sha256:7cd287184318416850aa8b60ac90105837bb1e59531898c07569d197d2ed5322"}, ] [[package]] name = "decorator" -version = "5.1.1" +version = "5.2.1" description = "Decorators for Humans" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" groups = ["main", "test"] files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, ] [[package]] @@ -894,26 +930,26 @@ files = [ [[package]] name = "distlib" -version = "0.3.8" +version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" groups = ["main"] files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] name = "docutils" -version = "0.20.1" +version = "0.21.2" description = "Docutils -- Python Documentation Utilities" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" groups = ["docs"] files = [ - {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, - {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, ] [[package]] @@ -930,17 +966,20 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.2" +version = "1.3.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" groups = ["main", "test"] markers = "python_version < \"3.11\"" files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + [package.extras] test = ["pytest (>=6)"] @@ -961,14 +1000,14 @@ tests = ["asttokens", "littleutils", 
"pytest", "rich ; python_version >= \"3.11\ [[package]] name = "fastjsonschema" -version = "2.20.0" +version = "2.21.1" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" groups = ["test"] files = [ - {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, - {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, + {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, + {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, ] [package.extras] @@ -976,19 +1015,19 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "filelock" -version = "3.16.1" +version = "3.18.0" description = "A platform independent file lock." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, - {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] [[package]] @@ -1005,101 +1044,128 @@ files = [ [[package]] name = "frozenlist" -version = "1.4.1" +version = "1.6.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = 
"frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e6e558ea1e47fd6fa8ac9ccdad403e5dd5ecc6ed8dda94343056fa4277d5c65e"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4b3cd7334a4bbc0c472164f3744562cb72d05002cc6fcf58adb104630bbc352"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9799257237d0479736e2b4c01ff26b5c7f7694ac9692a426cb717f3dc02fff9b"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a7bb0fe1f7a70fb5c6f497dc32619db7d2cdd53164af30ade2f34673f8b1fc"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:36d2fc099229f1e4237f563b2a3e0ff7ccebc3999f729067ce4e64a97a7f2869"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f27a9f9a86dcf00708be82359db8de86b80d029814e6693259befe82bb58a106"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75ecee69073312951244f11b8627e3700ec2bfe07ed24e3a685a5979f0412d24"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2c7d5aa19714b1b01a0f515d078a629e445e667b9da869a3cd0e6fe7dec78bd"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69bbd454f0fb23b51cadc9bdba616c9678e4114b6f9fa372d462ff2ed9323ec8"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7daa508e75613809c7a57136dec4871a21bca3080b3a8fc347c50b187df4f00c"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:89ffdb799154fd4d7b85c56d5fa9d9ad48946619e0eb95755723fffa11022d75"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:920b6bd77d209931e4c263223381d63f76828bec574440f29eb497cf3394c249"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d3ceb265249fb401702fce3792e6b44c1166b9319737d21495d3611028d95769"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:52021b528f1571f98a7d4258c58aa8d4b1a96d4f01d00d51f1089f2e0323cb02"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0f2ca7810b809ed0f1917293050163c7654cefc57a49f337d5cd9de717b8fad3"}, + {file = "frozenlist-1.6.0-cp310-cp310-win32.whl", hash = "sha256:0e6f8653acb82e15e5443dba415fb62a8732b68fe09936bb6d388c725b57f812"}, + {file = "frozenlist-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a39819a5a3e84304cd286e3dc62a549fe60985415851b3337b6f5cc91907f1"}, + {file = 
"frozenlist-1.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae8337990e7a45683548ffb2fee1af2f1ed08169284cd829cdd9a7fa7470530d"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c952f69dd524558694818a461855f35d36cc7f5c0adddce37e962c85d06eac0"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f5fef13136c4e2dee91bfb9a44e236fff78fc2cd9f838eddfc470c3d7d90afe"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:716bbba09611b4663ecbb7cd022f640759af8259e12a6ca939c0a6acd49eedba"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7b8c4dc422c1a3ffc550b465090e53b0bf4839047f3e436a34172ac67c45d595"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b11534872256e1666116f6587a1592ef395a98b54476addb5e8d352925cb5d4a"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6eceb88aaf7221f75be6ab498dc622a151f5f88d536661af3ffc486245a626"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62c828a5b195570eb4b37369fcbbd58e96c905768d53a44d13044355647838ff"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c6bd2c6399920c9622362ce95a7d74e7f9af9bfec05fff91b8ce4b9647845a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49ba23817781e22fcbd45fd9ff2b9b8cdb7b16a42a4851ab8025cae7b22e96d0"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:431ef6937ae0f853143e2ca67d6da76c083e8b1fe3df0e96f3802fd37626e606"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9d124b38b3c299ca68433597ee26b7819209cb8a3a9ea761dfe9db3a04bba584"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:118e97556306402e2b010da1ef21ea70cb6d6122e580da64c056b96f524fbd6a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb3b309f1d4086b5533cf7bbcf3f956f0ae6469664522f1bde4feed26fba60f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54dece0d21dce4fdb188a1ffc555926adf1d1c516e493c2914d7c370e454bc9e"}, + {file = "frozenlist-1.6.0-cp311-cp311-win32.whl", hash = "sha256:654e4ba1d0b2154ca2f096bed27461cf6160bc7f504a7f9a9ef447c293caf860"}, + {file = "frozenlist-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e911391bffdb806001002c1f860787542f45916c3baf764264a52765d5a5603"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8"}, + 
{file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770"}, + {file = "frozenlist-1.6.0-cp312-cp312-win32.whl", hash = "sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc"}, + {file = "frozenlist-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85"}, + {file = 
"frozenlist-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e"}, + {file = "frozenlist-1.6.0-cp313-cp313-win32.whl", hash = "sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4"}, + {file = "frozenlist-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911"}, + {file = 
"frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win32.whl", hash = "sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:536a1236065c29980c15c7229fbb830dedf809708c10e159b8136534233545f0"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ed5e3a4462ff25ca84fb09e0fada8ea267df98a450340ead4c91b44857267d70"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e19c0fc9f4f030fcae43b4cdec9e8ab83ffe30ec10c79a4a43a04d1af6c5e1ad"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c608f833897501dac548585312d73a7dca028bf3b8688f0d712b7acfaf7fb3"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0dbae96c225d584f834b8d3cc688825911960f003a85cb0fd20b6e5512468c42"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:625170a91dd7261a1d1c2a0c1a353c9e55d21cd67d0852185a5fef86587e6f5f"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1db8b2fc7ee8a940b547a14c10e56560ad3ea6499dc6875c354e2335812f739d"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4da6fc43048b648275a220e3a61c33b7fff65d11bdd6dcb9d9c145ff708b804c"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef8e7e8f2f3820c5f175d70fdd199b79e417acf6c72c5d0aa8f63c9f721646f"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa733d123cc78245e9bb15f29b44ed9e5780dc6867cfc4e544717b91f980af3b"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ba7f8d97152b61f22d7f59491a781ba9b177dd9f318486c5fbc52cde2db12189"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:56a0b8dd6d0d3d971c91f1df75e824986667ccce91e20dca2023683814344791"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5c9e89bf19ca148efcc9e3c44fd4c09d5af85c8a7dd3dbd0da1cb83425ef4983"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1330f0a4376587face7637dfd245380a57fe21ae8f9d360c1c2ef8746c4195fa"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2187248203b59625566cac53572ec8c2647a140ee2738b4e36772930377a533c"}, + {file = "frozenlist-1.6.0-cp39-cp39-win32.whl", hash = "sha256:2b8cf4cfea847d6c12af06091561a89740f1f67f331c3fa8623391905e878530"}, + {file = "frozenlist-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:1255d5d64328c5a0d066ecb0f02034d086537925f1f04b50b1ae60d37afbf572"}, + {file = "frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191"}, + {file = "frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68"}, ] [[package]] name = "fsspec" -version = "2024.9.0" +version = "2025.3.2" description = "File-system specification" optional = false -python-versions = ">=3.8" 
+python-versions = ">=3.9" groups = ["main"] files = [ - {file = "fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b"}, - {file = "fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8"}, + {file = "fsspec-2025.3.2-py3-none-any.whl", hash = "sha256:2daf8dc3d1dfa65b6aa37748d112773a7a08416f6c70d96b264c96476ecaf711"}, + {file = "fsspec-2025.3.2.tar.gz", hash = "sha256:e52c77ef398680bbd6a98c0e628fbc469491282981209907bbc8aea76a04fdc6"}, ] [package.extras] @@ -1126,44 +1192,48 @@ sftp = ["paramiko"] smb = ["smbprotocol"] ssh = ["paramiko"] test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"] -test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] +test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"] test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"] tqdm = ["tqdm"] [[package]] name = "google-api-core" -version = "2.20.0" +version = "2.24.2" description = "Google API client core library" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "google_api_core-2.20.0-py3-none-any.whl", hash = "sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a"}, - {file = "google_api_core-2.20.0.tar.gz", hash = "sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f"}, + {file = "google_api_core-2.24.2-py3-none-any.whl", hash = "sha256:810a63ac95f3c441b7c0e43d344e372887f62ce9071ba972eacf32672e072de9"}, + {file = "google_api_core-2.24.2.tar.gz", hash = "sha256:81718493daf06d96d6bc76a91c23874dbf2fac0adbbf542831b805ee6e974696"}, ] [package.dependencies] -google-auth = ">=2.14.1,<3.0.dev0" -googleapis-common-protos = ">=1.56.2,<2.0.dev0" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" -requests = ">=2.18.0,<3.0.0.dev0" +google-auth = ">=2.14.1,<3.0.0" +googleapis-common-protos = ">=1.56.2,<2.0.0" +proto-plus = [ + {version = ">=1.22.3,<2.0.0"}, + {version = ">=1.25.0,<2.0.0", markers = "python_version >= \"3.13\""}, +] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" +requests = ">=2.18.0,<3.0.0" [package.extras] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"] grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0) ; python_version >= \"3.11\""] grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = 
"google-auth" -version = "2.35.0" +version = "2.40.1" description = "Google Authentication Library" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "google_auth-2.35.0-py2.py3-none-any.whl", hash = "sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f"}, - {file = "google_auth-2.35.0.tar.gz", hash = "sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a"}, + {file = "google_auth-2.40.1-py2.py3-none-any.whl", hash = "sha256:ed4cae4f5c46b41bae1d19c036e06f6c371926e97b19e816fc854eff811974ee"}, + {file = "google_auth-2.40.1.tar.gz", hash = "sha256:58f0e8416a9814c1d86c9b7f6acf6816b51aba167b2c76821965271bac275540"}, ] [package.dependencies] @@ -1172,142 +1242,141 @@ pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] enterprise-cert = ["cryptography", "pyopenssl"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] [[package]] name = "googleapis-common-protos" -version = "1.65.0" +version = "1.70.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, - {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, + {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, + {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, ] [package.dependencies] -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] [[package]] name = "grpcio" -version = "1.66.2" +version = "1.71.0" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "grpcio-1.66.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa"}, - {file = "grpcio-1.66.2-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7"}, - {file = 
"grpcio-1.66.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b"}, - {file = "grpcio-1.66.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73"}, - {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf"}, - {file = "grpcio-1.66.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50"}, - {file = "grpcio-1.66.2-cp310-cp310-win32.whl", hash = "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39"}, - {file = "grpcio-1.66.2-cp310-cp310-win_amd64.whl", hash = "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249"}, - {file = "grpcio-1.66.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8"}, - {file = "grpcio-1.66.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4"}, - {file = "grpcio-1.66.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a"}, - {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae"}, - {file = "grpcio-1.66.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01"}, - {file = "grpcio-1.66.2-cp311-cp311-win32.whl", hash = "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8"}, - {file = "grpcio-1.66.2-cp311-cp311-win_amd64.whl", hash = "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d"}, - {file = "grpcio-1.66.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf"}, - {file = "grpcio-1.66.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7"}, - {file = "grpcio-1.66.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd"}, - {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee"}, - {file = "grpcio-1.66.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c"}, - {file = "grpcio-1.66.2-cp312-cp312-win32.whl", hash = 
"sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453"}, - {file = "grpcio-1.66.2-cp312-cp312-win_amd64.whl", hash = "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679"}, - {file = "grpcio-1.66.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d"}, - {file = "grpcio-1.66.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7"}, - {file = "grpcio-1.66.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46"}, - {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a"}, - {file = "grpcio-1.66.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b"}, - {file = "grpcio-1.66.2-cp313-cp313-win32.whl", hash = "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75"}, - {file = "grpcio-1.66.2-cp313-cp313-win_amd64.whl", hash = "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf"}, - {file = "grpcio-1.66.2-cp38-cp38-linux_armv7l.whl", hash = "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3"}, - {file = "grpcio-1.66.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c"}, - {file = "grpcio-1.66.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd"}, - {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8"}, - {file = "grpcio-1.66.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec"}, - {file = "grpcio-1.66.2-cp38-cp38-win32.whl", hash = "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3"}, - {file = "grpcio-1.66.2-cp38-cp38-win_amd64.whl", hash = "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c"}, - {file = "grpcio-1.66.2-cp39-cp39-linux_armv7l.whl", hash = "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d"}, - {file = "grpcio-1.66.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e"}, - {file = "grpcio-1.66.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc"}, - {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e"}, - {file = "grpcio-1.66.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e"}, - {file = "grpcio-1.66.2-cp39-cp39-win32.whl", hash = "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7"}, - {file = "grpcio-1.66.2-cp39-cp39-win_amd64.whl", hash = "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987"}, - {file = "grpcio-1.66.2.tar.gz", hash = "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231"}, + {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, + {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, + {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, + {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, + {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, + {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, + {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = 
"sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, + {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, + {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, + {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, + {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, + {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, + {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, + {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, + {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, + {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, + {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, + {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = 
"sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, + {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, + {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, + {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.66.2)"] +protobuf = ["grpcio-tools (>=1.71.0)"] [[package]] name = "h11" -version = "0.14.0" +version = "0.16.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["test"] files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.9" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" groups = ["test"] files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, ] [package.dependencies] certifi = "*" -h11 = ">=0.13,<0.15" +h11 = ">=0.16" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" -version = "0.27.2" +version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" groups = ["test"] files = [ - {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, - {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] @@ -1315,7 +1384,6 @@ anyio = "*" certifi = "*" httpcore = "==1.*" idna = "*" -sniffio = "*" [package.extras] brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] @@ -1353,15 +1421,15 @@ files = [ [[package]] name = "importlib-metadata" -version = "8.5.0" +version = "8.7.0" description = "Read metadata from Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["docs", "test"] markers = "python_version == \"3.9\"" files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, ] [package.dependencies] @@ -1373,19 +1441,19 @@ cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] name = "iniconfig" -version = "2.0.0" +version = "2.1.0" description = "brain-dead simple config-ini parsing" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["test"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] [[package]] @@ -1429,6 +1497,7 @@ description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.9" groups = ["main", "test"] +markers = "python_version == \"3.9\"" files = [ {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, @@ -1460,6 +1529,96 @@ qtconsole = ["qtconsole"] test = ["pickleshare", "pytest 
(<7.1)", "pytest-asyncio (<0.22)", "testpath"] test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] +[[package]] +name = "ipython" +version = "8.36.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +groups = ["main", "test"] +markers = "python_version == \"3.10\"" +files = [ + {file = "ipython-8.36.0-py3-none-any.whl", hash = "sha256:12b913914d010dcffa2711505ec8be4bf0180742d97f1e5175e51f22086428c1"}, + {file = "ipython-8.36.0.tar.gz", hash = "sha256:24658e9fe5c5c819455043235ba59cfffded4a35936eefceceab6b192f7092ff"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt_toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack_data = "*" +traitlets = ">=5.13.0" +typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} + +[package.extras] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "ipython" +version = "9.2.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.11" +groups = ["main", "test"] +markers = "python_version >= \"3.11\"" +files = [ + {file = "ipython-9.2.0-py3-none-any.whl", hash = "sha256:fef5e33c4a1ae0759e0bba5917c9db4eb8c53fee917b6a526bd973e1ca5159f6"}, + {file = "ipython-9.2.0.tar.gz", hash = "sha256:62a9373dbc12f28f9feaf4700d052195bf89806279fc8ca11f3f54017d04751b"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +ipython-pygments-lexers = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt_toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack_data = "*" +traitlets = ">=5.13.0" +typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} + +[package.extras] +all = ["ipython[doc,matplotlib,test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinx_toml (==0.0.4)", "typing_extensions"] +matplotlib = ["matplotlib"] +test = ["packaging", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipykernel", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "nbclient", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = 
"ipython-pygments-lexers" +version = "1.1.1" +description = "Defines a variety of Pygments lexers for highlighting IPython code." +optional = false +python-versions = ">=3.8" +groups = ["main", "test"] +markers = "python_version >= \"3.11\"" +files = [ + {file = "ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c"}, + {file = "ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81"}, +] + +[package.dependencies] +pygments = "*" + [[package]] name = "ipywidgets" version = "8.1.2" @@ -1499,36 +1658,35 @@ arrow = ">=0.15.0" [[package]] name = "jedi" -version = "0.19.1" +version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." optional = false python-versions = ">=3.6" groups = ["main", "test"] files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, ] [package.dependencies] -parso = ">=0.8.3,<0.9.0" +parso = ">=0.8.4,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] [[package]] name = "jinja2" -version = "3.1.5" +version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" -groups = ["main", "docs", "test"] +groups = ["docs", "test"] files = [ - {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, - {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] -markers = {main = "sys_platform != \"win32\""} [package.dependencies] MarkupSafe = ">=2.0" @@ -1538,16 +1696,19 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "json5" -version = "0.9.25" +version = "0.12.0" description = "A Python implementation of the JSON5 data format." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.8.0" groups = ["test"] files = [ - {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, - {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, + {file = "json5-0.12.0-py3-none-any.whl", hash = "sha256:6d37aa6c08b0609f16e1ec5ff94697e2cbbfbad5ac112afa05794da9ab7810db"}, + {file = "json5-0.12.0.tar.gz", hash = "sha256:0b4b6ff56801a1c7dc817b0241bca4ce474a0e6a163bfef3fc594d3fd263ff3a"}, ] +[package.extras] +dev = ["build (==1.2.2.post1)", "coverage (==7.5.4) ; python_version < \"3.9\"", "coverage (==7.8.0) ; python_version >= \"3.9\"", "mypy (==1.14.1) ; python_version < \"3.9\"", "mypy (==1.15.0) ; python_version >= \"3.9\"", "pip (==25.0.1)", "pylint (==3.2.7) ; python_version < \"3.9\"", "pylint (==3.3.6) ; python_version >= \"3.9\"", "ruff (==0.11.2)", "twine (==6.1.0)", "uv (==0.6.11)"] + [[package]] name = "jsonpointer" version = "3.0.0" @@ -1592,14 +1753,14 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2023.12.1" +version = "2025.4.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "test"] files = [ - {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, - {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, + {file = "jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af"}, + {file = "jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608"}, ] [package.dependencies] @@ -1652,18 +1813,19 @@ test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout" [[package]] name = "jupyter-events" -version = "0.10.0" +version = "0.12.0" description = "Jupyter Event System library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["test"] files = [ - {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, - {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, + {file = "jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb"}, + {file = "jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b"}, ] [package.dependencies] jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} +packaging = "*" python-json-logger = ">=2.0.4" pyyaml = ">=5.3" referencing = "*" @@ -1673,7 +1835,7 @@ traitlets = ">=5.3" [package.extras] cli = ["click", "rich"] -docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme (>=0.16)", "sphinx (>=8)", "sphinxcontrib-spelling"] test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"] [[package]] @@ -1694,14 +1856,14 @@ jupyter-server = ">=1.1.2" [[package]] name = 
"jupyter-server" -version = "2.14.2" +version = "2.15.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["test"] files = [ - {file = "jupyter_server-2.14.2-py3-none-any.whl", hash = "sha256:47ff506127c2f7851a17bf4713434208fc490955d0e8632e95014a9a9afbeefd"}, - {file = "jupyter_server-2.14.2.tar.gz", hash = "sha256:66095021aa9638ced276c248b1d81862e4c50f292d575920bbe960de1c56b12b"}, + {file = "jupyter_server-2.15.0-py3-none-any.whl", hash = "sha256:872d989becf83517012ee669f09604aa4a28097c0bd90b2f424310156c2cdae3"}, + {file = "jupyter_server-2.15.0.tar.gz", hash = "sha256:9d446b8697b4f7337a1b7cdcac40778babdd93ba614b6d68ab1c0c918f1c4084"}, ] [package.dependencies] @@ -1710,7 +1872,7 @@ argon2-cffi = ">=21.1" jinja2 = ">=3.0.3" jupyter-client = ">=7.4.4" jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" -jupyter-events = ">=0.9.0" +jupyter-events = ">=0.11.0" jupyter-server-terminals = ">=0.4.4" nbconvert = ">=6.4.4" nbformat = ">=5.3.0" @@ -1826,26 +1988,26 @@ test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-v [[package]] name = "jupyterlab-widgets" -version = "3.0.13" +version = "3.0.15" description = "Jupyter interactive widgets for JupyterLab" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"}, - {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"}, + {file = "jupyterlab_widgets-3.0.15-py3-none-any.whl", hash = "sha256:d59023d7d7ef71400d51e6fee9a88867f6e65e10a4201605d2d7f3e8f012a31c"}, + {file = "jupyterlab_widgets-3.0.15.tar.gz", hash = "sha256:2920888a0c2922351a9202817957a68c07d99673504d6cd37345299e971bb08b"}, ] [[package]] name = "kubernetes" -version = "31.0.0" +version = "32.0.1" description = "Kubernetes python client" optional = false python-versions = ">=3.6" groups = ["main"] files = [ - {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, - {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, + {file = "kubernetes-32.0.1-py2.py3-none-any.whl", hash = "sha256:35282ab8493b938b08ab5526c7ce66588232df00ef5e1dbe88a419107dc10998"}, + {file = "kubernetes-32.0.1.tar.gz", hash = "sha256:42f43d49abd437ada79a79a16bd48a604d3471a117a8347e87db693f2ba0ba28"}, ] [package.dependencies] @@ -1891,74 +2053,74 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=3.7" -groups = ["main", "docs", "test"] +python-versions = ">=3.9" +groups = ["docs", "test"] files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] -markers = {main = "sys_platform != \"win32\""} + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = 
"MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = 
"sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = 
"MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] [[package]] name = "matplotlib-inline" @@ -1987,75 +2149,21 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] -[[package]] -name = "memray" -version = "1.10.0" -description = "A memory profiler for Python applications" -optional = false -python-versions = ">=3.7.0" -groups = ["main"] -markers = "sys_platform != \"win32\"" -files = [ - {file = "memray-1.10.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:843a688877691746f9d1835cfa8a65139948471bdd78720435808d20bc30a1cc"}, - {file = "memray-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6937d7ef67d18ccc01c3250cdf3b4ef1445b859ee8756f09e3d11bd3ff0c7d67"}, - {file = "memray-1.10.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:23e8c402625cfb32d0e9edb5ec0945f3e5e54bc6b0c5699f6284302082b80bd4"}, - {file = "memray-1.10.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f16c5c8730b616613dc8bafe32649ca6bd7252606251eb00148582011758d0b5"}, - {file = "memray-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7aeb47174c42e99740a8e2b3b6fe0932c95d987258d48a746974ead19176c26"}, - {file = "memray-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2ce59ef485db3634de98b3a026d2450fc0a875e3a58a9ea85f7a89098841defe"}, - {file = "memray-1.10.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:53a8f66af18b1f3bcf5c9f3c95ae4134dd675903a38f9d0e6341b7bca01b63d0"}, - {file = "memray-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9627184c926252c8f719c301f1fefe970f0d033c643a6448b93fed2889d1ea94"}, - {file = "memray-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3a14960838d89a91747885897d34134afb65883cc3b0ed7ff30fe1af00f9fe6"}, - {file = "memray-1.10.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22f2a47871c172a0539bd72737bb6b294fc10c510464066b825d90fcd3bb4916"}, - {file = 
"memray-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c401c57f49c4c5f1fecaee1e746f537cdc6680da05fb963dc143bd08ee109bf"}, - {file = "memray-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ce22a887a585ef5020896de89ffc793e531b65ccc81fbafcc7886010c2c562b3"}, - {file = "memray-1.10.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:b75040f28e8678d0e9c4907d55c95cf26db8ef5adc9941a228f1b280a9efd9c0"}, - {file = "memray-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:95e563d9c976e429ad597ad2720d95cebbe8bac891a3082465439143e2740772"}, - {file = "memray-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:663d463e89a64bae4a6b2f8c837d11a3d094834442d536a4165e1d31899a3500"}, - {file = "memray-1.10.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a21745fb516b7a6efcd40aa7487c59e9313fcfc782d0193fcfcf00b48426874"}, - {file = "memray-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf6d683c4f8d25c6ad06ae18715f218983c5eb86803953615e902d632fdf6ec1"}, - {file = "memray-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b311e91203be71e1a0ce5e4f978137765bcb1045f3bf5646129c83c5b96ab3c"}, - {file = "memray-1.10.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:68bd8df023c8a32f44c11d997e5c536837e27c0955daf557d3a377edd55a1dd3"}, - {file = "memray-1.10.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:322ed0b69014a0969b777768d461a785203f81f9864386b666b5b26645d9c294"}, - {file = "memray-1.10.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e985fb7646b0475c303919d19211d2aa54e5a9e2cd2a102472299be5dbebd3"}, - {file = "memray-1.10.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4eba29179772b4a2e440a065b320b03bc2e73fe2648bdf7936aa3b9a086fab4a"}, - {file = "memray-1.10.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:b681519357d94f5f0857fbc6029e7c44d3f41436109e955a14fd312d8317bc35"}, - {file = "memray-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8196c684f1be8fe423e5cdd2356d4255a2cb482a1f3e89612b70d2a2862cf5bb"}, - {file = "memray-1.10.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:898acd60f57a10dc5aaf1fd64aa2f821f0420114f3f60c3058083788603f173a"}, - {file = "memray-1.10.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6fd13ef666c7fced9768d1cfabf71dc6dfa6724935a8dff463495ac2dc5e13a4"}, - {file = "memray-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e356af93e3b031c83957e9ac1a653f5aaba5df1e357dd17142f5ed19bb3dc660"}, - {file = "memray-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92c372cb262eddd23049f945ca9527f0e4cc7c40a070aade1802d066f680885b"}, - {file = "memray-1.10.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:38393c86ce6d0a08e6ec0eb1401d49803b7c0c950c2565386751cdc81568cba8"}, - {file = "memray-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3a8bb7fbd8303c4f0017ba7faef6b88f904cda2931ed667cbf3b98f024b3bc44"}, - {file = "memray-1.10.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d56f37a34125684746c13d24bd7a3fb17549b0bb355eb50969eb11e05e3ba62"}, - {file = "memray-1.10.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:85c32d6613d81b075f740e398c4d653e0803cd48e82c33dcd584c109d6782666"}, - {file = "memray-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:566602b2143e06b3d592901d98c52ce4599e71aa2555146eeb5cec03506f9498"}, - {file = "memray-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:391aac6c9f744528d3186bc82d708a1acc83525778f804045d7c96f860f8ec98"}, - {file = "memray-1.10.0.tar.gz", hash = "sha256:38322e052b882790993412f1840517a51818aa55c47037f69915b2007f2c4cee"}, -] - -[package.dependencies] -jinja2 = ">=2.9" -rich = ">=11.2.0" - -[package.extras] -benchmark = ["asv"] -dev = ["Cython", "IPython", "asv", "black", "bump2version", "check-manifest", "flake8", "furo", "greenlet ; python_version < \"3.12\"", "ipython", "isort", "mypy", "pytest", "pytest-cov", "setuptools ; python_version >= \"3.12\"", "sphinx", "sphinx-argparse", "towncrier"] -docs = ["IPython", "bump2version", "furo", "sphinx", "sphinx-argparse", "towncrier"] -lint = ["black", "check-manifest", "flake8", "isort", "mypy"] -test = ["Cython", "greenlet ; python_version < \"3.12\"", "ipython", "pytest", "pytest-cov", "setuptools ; python_version >= \"3.12\""] - [[package]] name = "mistune" -version = "3.0.2" +version = "3.1.3" description = "A sane and fast Markdown parser with useful plugins and renderers" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["test"] files = [ - {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, - {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, + {file = "mistune-3.1.3-py3-none-any.whl", hash = "sha256:1a32314113cff28aa6432e99e522677c8587fd83e3d51c29b82a52409c842bd9"}, + {file = "mistune-3.1.3.tar.gz", hash = "sha256:a7035c21782b2becb6be62f8f25d3df81ccb4d6fa477a6525b15af06539f02a0"}, ] +[package.dependencies] +typing-extensions = {version = "*", markers = "python_version < \"3.11\""} + [[package]] name = "msgpack" version = "1.1.0" @@ -2132,104 +2240,116 @@ files = [ [[package]] name = "multidict" -version = "6.1.0" +version = "6.4.3" description = "multidict implementation" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, - {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, - {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, - {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, - {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, - {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, - {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, - {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, - {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, - {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, - {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, - {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, - {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, - {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, - {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, - {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, - {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, - {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, - {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, - {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, - {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, - {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, - {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, - {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, - {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, - {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, - {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, 
- {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, - {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, - {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, - {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, - {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5"}, + {file = "multidict-6.4.3-cp310-cp310-win32.whl", hash = "sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e"}, + {file = "multidict-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7"}, + {file = "multidict-6.4.3-cp311-cp311-win32.whl", hash = "sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378"}, + {file = "multidict-6.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a"}, + {file = "multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124"}, + {file = "multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", 
hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8"}, + {file = "multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3"}, + {file = "multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4"}, + {file = "multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5"}, + {file = "multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df"}, + {file = "multidict-6.4.3-cp39-cp39-win32.whl", hash = "sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f"}, + {file = "multidict-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897"}, + {file = "multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9"}, + {file = "multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, ] [package.dependencies] @@ -2237,14 +2357,14 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} [[package]] name = "nbclient" -version = "0.10.0" +version = "0.10.2" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" groups = ["test"] files = [ - {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, - {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, + {file = "nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d"}, + {file = "nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193"}, ] [package.dependencies] @@ -2255,24 +2375,24 @@ traitlets = ">=5.4" [package.extras] dev = ["pre-commit"] -docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] -test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] +docs = ["autodoc-traits", "flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "mock", "moto", "myst-parser", "nbconvert (>=7.1.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling", "testpath", "xmltodict"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.1.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] [[package]] name = "nbconvert" -version = "7.16.4" +version = "7.16.6" description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." 
optional = false python-versions = ">=3.8" groups = ["test"] files = [ - {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, - {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, + {file = "nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b"}, + {file = "nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582"}, ] [package.dependencies] beautifulsoup4 = "*" -bleach = "!=5.0.0" +bleach = {version = "!=5.0.0", extras = ["css"]} defusedxml = "*" importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} jinja2 = ">=3.0" @@ -2285,7 +2405,6 @@ nbformat = ">=5.7" packaging = "*" pandocfilters = ">=1.4.1" pygments = ">=2.4.1" -tinycss2 = "*" traitlets = ">=5.1" [package.extras] @@ -2356,6 +2475,7 @@ description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" groups = ["main"] +markers = "python_version == \"3.9\"" files = [ {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, @@ -2404,6 +2524,72 @@ files = [ {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, ] +[[package]] +name = "numpy" +version = "2.2.5" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.10\"" +files = [ + {file = "numpy-2.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f4a922da1729f4c40932b2af4fe84909c7a6e167e6e99f71838ce3a29f3fe26"}, + {file = "numpy-2.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6f91524d31b34f4a5fee24f5bc16dcd1491b668798b6d85585d836c1e633a6a"}, + {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:19f4718c9012e3baea91a7dba661dcab2451cda2550678dc30d53acb91a7290f"}, + {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:eb7fd5b184e5d277afa9ec0ad5e4eb562ecff541e7f60e69ee69c8d59e9aeaba"}, + {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6413d48a9be53e183eb06495d8e3b006ef8f87c324af68241bbe7a39e8ff54c3"}, + {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7451f92eddf8503c9b8aa4fe6aa7e87fd51a29c2cfc5f7dbd72efde6c65acf57"}, + {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0bcb1d057b7571334139129b7f941588f69ce7c4ed15a9d6162b2ea54ded700c"}, + {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:36ab5b23915887543441efd0417e6a3baa08634308894316f446027611b53bf1"}, + {file = "numpy-2.2.5-cp310-cp310-win32.whl", hash = "sha256:422cc684f17bc963da5f59a31530b3936f57c95a29743056ef7a7903a5dbdf88"}, + {file = "numpy-2.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:e4f0b035d9d0ed519c813ee23e0a733db81ec37d2e9503afbb6e54ccfdee0fa7"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c42365005c7a6c42436a54d28c43fe0e01ca11eb2ac3cefe796c25a5f98e5e9b"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:498815b96f67dc347e03b719ef49c772589fb74b8ee9ea2c37feae915ad6ebda"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:6411f744f7f20081b1b4e7112e0f4c9c5b08f94b9f086e6f0adf3645f85d3a4d"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9de6832228f617c9ef45d948ec1cd8949c482238d68b2477e6f642c33a7b0a54"}, + {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:369e0d4647c17c9363244f3468f2227d557a74b6781cb62ce57cf3ef5cc7c610"}, + {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:262d23f383170f99cd9191a7c85b9a50970fe9069b2f8ab5d786eca8a675d60b"}, + {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa70fdbdc3b169d69e8c59e65c07a1c9351ceb438e627f0fdcd471015cd956be"}, + {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37e32e985f03c06206582a7323ef926b4e78bdaa6915095ef08070471865b906"}, + {file = "numpy-2.2.5-cp311-cp311-win32.whl", hash = "sha256:f5045039100ed58fa817a6227a356240ea1b9a1bc141018864c306c1a16d4175"}, + {file = "numpy-2.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:b13f04968b46ad705f7c8a80122a42ae8f620536ea38cf4bdd374302926424dd"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ee461a4eaab4f165b68780a6a1af95fb23a29932be7569b9fab666c407969051"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec31367fd6a255dc8de4772bd1658c3e926d8e860a0b6e922b615e532d320ddc"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:47834cde750d3c9f4e52c6ca28a7361859fcaf52695c7dc3cc1a720b8922683e"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:2c1a1c6ccce4022383583a6ded7bbcda22fc635eb4eb1e0a053336425ed36dfa"}, + {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d75f338f5f79ee23548b03d801d28a505198297534f62416391857ea0479571"}, + {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a801fef99668f309b88640e28d261991bfad9617c27beda4a3aec4f217ea073"}, + {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:abe38cd8381245a7f49967a6010e77dbf3680bd3627c0fe4362dd693b404c7f8"}, + {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a0ac90e46fdb5649ab6369d1ab6104bfe5854ab19b645bf5cda0127a13034ae"}, + {file = "numpy-2.2.5-cp312-cp312-win32.whl", hash = "sha256:0cd48122a6b7eab8f06404805b1bd5856200e3ed6f8a1b9a194f9d9054631beb"}, + {file = "numpy-2.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:ced69262a8278547e63409b2653b372bf4baff0870c57efa76c5703fd6543282"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:059b51b658f4414fff78c6d7b1b4e18283ab5fa56d270ff212d5ba0c561846f4"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47f9ed103af0bc63182609044b0490747e03bd20a67e391192dde119bf43d52f"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:261a1ef047751bb02f29dfe337230b5882b54521ca121fc7f62668133cb119c9"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4520caa3807c1ceb005d125a75e715567806fed67e315cea619d5ec6e75a4191"}, + {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d14b17b9be5f9c9301f43d2e2a4886a33b53f4e6fdf9ca2f4cc60aeeee76372"}, + {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2ba321813a00e508d5421104464510cc962a6f791aa2fca1c97b1e65027da80d"}, + {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4cbdef3ddf777423060c6f81b5694bad2dc9675f110c4b2a60dc0181543fac7"}, + {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:54088a5a147ab71a8e7fdfd8c3601972751ded0739c6b696ad9cb0343e21ab73"}, + {file = "numpy-2.2.5-cp313-cp313-win32.whl", hash = "sha256:c8b82a55ef86a2d8e81b63da85e55f5537d2157165be1cb2ce7cfa57b6aef38b"}, + {file = "numpy-2.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:d8882a829fd779f0f43998e931c466802a77ca1ee0fe25a3abe50278616b1471"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e8b025c351b9f0e8b5436cf28a07fa4ac0204d67b38f01433ac7f9b870fa38c6"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dfa94b6a4374e7851bbb6f35e6ded2120b752b063e6acdd3157e4d2bb922eba"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:97c8425d4e26437e65e1d189d22dff4a079b747ff9c2788057bfb8114ce1e133"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:352d330048c055ea6db701130abc48a21bec690a8d38f8284e00fab256dc1376"}, + {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b4c0773b6ada798f51f0f8e30c054d32304ccc6e9c5d93d46cb26f3d385ab19"}, + {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55f09e00d4dccd76b179c0f18a44f041e5332fd0e022886ba1c0bbf3ea4a18d0"}, + {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02f226baeefa68f7d579e213d0f3493496397d8f1cff5e2b222af274c86a552a"}, + {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c26843fd58f65da9491165072da2cccc372530681de481ef670dcc8e27cfb066"}, + {file = "numpy-2.2.5-cp313-cp313t-win32.whl", hash = "sha256:1a161c2c79ab30fe4501d5a2bbfe8b162490757cf90b7f05be8b80bc02f7bb8e"}, + {file = "numpy-2.2.5-cp313-cp313t-win_amd64.whl", hash = "sha256:d403c84991b5ad291d3809bace5e85f4bbf44a04bdc9a88ed2bb1807b3360bb8"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b4ea7e1cff6784e58fe281ce7e7f05036b3e1c89c6f922a6bfbc0a7e8768adbe"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d7543263084a85fbc09c704b515395398d31d6395518446237eac219eab9e55e"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0255732338c4fdd00996c0421884ea8a3651eea555c3a56b84892b66f696eb70"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d2e3bdadaba0e040d1e7ab39db73e0afe2c74ae277f5614dad53eadbecbbb169"}, + {file = "numpy-2.2.5.tar.gz", hash = "sha256:a9c0d994680cd991b1cb772e8b297340085466a6fe964bc9d4e80f5e2f43c291"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -2481,14 +2667,14 @@ files = [ [[package]] name = "packaging" -version = "24.1" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["main", "docs", "test"] files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = 
"sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] @@ -2592,14 +2778,14 @@ files = [ [[package]] name = "paramiko" -version = "3.5.0" +version = "3.5.1" description = "SSH2 protocol library" optional = false python-versions = ">=3.6" groups = ["main"] files = [ - {file = "paramiko-3.5.0-py3-none-any.whl", hash = "sha256:1fedf06b085359051cd7d0d270cebe19e755a8a921cc2ddbfa647fb0cd7d68f9"}, - {file = "paramiko-3.5.0.tar.gz", hash = "sha256:ad11e540da4f55cedda52931f1a3f812a8238a7af7f62a60de538cd80bb28124"}, + {file = "paramiko-3.5.1-py3-none-any.whl", hash = "sha256:43b9a0501fc2b5e70680388d9346cf252cfb7d00b0667c39e80eb43a408b8f61"}, + {file = "paramiko-3.5.1.tar.gz", hash = "sha256:b2c665bc45b2b215bd7d7f039901b14b067da00f3a11e6640995fd58f2664822"}, ] [package.dependencies] @@ -2635,7 +2821,7 @@ description = "Pexpect allows easy control of interactive console applications." optional = false python-versions = "*" groups = ["main", "test"] -markers = "sys_platform != \"win32\"" +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\" or python_version == \"3.9\" and sys_platform != \"win32\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -2646,20 +2832,20 @@ ptyprocess = ">=0.5" [[package]] name = "platformdirs" -version = "4.3.6" +version = "4.3.8" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "test"] files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, + {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, + {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] [[package]] name = "pluggy" @@ -2679,14 +2865,14 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "prometheus-client" -version = "0.21.0" +version = "0.21.1" description = "Python client for the Prometheus monitoring system." 
optional = false python-versions = ">=3.8" groups = ["main", "test"] files = [ - {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"}, - {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"}, + {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, + {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, ] [package.extras] @@ -2694,87 +2880,187 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.48" +version = "3.0.51" description = "Library for building powerful interactive command lines in Python" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8" groups = ["main", "test"] files = [ - {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, - {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, + {file = "prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07"}, + {file = "prompt_toolkit-3.0.51.tar.gz", hash = "sha256:931a162e3b27fc90c86f1b48bb1fb2c528c2761475e57c9c06de13311c7b54ed"}, ] [package.dependencies] wcwidth = "*" +[[package]] +name = "propcache" +version = "0.3.1" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136"}, + {file = "propcache-0.3.1-cp310-cp310-win32.whl", hash = "sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42"}, + {file = "propcache-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9"}, + {file = "propcache-0.3.1-cp311-cp311-win32.whl", hash = "sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005"}, + {file = "propcache-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7"}, + {file = "propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b"}, + {file = "propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef"}, + {file = "propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24"}, + {file = "propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a"}, + {file = "propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d"}, + {file = "propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = 
"sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe"}, + {file = "propcache-0.3.1-cp39-cp39-win32.whl", hash = "sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64"}, + {file = "propcache-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566"}, + {file = "propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40"}, + {file = "propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf"}, +] + [[package]] name = "proto-plus" -version = "1.24.0" -description = "Beautiful, Pythonic protocol buffers." 
+version = "1.26.1" +description = "Beautiful, Pythonic protocol buffers" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"}, - {file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"}, + {file = "proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66"}, + {file = "proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012"}, ] [package.dependencies] -protobuf = ">=3.19.0,<6.0.0dev" +protobuf = ">=3.19.0,<7.0.0" [package.extras] testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "5.28.2" +version = "6.30.2" description = "" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"}, - {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"}, - {file = "protobuf-5.28.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7"}, - {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f"}, - {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f"}, - {file = "protobuf-5.28.2-cp38-cp38-win32.whl", hash = "sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0"}, - {file = "protobuf-5.28.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3"}, - {file = "protobuf-5.28.2-cp39-cp39-win32.whl", hash = "sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36"}, - {file = "protobuf-5.28.2-cp39-cp39-win_amd64.whl", hash = "sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276"}, - {file = "protobuf-5.28.2-py3-none-any.whl", hash = "sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece"}, - {file = "protobuf-5.28.2.tar.gz", hash = "sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0"}, + {file = "protobuf-6.30.2-cp310-abi3-win32.whl", hash = "sha256:b12ef7df7b9329886e66404bef5e9ce6a26b54069d7f7436a0853ccdeb91c103"}, + {file = "protobuf-6.30.2-cp310-abi3-win_amd64.whl", hash = "sha256:7653c99774f73fe6b9301b87da52af0e69783a2e371e8b599b3e9cb4da4b12b9"}, + {file = "protobuf-6.30.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:0eb523c550a66a09a0c20f86dd554afbf4d32b02af34ae53d93268c1f73bc65b"}, + {file = "protobuf-6.30.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:50f32cc9fd9cb09c783ebc275611b4f19dfdfb68d1ee55d2f0c7fa040df96815"}, + {file = "protobuf-6.30.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4f6c687ae8efae6cf6093389a596548214467778146b7245e886f35e1485315d"}, + {file = "protobuf-6.30.2-cp39-cp39-win32.whl", hash = "sha256:524afedc03b31b15586ca7f64d877a98b184f007180ce25183d1a5cb230ee72b"}, + {file = "protobuf-6.30.2-cp39-cp39-win_amd64.whl", hash = "sha256:acec579c39c88bd8fbbacab1b8052c793efe83a0a5bd99db4a31423a25c0a0e2"}, + {file = "protobuf-6.30.2-py3-none-any.whl", hash = 
"sha256:ae86b030e69a98e08c77beab574cbcb9fff6d031d57209f574a5aea1445f4b51"}, + {file = "protobuf-6.30.2.tar.gz", hash = "sha256:35c859ae076d8c56054c25b59e5e59638d86545ed6e2b6efac6be0b6ea3ba048"}, ] [[package]] name = "psutil" -version = "6.0.0" -description = "Cross-platform lib for process and system monitoring in Python." +version = "7.0.0" +description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +python-versions = ">=3.6" groups = ["test"] files = [ - {file = "psutil-6.0.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a021da3e881cd935e64a3d0a20983bda0bb4cf80e4f74fa9bfcb1bc5785360c6"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:1287c2b95f1c0a364d23bc6f2ea2365a8d4d9b726a3be7294296ff7ba97c17f0"}, - {file = "psutil-6.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a9a3dbfb4de4f18174528d87cc352d1f788b7496991cca33c6996f40c9e3c92c"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6ec7588fb3ddaec7344a825afe298db83fe01bfaaab39155fa84cf1c0d6b13c3"}, - {file = "psutil-6.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:1e7c870afcb7d91fdea2b37c24aeb08f98b6d67257a5cb0a8bc3ac68d0f1a68c"}, - {file = "psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35"}, - {file = "psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1"}, - {file = "psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd"}, - {file = "psutil-6.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e8d0054fc88153ca0544f5c4d554d42e33df2e009c4ff42284ac9ebdef4132"}, - {file = "psutil-6.0.0-cp36-cp36m-win32.whl", hash = "sha256:fc8c9510cde0146432bbdb433322861ee8c3efbf8589865c8bf8d21cb30c4d14"}, - {file = "psutil-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:34859b8d8f423b86e4385ff3665d3f4d94be3cdf48221fbe476e883514fdb71c"}, - {file = "psutil-6.0.0-cp37-abi3-win32.whl", hash = "sha256:a495580d6bae27291324fe60cea0b5a7c23fa36a7cd35035a16d93bdcf076b9d"}, - {file = "psutil-6.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:33ea5e1c975250a720b3a6609c490db40dae5d83a4eb315170c4fe0d8b1f34b3"}, - {file = "psutil-6.0.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:ffe7fc9b6b36beadc8c322f84e1caff51e8703b88eee1da46d1e3a6ae11b4fd0"}, - {file = "psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, + {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, + {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, + {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, + {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, + {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, ] [package.extras] -test = ["enum34 ; python_version <= \"3.4\"", "ipaddress ; python_version < \"3.0\"", "mock ; python_version < \"3.0\"", "pywin32 ; sys_platform == \"win32\"", "wmi ; sys_platform == \"win32\""] +dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "ptyprocess" @@ -2787,7 +3073,7 @@ files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] -markers = {main = "sys_platform != \"win32\"", test = "sys_platform != \"win32\" or os_name != \"nt\""} +markers = {main = "sys_platform != \"win32\" and sys_platform != \"emscripten\" or python_version == \"3.9\" and sys_platform != \"win32\"", test = "sys_platform != \"win32\" and sys_platform != \"emscripten\" or python_version == \"3.9\" and sys_platform != \"win32\" or os_name != \"nt\""} [[package]] name = "pure-eval" @@ -2806,19 +3092,20 @@ tests = ["pytest"] [[package]] name = "py-spy" -version = "0.3.14" +version = "0.4.0" description = "Sampling profiler for Python programs" optional = false python-versions = "*" groups = ["main"] files = [ - {file = "py_spy-0.3.14-py2.py3-none-macosx_10_7_x86_64.whl", hash = "sha256:5b342cc5feb8d160d57a7ff308de153f6be68dcf506ad02b4d67065f2bae7f45"}, - {file = "py_spy-0.3.14-py2.py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:fe7efe6c91f723442259d428bf1f9ddb9c1679828866b353d539345ca40d9dd2"}, - {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590905447241d789d9de36cff9f52067b6f18d8b5e9fb399242041568d414461"}, - {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd6211fe7f587b3532ba9d300784326d9a6f2b890af7bf6fff21a029ebbc812b"}, - {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3e8e48032e71c94c3dd51694c39e762e4bbfec250df5bf514adcdd64e79371e0"}, - {file = "py_spy-0.3.14-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:f59b0b52e56ba9566305236375e6fc68888261d0d36b5addbe3cf85affbefc0e"}, - {file = "py_spy-0.3.14-py2.py3-none-win_amd64.whl", hash = "sha256:8f5b311d09f3a8e33dbd0d44fc6e37b715e8e0c7efefafcda8bfd63b31ab5a31"}, + {file = "py_spy-0.4.0-py2.py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:f2cf3f7130e7d780471faa5957441d3b4e0ec39a79b2c00f4c33d494f7728428"}, + {file = "py_spy-0.4.0-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:47cdda4c34d9b6cb01f3aaeceb2e88faf57da880207fe72ff6ff97e9bb6cc8a9"}, + {file = "py_spy-0.4.0-py2.py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eee3d0bde85ca5cf4f01f012d461180ca76c24835a96f7b5c4ded64eb6a008ab"}, + {file = "py_spy-0.4.0-py2.py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c5f06ffce4c9c98b7fc9f5e67e5e7db591173f1351837633f3f23d9378b1d18a"}, + {file = "py_spy-0.4.0-py2.py3-none-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87573e64dbfdfc89ba2e0f5e2f525aa84e0299c7eb6454b47ea335fde583a7a0"}, + {file = "py_spy-0.4.0-py2.py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8bf2f3702cef367a489faa45177b41a6c31b2a3e5bd78c978d44e29340152f5a"}, + {file = "py_spy-0.4.0-py2.py3-none-win_amd64.whl", hash = "sha256:77d8f637ade38367d944874776f45b703b7ac5938b1f7be8891f3a5876ddbb96"}, + {file = "py_spy-0.4.0.tar.gz", hash = "sha256:806602ce7972782cc9c1e383f339bfc27bfb822d42485e6a3e0530ae5040e1f0"}, ] [[package]] @@ -2828,6 +3115,7 @@ description = "Python library for Apache Arrow" optional = false python-versions = ">=3.8" groups = ["main"] +markers = "sys_platform == \"darwin\" and platform_machine == \"x86_64\"" files = [ {file = "pyarrow-17.0.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a5c8b238d47e48812ee577ee20c9a2779e6a5904f1708ae240f53ecbee7c9f07"}, {file = "pyarrow-17.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db023dc4c6cae1015de9e198d41250688383c3f9af8f565370ab2b4cb5f62655"}, @@ -2873,6 +3161,75 @@ numpy = ">=1.16.6" [package.extras] test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] +[[package]] +name = "pyarrow" +version = "20.0.0" +description = "Python library for Apache Arrow" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "sys_platform != \"darwin\" or platform_machine != \"x86_64\"" +files = [ + {file = "pyarrow-20.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:c7dd06fd7d7b410ca5dc839cc9d485d2bc4ae5240851bcd45d85105cc90a47d7"}, + {file = "pyarrow-20.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d5382de8dc34c943249b01c19110783d0d64b207167c728461add1ecc2db88e4"}, + {file = "pyarrow-20.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6415a0d0174487456ddc9beaead703d0ded5966129fa4fd3114d76b5d1c5ceae"}, + {file = "pyarrow-20.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15aa1b3b2587e74328a730457068dc6c89e6dcbf438d4369f572af9d320a25ee"}, + {file = "pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5605919fbe67a7948c1f03b9f3727d82846c053cd2ce9303ace791855923fd20"}, + {file = "pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a5704f29a74b81673d266e5ec1fe376f060627c2e42c5c7651288ed4b0db29e9"}, + {file = "pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:00138f79ee1b5aca81e2bdedb91e3739b987245e11fa3c826f9e57c5d102fb75"}, + {file = "pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:f2d67ac28f57a362f1a2c1e6fa98bfe2f03230f7e15927aecd067433b1e70ce8"}, + {file = "pyarrow-20.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:4a8b029a07956b8d7bd742ffca25374dd3f634b35e46cc7a7c3fa4c75b297191"}, + {file = "pyarrow-20.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:24ca380585444cb2a31324c546a9a56abbe87e26069189e14bdba19c86c049f0"}, + {file = "pyarrow-20.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:95b330059ddfdc591a3225f2d272123be26c8fa76e8c9ee1a77aad507361cfdb"}, + {file = "pyarrow-20.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f0fb1041267e9968c6d0d2ce3ff92e3928b243e2b6d11eeb84d9ac547308232"}, + {file = "pyarrow-20.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ff87cc837601532cc8242d2f7e09b4e02404de1b797aee747dd4ba4bd6313f"}, + {file = "pyarrow-20.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:7a3a5dcf54286e6141d5114522cf31dd67a9e7c9133d150799f30ee302a7a1ab"}, + {file = "pyarrow-20.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a6ad3e7758ecf559900261a4df985662df54fb7fdb55e8e3b3aa99b23d526b62"}, + {file = "pyarrow-20.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6bb830757103a6cb300a04610e08d9636f0cd223d32f388418ea893a3e655f1c"}, + {file = "pyarrow-20.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:96e37f0766ecb4514a899d9a3554fadda770fb57ddf42b63d80f14bc20aa7db3"}, + {file = "pyarrow-20.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3346babb516f4b6fd790da99b98bed9708e3f02e734c84971faccb20736848dc"}, + {file = "pyarrow-20.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:75a51a5b0eef32727a247707d4755322cb970be7e935172b6a3a9f9ae98404ba"}, + {file = "pyarrow-20.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:211d5e84cecc640c7a3ab900f930aaff5cd2702177e0d562d426fb7c4f737781"}, + {file = "pyarrow-20.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ba3cf4182828be7a896cbd232aa8dd6a31bd1f9e32776cc3796c012855e1199"}, + {file = "pyarrow-20.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c3a01f313ffe27ac4126f4c2e5ea0f36a5fc6ab51f8726cf41fee4b256680bd"}, + {file = "pyarrow-20.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:a2791f69ad72addd33510fec7bb14ee06c2a448e06b649e264c094c5b5f7ce28"}, + {file = "pyarrow-20.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4250e28a22302ce8692d3a0e8ec9d9dde54ec00d237cff4dfa9c1fbf79e472a8"}, + {file = "pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:89e030dc58fc760e4010148e6ff164d2f44441490280ef1e97a542375e41058e"}, + {file = "pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6102b4864d77102dbbb72965618e204e550135a940c2534711d5ffa787df2a5a"}, + {file = "pyarrow-20.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:96d6a0a37d9c98be08f5ed6a10831d88d52cac7b13f5287f1e0f625a0de8062b"}, + {file = "pyarrow-20.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:a15532e77b94c61efadde86d10957950392999503b3616b2ffcef7621a002893"}, + {file = "pyarrow-20.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:dd43f58037443af715f34f1322c782ec463a3c8a94a85fdb2d987ceb5658e061"}, + {file = "pyarrow-20.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa0d288143a8585806e3cc7c39566407aab646fb9ece164609dac1cfff45f6ae"}, + {file = "pyarrow-20.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b6953f0114f8d6f3d905d98e987d0924dabce59c3cda380bdfaa25a6201563b4"}, + {file = "pyarrow-20.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:991f85b48a8a5e839b2128590ce07611fae48a904cae6cab1f089c5955b57eb5"}, + {file = "pyarrow-20.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:97c8dc984ed09cb07d618d57d8d4b67a5100a30c3818c2fb0b04599f0da2de7b"}, + {file = "pyarrow-20.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9b71daf534f4745818f96c214dbc1e6124d7daf059167330b610fc69b6f3d3e3"}, + {file = "pyarrow-20.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e8b88758f9303fa5a83d6c90e176714b2fd3852e776fc2d7e42a22dd6c2fb368"}, + {file = "pyarrow-20.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:30b3051b7975801c1e1d387e17c588d8ab05ced9b1e14eec57915f79869b5031"}, + {file = "pyarrow-20.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:ca151afa4f9b7bc45bcc791eb9a89e90a9eb2772767d0b1e5389609c7d03db63"}, + {file = "pyarrow-20.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:4680f01ecd86e0dd63e39eb5cd59ef9ff24a9d166db328679e36c108dc993d4c"}, + {file = "pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4c8534e2ff059765647aa69b75d6543f9fef59e2cd4c6d18015192565d2b70"}, + {file = "pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e1f8a47f4b4ae4c69c4d702cfbdfe4d41e18e5c7ef6f1bb1c50918c1e81c57b"}, + {file = "pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:a1f60dc14658efaa927f8214734f6a01a806d7690be4b3232ba526836d216122"}, + {file = "pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:204a846dca751428991346976b914d6d2a82ae5b8316a6ed99789ebf976551e6"}, + {file = "pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f3b117b922af5e4c6b9a9115825726cac7d8b1421c37c2b5e24fbacc8930612c"}, + {file = "pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e724a3fd23ae5b9c010e7be857f4405ed5e679db5c93e66204db1a69f733936a"}, + {file = "pyarrow-20.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:82f1ee5133bd8f49d31be1299dc07f585136679666b502540db854968576faf9"}, + {file = "pyarrow-20.0.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:1bcbe471ef3349be7714261dea28fe280db574f9d0f77eeccc195a2d161fd861"}, + {file = "pyarrow-20.0.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:a18a14baef7d7ae49247e75641fd8bcbb39f44ed49a9fc4ec2f65d5031aa3b96"}, + {file = "pyarrow-20.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb497649e505dc36542d0e68eca1a3c94ecbe9799cb67b578b55f2441a247fbc"}, + {file = "pyarrow-20.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11529a2283cb1f6271d7c23e4a8f9f8b7fd173f7360776b668e509d712a02eec"}, + {file = "pyarrow-20.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fc1499ed3b4b57ee4e090e1cea6eb3584793fe3d1b4297bbf53f09b434991a5"}, + {file = "pyarrow-20.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:db53390eaf8a4dab4dbd6d93c85c5cf002db24902dbff0ca7d988beb5c9dd15b"}, + {file = "pyarrow-20.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:851c6a8260ad387caf82d2bbf54759130534723e37083111d4ed481cb253cc0d"}, + {file = "pyarrow-20.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e22f80b97a271f0a7d9cd07394a7d348f80d3ac63ed7cc38b6d1b696ab3b2619"}, + {file = "pyarrow-20.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:9965a050048ab02409fb7cbbefeedba04d3d67f2cc899eff505cc084345959ca"}, + {file = 
"pyarrow-20.0.0.tar.gz", hash = "sha256:febc4a913592573c8d5805091a6c2b5064c8bd6e002131f01061797d91c783c1"}, +] + +[package.extras] +test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] + [[package]] name = "pyasn1" version = "0.6.1" @@ -2887,18 +3244,18 @@ files = [ [[package]] name = "pyasn1-modules" -version = "0.4.1" +version = "0.4.2" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, - {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.7.0" +pyasn1 = ">=0.6.1,<0.7.0" [[package]] name = "pycparser" @@ -2914,55 +3271,62 @@ files = [ [[package]] name = "pydantic" -version = "1.10.18" +version = "1.10.22" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "pydantic-1.10.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e405ffcc1254d76bb0e760db101ee8916b620893e6edfbfee563b3c6f7a67c02"}, - {file = "pydantic-1.10.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e306e280ebebc65040034bff1a0a81fd86b2f4f05daac0131f29541cafd80b80"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11d9d9b87b50338b1b7de4ebf34fd29fdb0d219dc07ade29effc74d3d2609c62"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b661ce52c7b5e5f600c0c3c5839e71918346af2ef20062705ae76b5c16914cab"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c20f682defc9ef81cd7eaa485879ab29a86a0ba58acf669a78ed868e72bb89e0"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5ae6b7c8483b1e0bf59e5f1843e4fd8fd405e11df7de217ee65b98eb5462861"}, - {file = "pydantic-1.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:74fe19dda960b193b0eb82c1f4d2c8e5e26918d9cda858cbf3f41dd28549cb70"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72fa46abace0a7743cc697dbb830a41ee84c9db8456e8d77a46d79b537efd7ec"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef0fe7ad7cbdb5f372463d42e6ed4ca9c443a52ce544472d8842a0576d830da5"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00e63104346145389b8e8f500bc6a241e729feaf0559b88b8aa513dd2065481"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae6fa2008e1443c46b7b3a5eb03800121868d5ab6bc7cda20b5df3e133cde8b3"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9f463abafdc92635da4b38807f5b9972276be7c8c5121989768549fceb8d2588"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3445426da503c7e40baccefb2b2989a0c5ce6b163679dd75f55493b460f05a8f"}, - {file = "pydantic-1.10.18-cp311-cp311-win_amd64.whl", hash = 
"sha256:467a14ee2183bc9c902579bb2f04c3d3dac00eff52e252850509a562255b2a33"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:efbc8a7f9cb5fe26122acba1852d8dcd1e125e723727c59dcd244da7bdaa54f2"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24a4a159d0f7a8e26bf6463b0d3d60871d6a52eac5bb6a07a7df85c806f4c048"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b74be007703547dc52e3c37344d130a7bfacca7df112a9e5ceeb840a9ce195c7"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcb20d4cb355195c75000a49bb4a31d75e4295200df620f454bbc6bdf60ca890"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46f379b8cb8a3585e3f61bf9ae7d606c70d133943f339d38b76e041ec234953f"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbfbca662ed3729204090c4d09ee4beeecc1a7ecba5a159a94b5a4eb24e3759a"}, - {file = "pydantic-1.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:c6d0a9f9eccaf7f438671a64acf654ef0d045466e63f9f68a579e2383b63f357"}, - {file = "pydantic-1.10.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d5492dbf953d7d849751917e3b2433fb26010d977aa7a0765c37425a4026ff1"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe734914977eed33033b70bfc097e1baaffb589517863955430bf2e0846ac30f"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15fdbe568beaca9aacfccd5ceadfb5f1a235087a127e8af5e48df9d8a45ae85c"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c3e742f62198c9eb9201781fbebe64533a3bbf6a76a91b8d438d62b813079dbc"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19a3bd00b9dafc2cd7250d94d5b578edf7a0bd7daf102617153ff9a8fa37871c"}, - {file = "pydantic-1.10.18-cp37-cp37m-win_amd64.whl", hash = "sha256:2ce3fcf75b2bae99aa31bd4968de0474ebe8c8258a0110903478bd83dfee4e3b"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:335a32d72c51a313b33fa3a9b0fe283503272ef6467910338e123f90925f0f03"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34a3613c7edb8c6fa578e58e9abe3c0f5e7430e0fc34a65a415a1683b9c32d9a"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9ee4e6ca1d9616797fa2e9c0bfb8815912c7d67aca96f77428e316741082a1b"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23e8ec1ce4e57b4f441fc91e3c12adba023fedd06868445a5b5f1d48f0ab3682"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:44ae8a3e35a54d2e8fa88ed65e1b08967a9ef8c320819a969bfa09ce5528fafe"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5389eb3b48a72da28c6e061a247ab224381435256eb541e175798483368fdd3"}, - {file = "pydantic-1.10.18-cp38-cp38-win_amd64.whl", hash = "sha256:069b9c9fc645474d5ea3653788b544a9e0ccd3dca3ad8c900c4c6eac844b4620"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80b982d42515632eb51f60fa1d217dfe0729f008e81a82d1544cc392e0a50ddf"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aad8771ec8dbf9139b01b56f66386537c6fe4e76c8f7a47c10261b69ad25c2c9"}, - {file = 
"pydantic-1.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941a2eb0a1509bd7f31e355912eb33b698eb0051730b2eaf9e70e2e1589cae1d"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f7361a09b07915a98efd17fdec23103307a54db2000bb92095457ca758d485"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6951f3f47cb5ca4da536ab161ac0163cab31417d20c54c6de5ddcab8bc813c3f"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a4c5eec138a9b52c67f664c7d51d4c7234c5ad65dd8aacd919fb47445a62c86"}, - {file = "pydantic-1.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:49e26c51ca854286bffc22b69787a8d4063a62bf7d83dc21d44d2ff426108518"}, - {file = "pydantic-1.10.18-py3-none-any.whl", hash = "sha256:06a189b81ffc52746ec9c8c007f16e5167c8b0a696e1a726369327e3db7b2a82"}, - {file = "pydantic-1.10.18.tar.gz", hash = "sha256:baebdff1907d1d96a139c25136a9bb7d17e118f133a76a2ef3b845e831e3403a"}, + {file = "pydantic-1.10.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:57889565ccc1e5b7b73343329bbe6198ebc472e3ee874af2fa1865cfe7048228"}, + {file = "pydantic-1.10.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90729e22426de79bc6a3526b4c45ec4400caf0d4f10d7181ba7f12c01bb3897d"}, + {file = "pydantic-1.10.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8684d347f351554ec94fdcb507983d3116dc4577fb8799fed63c65869a2d10"}, + {file = "pydantic-1.10.22-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8dad498ceff2d9ef1d2e2bc6608f5b59b8e1ba2031759b22dfb8c16608e1802"}, + {file = "pydantic-1.10.22-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fac529cc654d4575cf8de191cce354b12ba705f528a0a5c654de6d01f76cd818"}, + {file = "pydantic-1.10.22-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4148232aded8dd1dd13cf910a01b32a763c34bd79a0ab4d1ee66164fcb0b7b9d"}, + {file = "pydantic-1.10.22-cp310-cp310-win_amd64.whl", hash = "sha256:ece68105d9e436db45d8650dc375c760cc85a6793ae019c08769052902dca7db"}, + {file = "pydantic-1.10.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e530a8da353f791ad89e701c35787418605d35085f4bdda51b416946070e938"}, + {file = "pydantic-1.10.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:654322b85642e9439d7de4c83cb4084ddd513df7ff8706005dada43b34544946"}, + {file = "pydantic-1.10.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8bece75bd1b9fc1c32b57a32831517943b1159ba18b4ba32c0d431d76a120ae"}, + {file = "pydantic-1.10.22-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eccb58767f13c6963dcf96d02cb8723ebb98b16692030803ac075d2439c07b0f"}, + {file = "pydantic-1.10.22-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7778e6200ff8ed5f7052c1516617423d22517ad36cc7a3aedd51428168e3e5e8"}, + {file = "pydantic-1.10.22-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bffe02767d27c39af9ca7dc7cd479c00dda6346bb62ffc89e306f665108317a2"}, + {file = "pydantic-1.10.22-cp311-cp311-win_amd64.whl", hash = "sha256:23bc19c55427091b8e589bc08f635ab90005f2dc99518f1233386f46462c550a"}, + {file = "pydantic-1.10.22-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:92d0f97828a075a71d9efc65cf75db5f149b4d79a38c89648a63d2932894d8c9"}, + {file = "pydantic-1.10.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6af5a2811b6b95b58b829aeac5996d465a5f0c7ed84bd871d603cf8646edf6ff"}, + 
{file = "pydantic-1.10.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cf06d8d40993e79af0ab2102ef5da77b9ddba51248e4cb27f9f3f591fbb096e"}, + {file = "pydantic-1.10.22-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:184b7865b171a6057ad97f4a17fbac81cec29bd103e996e7add3d16b0d95f609"}, + {file = "pydantic-1.10.22-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:923ad861677ab09d89be35d36111156063a7ebb44322cdb7b49266e1adaba4bb"}, + {file = "pydantic-1.10.22-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:82d9a3da1686443fb854c8d2ab9a473251f8f4cdd11b125522efb4d7c646e7bc"}, + {file = "pydantic-1.10.22-cp312-cp312-win_amd64.whl", hash = "sha256:1612604929af4c602694a7f3338b18039d402eb5ddfbf0db44f1ebfaf07f93e7"}, + {file = "pydantic-1.10.22-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b259dc89c9abcd24bf42f31951fb46c62e904ccf4316393f317abeeecda39978"}, + {file = "pydantic-1.10.22-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9238aa0964d80c0908d2f385e981add58faead4412ca80ef0fa352094c24e46d"}, + {file = "pydantic-1.10.22-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8029f05b04080e3f1a550575a1bca747c0ea4be48e2d551473d47fd768fc1b"}, + {file = "pydantic-1.10.22-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c06918894f119e0431a36c9393bc7cceeb34d1feeb66670ef9b9ca48c073937"}, + {file = "pydantic-1.10.22-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e205311649622ee8fc1ec9089bd2076823797f5cd2c1e3182dc0e12aab835b35"}, + {file = "pydantic-1.10.22-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:815f0a73d5688d6dd0796a7edb9eca7071bfef961a7b33f91e618822ae7345b7"}, + {file = "pydantic-1.10.22-cp313-cp313-win_amd64.whl", hash = "sha256:9dfce71d42a5cde10e78a469e3d986f656afc245ab1b97c7106036f088dd91f8"}, + {file = "pydantic-1.10.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3ecaf8177b06aac5d1f442db1288e3b46d9f05f34fd17fdca3ad34105328b61a"}, + {file = "pydantic-1.10.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb36c2de9ea74bd7f66b5481dea8032d399affd1cbfbb9bb7ce539437f1fce62"}, + {file = "pydantic-1.10.22-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6b8d14a256be3b8fff9286d76c532f1a7573fbba5f189305b22471c6679854d"}, + {file = "pydantic-1.10.22-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:1c33269e815db4324e71577174c29c7aa30d1bba51340ce6be976f6f3053a4c6"}, + {file = "pydantic-1.10.22-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:8661b3ab2735b2a9ccca2634738534a795f4a10bae3ab28ec0a10c96baa20182"}, + {file = "pydantic-1.10.22-cp37-cp37m-win_amd64.whl", hash = "sha256:22bdd5fe70d4549995981c55b970f59de5c502d5656b2abdfcd0a25be6f3763e"}, + {file = "pydantic-1.10.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e3f33d1358aa4bc2795208cc29ff3118aeaad0ea36f0946788cf7cadeccc166b"}, + {file = "pydantic-1.10.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:813f079f9cd136cac621f3f9128a4406eb8abd2ad9fdf916a0731d91c6590017"}, + {file = "pydantic-1.10.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab618ab8dca6eac7f0755db25f6aba3c22c40e3463f85a1c08dc93092d917704"}, + {file = "pydantic-1.10.22-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d128e1aaa38db88caca920d5822c98fc06516a09a58b6d3d60fa5ea9099b32cc"}, + {file = 
"pydantic-1.10.22-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:cc97bbc25def7025e55fc9016080773167cda2aad7294e06a37dda04c7d69ece"}, + {file = "pydantic-1.10.22-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dda5d7157d543b1fa565038cae6e952549d0f90071c839b3740fb77c820fab8"}, + {file = "pydantic-1.10.22-cp38-cp38-win_amd64.whl", hash = "sha256:a093fe44fe518cb445d23119511a71f756f8503139d02fcdd1173f7b76c95ffe"}, + {file = "pydantic-1.10.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec54c89b2568b258bb30d7348ac4d82bec1b58b377fb56a00441e2ac66b24587"}, + {file = "pydantic-1.10.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8f1d1a1532e4f3bcab4e34e8d2197a7def4b67072acd26cfa60e92d75803a48"}, + {file = "pydantic-1.10.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ad83ca35508c27eae1005b6b61f369f78aae6d27ead2135ec156a2599910121"}, + {file = "pydantic-1.10.22-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53cdb44b78c420f570ff16b071ea8cd5a477635c6b0efc343c8a91e3029bbf1a"}, + {file = "pydantic-1.10.22-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:16d0a5ae9d98264186ce31acdd7686ec05fd331fab9d68ed777d5cb2d1514e5e"}, + {file = "pydantic-1.10.22-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8aee040e25843f036192b1a1af62117504a209a043aa8db12e190bb86ad7e611"}, + {file = "pydantic-1.10.22-cp39-cp39-win_amd64.whl", hash = "sha256:7f691eec68dbbfca497d3c11b92a3e5987393174cbedf03ec7a4184c35c2def6"}, + {file = "pydantic-1.10.22-py3-none-any.whl", hash = "sha256:343037d608bcbd34df937ac259708bfc83664dadf88afe8516c4f282d7d471a9"}, + {file = "pydantic-1.10.22.tar.gz", hash = "sha256:ee1006cebd43a8e7158fb7190bb8f4e2da9649719bff65d0c287282ec38dec6d"}, ] [package.dependencies] @@ -2974,14 +3338,14 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pygments" -version = "2.18.0" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" groups = ["main", "docs", "test"] files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] @@ -3087,68 +3451,77 @@ six = ">=1.5" [[package]] name = "python-json-logger" -version = "2.0.7" -description = "A python library adding a json log formatter" +version = "3.3.0" +description = "JSON Log Formatter for the Python Logging Package" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" groups = ["test"] files = [ - {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, - {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, + {file = "python_json_logger-3.3.0-py3-none-any.whl", hash = "sha256:dd980fae8cffb24c13caf6e158d3d61c0d6d22342f932cb6e9deedab3d35eec7"}, + {file = "python_json_logger-3.3.0.tar.gz", hash = "sha256:12b7e74b17775e7d565129296105bbe3910842d9d0eb083fc83a6a617aa8df84"}, ] +[package.dependencies] +typing_extensions = {version = "*", markers = "python_version < \"3.10\""} + +[package.extras] +dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] + [[package]] name = "pytz" -version = "2024.2" +version = "2025.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" groups = ["main"] files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] [[package]] name = "pywin32" -version = "306" +version = "310" description = "Python for Window Extensions" optional = false python-versions = "*" groups = ["test"] markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"" files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = 
"sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, + {file = "pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1"}, + {file = "pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d"}, + {file = "pywin32-310-cp310-cp310-win_arm64.whl", hash = "sha256:33babed0cf0c92a6f94cc6cc13546ab24ee13e3e800e61ed87609ab91e4c8213"}, + {file = "pywin32-310-cp311-cp311-win32.whl", hash = "sha256:1e765f9564e83011a63321bb9d27ec456a0ed90d3732c4b2e312b855365ed8bd"}, + {file = "pywin32-310-cp311-cp311-win_amd64.whl", hash = "sha256:126298077a9d7c95c53823934f000599f66ec9296b09167810eb24875f32689c"}, + {file = "pywin32-310-cp311-cp311-win_arm64.whl", hash = "sha256:19ec5fc9b1d51c4350be7bb00760ffce46e6c95eaf2f0b2f1150657b1a43c582"}, + {file = "pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d"}, + {file = "pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060"}, + {file = "pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966"}, + {file = "pywin32-310-cp313-cp313-win32.whl", hash = "sha256:5d241a659c496ada3253cd01cfaa779b048e90ce4b2b38cd44168ad555ce74ab"}, + {file = "pywin32-310-cp313-cp313-win_amd64.whl", hash = "sha256:667827eb3a90208ddbdcc9e860c81bde63a135710e21e4cb3348968e4bd5249e"}, + {file = "pywin32-310-cp313-cp313-win_arm64.whl", hash = "sha256:e308f831de771482b7cf692a1f308f8fca701b2d8f9dde6cc440c7da17e47b33"}, + {file = "pywin32-310-cp38-cp38-win32.whl", hash = "sha256:0867beb8addefa2e3979d4084352e4ac6e991ca45373390775f7084cc0209b9c"}, + {file = "pywin32-310-cp38-cp38-win_amd64.whl", hash = "sha256:30f0a9b3138fb5e07eb4973b7077e1883f558e40c578c6925acc7a94c34eaa36"}, + {file = "pywin32-310-cp39-cp39-win32.whl", hash = "sha256:851c8d927af0d879221e616ae1f66145253537bbdd321a77e8ef701b443a9a1a"}, + {file = "pywin32-310-cp39-cp39-win_amd64.whl", hash = "sha256:96867217335559ac619f00ad70e513c0fcf84b8a3af9fc2bba3b59b97da70475"}, ] [[package]] name = "pywinpty" -version = "2.0.13" +version = "2.0.15" description = "Pseudo terminal support for Windows from Python." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["test"] markers = "os_name == \"nt\"" files = [ - {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, - {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, - {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"}, - {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"}, - {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"}, - {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"}, + {file = "pywinpty-2.0.15-cp310-cp310-win_amd64.whl", hash = "sha256:8e7f5de756a615a38b96cd86fa3cd65f901ce54ce147a3179c45907fa11b4c4e"}, + {file = "pywinpty-2.0.15-cp311-cp311-win_amd64.whl", hash = "sha256:9a6bcec2df2707aaa9d08b86071970ee32c5026e10bcc3cc5f6f391d85baf7ca"}, + {file = "pywinpty-2.0.15-cp312-cp312-win_amd64.whl", hash = "sha256:83a8f20b430bbc5d8957249f875341a60219a4e971580f2ba694fbfb54a45ebc"}, + {file = "pywinpty-2.0.15-cp313-cp313-win_amd64.whl", hash = "sha256:ab5920877dd632c124b4ed17bc6dd6ef3b9f86cd492b963ffdb1a67b85b0f408"}, + {file = "pywinpty-2.0.15-cp313-cp313t-win_amd64.whl", hash = "sha256:a4560ad8c01e537708d2790dbe7da7d986791de805d89dd0d3697ca59e9e4901"}, + {file = "pywinpty-2.0.15-cp39-cp39-win_amd64.whl", hash = "sha256:d261cd88fcd358cfb48a7ca0700db3e1c088c9c10403c9ebc0d8a8b57aa6a117"}, + {file = "pywinpty-2.0.15.tar.gz", hash = "sha256:312cf39153a8736c617d45ce8b6ad6cd2107de121df91c455b10ce6bba7a39b2"}, ] [[package]] @@ -3216,121 +3589,105 @@ files = [ [[package]] name = "pyzmq" -version = "26.2.0" +version = "26.4.0" description = "Python bindings for 0MQ" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["test"] files = [ - {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, - {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, - {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, - {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, - {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, - {file = 
"pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, - {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, - {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, - {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, - {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, - {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, - {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, - {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, - {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, - {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, - {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, - {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, - {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, - {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, - {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, - {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, - 
{file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, - {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, - {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, - {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, - {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, - {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, - {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, - {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, - {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, - {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, - {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, - {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, - {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, - {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, - {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, - {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = 
"sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, - {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"}, - {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"}, - {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"}, - {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"}, - {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"}, - {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"}, - {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"}, - {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = "sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"}, - {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"}, - {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"}, - {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"}, - {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"}, - {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"}, - {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"}, - {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"}, - {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"}, - {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"}, - {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"}, - {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"}, - {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, - {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, - {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, - {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, - {file = 
"pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, - {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, - {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, - {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"}, - {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"}, - {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, - {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, + {file = "pyzmq-26.4.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:0329bdf83e170ac133f44a233fc651f6ed66ef8e66693b5af7d54f45d1ef5918"}, + {file = "pyzmq-26.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:398a825d2dea96227cf6460ce0a174cf7657d6f6827807d4d1ae9d0f9ae64315"}, + {file = "pyzmq-26.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d52d62edc96787f5c1dfa6c6ccff9b581cfae5a70d94ec4c8da157656c73b5b"}, + {file = "pyzmq-26.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1410c3a3705db68d11eb2424d75894d41cff2f64d948ffe245dd97a9debfebf4"}, + {file = "pyzmq-26.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:7dacb06a9c83b007cc01e8e5277f94c95c453c5851aac5e83efe93e72226353f"}, + {file = "pyzmq-26.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6bab961c8c9b3a4dc94d26e9b2cdf84de9918931d01d6ff38c721a83ab3c0ef5"}, + {file = "pyzmq-26.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a5c09413b924d96af2aa8b57e76b9b0058284d60e2fc3730ce0f979031d162a"}, + {file = "pyzmq-26.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d489ac234d38e57f458fdbd12a996bfe990ac028feaf6f3c1e81ff766513d3b"}, + {file = "pyzmq-26.4.0-cp310-cp310-win32.whl", hash = "sha256:dea1c8db78fb1b4b7dc9f8e213d0af3fc8ecd2c51a1d5a3ca1cde1bda034a980"}, + {file = "pyzmq-26.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:fa59e1f5a224b5e04dc6c101d7186058efa68288c2d714aa12d27603ae93318b"}, + {file = "pyzmq-26.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:a651fe2f447672f4a815e22e74630b6b1ec3a1ab670c95e5e5e28dcd4e69bbb5"}, + {file = "pyzmq-26.4.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:bfcf82644c9b45ddd7cd2a041f3ff8dce4a0904429b74d73a439e8cab1bd9e54"}, + {file = "pyzmq-26.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9bcae3979b2654d5289d3490742378b2f3ce804b0b5fd42036074e2bf35b030"}, + {file = "pyzmq-26.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccdff8ac4246b6fb60dcf3982dfaeeff5dd04f36051fe0632748fc0aa0679c01"}, + {file = "pyzmq-26.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4550af385b442dc2d55ab7717837812799d3674cb12f9a3aa897611839c18e9e"}, + {file = "pyzmq-26.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:2f9f7ffe9db1187a253fca95191854b3fda24696f086e8789d1d449308a34b88"}, + {file = "pyzmq-26.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:3709c9ff7ba61589b7372923fd82b99a81932b592a5c7f1a24147c91da9a68d6"}, + {file = "pyzmq-26.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f8f3c30fb2d26ae5ce36b59768ba60fb72507ea9efc72f8f69fa088450cff1df"}, + {file = "pyzmq-26.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:382a4a48c8080e273427fc692037e3f7d2851959ffe40864f2db32646eeb3cef"}, + {file = "pyzmq-26.4.0-cp311-cp311-win32.whl", hash = "sha256:d56aad0517d4c09e3b4f15adebba8f6372c5102c27742a5bdbfc74a7dceb8fca"}, + {file = "pyzmq-26.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:963977ac8baed7058c1e126014f3fe58b3773f45c78cce7af5c26c09b6823896"}, + {file = "pyzmq-26.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0c8e8cadc81e44cc5088fcd53b9b3b4ce9344815f6c4a03aec653509296fae3"}, + {file = "pyzmq-26.4.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5227cb8da4b6f68acfd48d20c588197fd67745c278827d5238c707daf579227b"}, + {file = "pyzmq-26.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1c07a7fa7f7ba86554a2b1bef198c9fed570c08ee062fd2fd6a4dcacd45f905"}, + {file = "pyzmq-26.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae775fa83f52f52de73183f7ef5395186f7105d5ed65b1ae65ba27cb1260de2b"}, + {file = "pyzmq-26.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c760d0226ebd52f1e6b644a9e839b5db1e107a23f2fcd46ec0569a4fdd4e63"}, + {file = "pyzmq-26.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ef8c6ecc1d520debc147173eaa3765d53f06cd8dbe7bd377064cdbc53ab456f5"}, + {file = "pyzmq-26.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3150ef4084e163dec29ae667b10d96aad309b668fac6810c9e8c27cf543d6e0b"}, + {file = "pyzmq-26.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4448c9e55bf8329fa1dcedd32f661bf611214fa70c8e02fee4347bc589d39a84"}, + {file = "pyzmq-26.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e07dde3647afb084d985310d067a3efa6efad0621ee10826f2cb2f9a31b89d2f"}, + {file = "pyzmq-26.4.0-cp312-cp312-win32.whl", hash = "sha256:ba034a32ecf9af72adfa5ee383ad0fd4f4e38cdb62b13624278ef768fe5b5b44"}, + {file = "pyzmq-26.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:056a97aab4064f526ecb32f4343917a4022a5d9efb6b9df990ff72e1879e40be"}, + {file = "pyzmq-26.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:2f23c750e485ce1eb639dbd576d27d168595908aa2d60b149e2d9e34c9df40e0"}, + {file = "pyzmq-26.4.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:c43fac689880f5174d6fc864857d1247fe5cfa22b09ed058a344ca92bf5301e3"}, + {file = "pyzmq-26.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:902aca7eba477657c5fb81c808318460328758e8367ecdd1964b6330c73cae43"}, + {file = "pyzmq-26.4.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5e48a830bfd152fe17fbdeaf99ac5271aa4122521bf0d275b6b24e52ef35eb6"}, + {file = "pyzmq-26.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31be2b6de98c824c06f5574331f805707c667dc8f60cb18580b7de078479891e"}, + {file = "pyzmq-26.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6332452034be001bbf3206ac59c0d2a7713de5f25bb38b06519fc6967b7cf771"}, + {file = "pyzmq-26.4.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:da8c0f5dd352136853e6a09b1b986ee5278dfddfebd30515e16eae425c872b30"}, + {file = "pyzmq-26.4.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f4ccc1a0a2c9806dda2a2dd118a3b7b681e448f3bb354056cad44a65169f6d86"}, + {file = "pyzmq-26.4.0-cp313-cp313-musllinux_1_1_x86_64.whl", 
hash = "sha256:1c0b5fceadbab461578daf8d1dcc918ebe7ddd2952f748cf30c7cf2de5d51101"}, + {file = "pyzmq-26.4.0-cp313-cp313-win32.whl", hash = "sha256:28e2b0ff5ba4b3dd11062d905682bad33385cfa3cc03e81abd7f0822263e6637"}, + {file = "pyzmq-26.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:23ecc9d241004c10e8b4f49d12ac064cd7000e1643343944a10df98e57bc544b"}, + {file = "pyzmq-26.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:1edb0385c7f025045d6e0f759d4d3afe43c17a3d898914ec6582e6f464203c08"}, + {file = "pyzmq-26.4.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:93a29e882b2ba1db86ba5dd5e88e18e0ac6b627026c5cfbec9983422011b82d4"}, + {file = "pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb45684f276f57110bb89e4300c00f1233ca631f08f5f42528a5c408a79efc4a"}, + {file = "pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72073e75260cb301aad4258ad6150fa7f57c719b3f498cb91e31df16784d89b"}, + {file = "pyzmq-26.4.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be37e24b13026cfedd233bcbbccd8c0bcd2fdd186216094d095f60076201538d"}, + {file = "pyzmq-26.4.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:237b283044934d26f1eeff4075f751b05d2f3ed42a257fc44386d00df6a270cf"}, + {file = "pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:b30f862f6768b17040929a68432c8a8be77780317f45a353cb17e423127d250c"}, + {file = "pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:c80fcd3504232f13617c6ab501124d373e4895424e65de8b72042333316f64a8"}, + {file = "pyzmq-26.4.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:26a2a7451606b87f67cdeca2c2789d86f605da08b4bd616b1a9981605ca3a364"}, + {file = "pyzmq-26.4.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:831cc53bf6068d46d942af52fa8b0b9d128fb39bcf1f80d468dc9a3ae1da5bfb"}, + {file = "pyzmq-26.4.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:51d18be6193c25bd229524cfac21e39887c8d5e0217b1857998dfbef57c070a4"}, + {file = "pyzmq-26.4.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:445c97854204119ae2232503585ebb4fa7517142f71092cb129e5ee547957a1f"}, + {file = "pyzmq-26.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:807b8f4ad3e6084412c0f3df0613269f552110fa6fb91743e3e306223dbf11a6"}, + {file = "pyzmq-26.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c01d109dd675ac47fa15c0a79d256878d898f90bc10589f808b62d021d2e653c"}, + {file = "pyzmq-26.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0a294026e28679a8dd64c922e59411cb586dad307661b4d8a5c49e7bbca37621"}, + {file = "pyzmq-26.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:22c8dd677274af8dfb1efd05006d6f68fb2f054b17066e308ae20cb3f61028cf"}, + {file = "pyzmq-26.4.0-cp38-cp38-win32.whl", hash = "sha256:14fc678b696bc42c14e2d7f86ac4e97889d5e6b94d366ebcb637a768d2ad01af"}, + {file = "pyzmq-26.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1ef0a536662bbbdc8525f7e2ef19e74123ec9c4578e0582ecd41aedc414a169"}, + {file = "pyzmq-26.4.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:a88643de8abd000ce99ca72056a1a2ae15881ee365ecb24dd1d9111e43d57842"}, + {file = "pyzmq-26.4.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a744ce209ecb557406fb928f3c8c55ce79b16c3eeb682da38ef5059a9af0848"}, + {file = "pyzmq-26.4.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9434540f333332224ecb02ee6278b6c6f11ea1266b48526e73c903119b2f420f"}, + {file = 
"pyzmq-26.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6c6f0a23e55cd38d27d4c89add963294ea091ebcb104d7fdab0f093bc5abb1c"}, + {file = "pyzmq-26.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6145df55dc2309f6ef72d70576dcd5aabb0fd373311613fe85a5e547c722b780"}, + {file = "pyzmq-26.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2ea81823840ef8c56e5d2f9918e4d571236294fea4d1842b302aebffb9e40997"}, + {file = "pyzmq-26.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc2abc385dc37835445abe206524fbc0c9e3fce87631dfaa90918a1ba8f425eb"}, + {file = "pyzmq-26.4.0-cp39-cp39-win32.whl", hash = "sha256:41a2508fe7bed4c76b4cf55aacfb8733926f59d440d9ae2b81ee8220633b4d12"}, + {file = "pyzmq-26.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4000e8255d6cbce38982e5622ebb90823f3409b7ffe8aeae4337ef7d6d2612a"}, + {file = "pyzmq-26.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f6919d9c120488246bdc2a2f96662fa80d67b35bd6d66218f457e722b3ff64"}, + {file = "pyzmq-26.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:98d948288ce893a2edc5ec3c438fe8de2daa5bbbd6e2e865ec5f966e237084ba"}, + {file = "pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9f34f5c9e0203ece706a1003f1492a56c06c0632d86cb77bcfe77b56aacf27b"}, + {file = "pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80c9b48aef586ff8b698359ce22f9508937c799cc1d2c9c2f7c95996f2300c94"}, + {file = "pyzmq-26.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f2a5b74009fd50b53b26f65daff23e9853e79aa86e0aa08a53a7628d92d44a"}, + {file = "pyzmq-26.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:61c5f93d7622d84cb3092d7f6398ffc77654c346545313a3737e266fc11a3beb"}, + {file = "pyzmq-26.4.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4478b14cb54a805088299c25a79f27eaf530564a7a4f72bf432a040042b554eb"}, + {file = "pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a28ac29c60e4ba84b5f58605ace8ad495414a724fe7aceb7cf06cd0598d04e1"}, + {file = "pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43b03c1ceea27c6520124f4fb2ba9c647409b9abdf9a62388117148a90419494"}, + {file = "pyzmq-26.4.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7731abd23a782851426d4e37deb2057bf9410848a4459b5ede4fe89342e687a9"}, + {file = "pyzmq-26.4.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a222ad02fbe80166b0526c038776e8042cd4e5f0dec1489a006a1df47e9040e0"}, + {file = "pyzmq-26.4.0-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:91c3ffaea475ec8bb1a32d77ebc441dcdd13cd3c4c284a6672b92a0f5ade1917"}, + {file = "pyzmq-26.4.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d9a78a52668bf5c9e7b0da36aa5760a9fc3680144e1445d68e98df78a25082ed"}, + {file = "pyzmq-26.4.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b70cab356ff8c860118b89dc86cd910c73ce2127eb986dada4fbac399ef644cf"}, + {file = "pyzmq-26.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acae207d4387780838192326b32d373bb286da0b299e733860e96f80728eb0af"}, + {file = "pyzmq-26.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f928eafd15794aa4be75463d537348b35503c1e014c5b663f206504ec1a90fe4"}, + {file = "pyzmq-26.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:552b0d2e39987733e1e9e948a0ced6ff75e0ea39ab1a1db2fc36eb60fd8760db"}, + {file = "pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd670a8aa843f2ee637039bbd412e0d7294a5e588e1ecc9ad98b0cdc050259a4"}, + {file = "pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d367b7b775a0e1e54a59a2ba3ed4d5e0a31566af97cc9154e34262777dab95ed"}, + {file = "pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112af16c406e4a93df2caef49f884f4c2bb2b558b0b5577ef0b2465d15c1abc"}, + {file = "pyzmq-26.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c76c298683f82669cab0b6da59071f55238c039738297c69f187a542c6d40099"}, + {file = "pyzmq-26.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:49b6ca2e625b46f499fb081aaf7819a177f41eeb555acb05758aa97f4f95d147"}, + {file = "pyzmq-26.4.0.tar.gz", hash = "sha256:4bd13f85f80962f91a651a7356fe0472791a5f7a92f227822b5acf44795c626d"}, ] [package.dependencies] @@ -3338,32 +3695,32 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "ray" -version = "2.35.0" +version = "2.44.1" description = "Ray provides a simple, universal API for building distributed applications." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "ray-2.35.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:1e7e2d2e987be728a81821b6fd2bccb23e4d8a6cca8417db08b24f06a08d8476"}, - {file = "ray-2.35.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bd48be4c362004d31e5df072fd58b929efc67adfefc0adece41483b15f84539"}, - {file = "ray-2.35.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:ef41e9254f3e18a90a8cf13fac9e35ac086eb778079ab6c76a37d3a6059186c5"}, - {file = "ray-2.35.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:1994aaf9996ffc45019856545e817d527ad572762f1af76ad669ae4e786fcfd6"}, - {file = "ray-2.35.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3b7a7d73f818e249064460ffa95402ebd852bf97d9ec6167b8b0d95be03da9f"}, - {file = "ray-2.35.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:e29754fac4b69a9cb0d089841af59ec6fb10b5d4a248b7c579d319ca2ed1c96f"}, - {file = "ray-2.35.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7a606c8ca53c64fc496703e9fd15d1a1ffb50e6b457a33d3622be2f13fc30a5"}, - {file = "ray-2.35.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ac561e20a62ce941b74d02a0b92b7765c6ba87cc22e24f34f64ded2c454ba64e"}, - {file = "ray-2.35.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:587af570cbe5f6cedca854f15107740e63c67207bee900713cb2ee38f6ebf20f"}, - {file = "ray-2.35.0-cp311-cp311-win_amd64.whl", hash = "sha256:8e406cce41679790146d4d2b1b0cb0b413ca35276e43b68ee796366169c1dbde"}, - {file = "ray-2.35.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:eb86355a3a0e794e2f1dbd5a84805dddfca64921ad0999b7fa5276e40d243692"}, - {file = "ray-2.35.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b746913268d5ea5e19bff0eb6bdc7e0538036892a8b57c08411787481195df2"}, - {file = "ray-2.35.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:e2ccfd144180f03d38b02a81afdac2b437f27e46736bf2653a1f0e8d67ea56cd"}, - {file = "ray-2.35.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:2ca1a0de41d4462fd764598a5981cf55fc955599f38f9a1ae10868e94c6dd80d"}, - {file = "ray-2.35.0-cp312-cp312-win_amd64.whl", hash = "sha256:c5600f745bb0e4df840a5cd51e82b1acf517f73505df9869fe3e369966956129"}, - {file = "ray-2.35.0-cp39-cp39-macosx_10_15_x86_64.whl", hash 
= "sha256:5e98d2bac394b806109782f316740c5b3c3f10a50117c8e28200a528df734928"}, - {file = "ray-2.35.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c395b46efd0dd871424b1b8d6baf99f91983946fbe351ff66ea34e8919daff29"}, - {file = "ray-2.35.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:4e6314bfdb8c73abcac13f41cc3d935dd1a8ad94c65005a4bfdc4861dc8b070d"}, - {file = "ray-2.35.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:70a154e3071cbb4d7a9b68f2dcf491b96b760be0ec6e2ef11a766071ac6acfef"}, - {file = "ray-2.35.0-cp39-cp39-win_amd64.whl", hash = "sha256:dd8bdf9d16989684486db9ebcd23679140e2d6769fcdaadc05e8cac6b373023e"}, + {file = "ray-2.44.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:7d83999ead98bed7e70afddcda870cc1684773cb888e90768ce1a4c72c5fe009"}, + {file = "ray-2.44.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f221b1426d8657ce0c24508d5ff11cabc4a8c40a833c8c5bb63e2217f37cfd9"}, + {file = "ray-2.44.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:3a3db83c54085535bed9b4861d871f6527c5df61532bf7d2285701485de8968f"}, + {file = "ray-2.44.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:18073261c09811d5faefc918abbd8a6bf5f4968277eeae6fbc0992e3724d1496"}, + {file = "ray-2.44.1-cp310-cp310-win_amd64.whl", hash = "sha256:9decf32c1402f44b5f7a54f29bd422e0ae9a45afc0a10090acf9ba6011ca12a7"}, + {file = "ray-2.44.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:949dbd735e5edec80f6140fa6bb536248c7c97535fe5a11acd279295b7bd1a6d"}, + {file = "ray-2.44.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c21a452227eeecfa3d89b50480d1f9bab11b15c9b3695af41421ab8e7e608cfd"}, + {file = "ray-2.44.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:0c6d395c05542a882d14e31abec1dd1a1808a9a0c0dcf94200a827d2d04c08a1"}, + {file = "ray-2.44.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:6e6bd0430d2eb664ae632c96e74c01e4a1bf14ab2a15102e1809b05ea9e0c2c7"}, + {file = "ray-2.44.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e94bd887898dc08db7f87c0429bc41219aceb552af0b1cd4924c01718fc6a77"}, + {file = "ray-2.44.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:b6c7b677035c08141ae01adc25eade20a979eb7c9cabfe9ad1c99396e157ed59"}, + {file = "ray-2.44.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:865a83eaf06d5e988c441bc2607b8d1f326d952d139f66c18ea21f077fedbff4"}, + {file = "ray-2.44.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:3d9807c9c31d42793ca309747b9c7affdd7488a532979aa346d4c889b828783a"}, + {file = "ray-2.44.1-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:a4c0175cc40e6b065391bc8be0f208bacf8cee7ee61392c7791004f17622e7bd"}, + {file = "ray-2.44.1-cp312-cp312-win_amd64.whl", hash = "sha256:2d62f875c36432b6d5ee666ec23280d23a8de44c0a14a56959aa9b75e644b49f"}, + {file = "ray-2.44.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:12e94c27fedd703566de016314c8a5b164a96c5802313c64e7b643157c3930eb"}, + {file = "ray-2.44.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:16ecb31e1156a8952ad7a27da6e91704a67e8f37cb5519b0afc76fb5b21515d6"}, + {file = "ray-2.44.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:72d09a7bd2803979c322f0820f1138257b2fe7f557a615b6521441a1a14a044a"}, + {file = "ray-2.44.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:1cadfa4ec992d3f9c56ffc5ce22b4a937546954a3f5ea10f32d3cf870e0a6c37"}, + {file = "ray-2.44.1-cp39-cp39-win_amd64.whl", hash = "sha256:35028f39090de977374f7ecdd10fdbd6cffd1dece22c31a9143b5be1da76ac50"}, ] [package.dependencies] @@ -3380,7 +3737,6 @@ grpcio = [ {version = ">=1.42.0", optional = true, markers = 
"python_version >= \"3.10\" and extra == \"default\""}, ] jsonschema = "*" -memray = {version = "*", optional = true, markers = "sys_platform != \"win32\" and extra == \"default\""} msgpack = ">=1.0.0,<2.0.0" numpy = {version = ">=1.20", optional = true, markers = "extra == \"data\""} opencensus = {version = "*", optional = true, markers = "extra == \"default\""} @@ -3388,8 +3744,14 @@ packaging = "*" pandas = {version = ">=1.3", optional = true, markers = "extra == \"data\""} prometheus-client = {version = ">=0.7.1", optional = true, markers = "extra == \"default\""} protobuf = ">=3.15.3,<3.19.5 || >3.19.5" -py-spy = {version = ">=0.2.0", optional = true, markers = "extra == \"default\""} -pyarrow = {version = ">=6.0.1", optional = true, markers = "extra == \"data\""} +py-spy = [ + {version = ">=0.2.0", optional = true, markers = "python_version < \"3.12\" and extra == \"default\""}, + {version = ">=0.4.0", optional = true, markers = "python_version >= \"3.12\" and extra == \"default\""}, +] +pyarrow = [ + {version = ">=9.0.0,<18", optional = true, markers = "sys_platform == \"darwin\" and platform_machine == \"x86_64\" and extra == \"data\""}, + {version = ">=9.0.0", optional = true, markers = "(sys_platform != \"darwin\" or platform_machine != \"x86_64\") and extra == \"data\""}, +] pydantic = {version = "<2.0.dev0 || >=2.5.dev0,<3", optional = true, markers = "extra == \"default\""} pyyaml = "*" requests = "*" @@ -3398,35 +3760,38 @@ virtualenv = {version = ">=20.0.24,<20.21.1 || >20.21.1", optional = true, marke [package.extras] adag = ["cupy-cuda12x ; sys_platform != \"darwin\""] -air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==0.28.1)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio (!=1.56.0)", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==0.28.1)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.35.0)", "requests", "rich", "scikit-image", "scipy", "smart-open", 
"starlette", "tensorboardX (>=1.9)", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -client = ["grpcio (!=1.56.0)"] -cpp = ["ray-cpp (==2.35.0)"] -data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (>=6.0.1)"] -default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "memray ; sys_platform != \"win32\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] -observability = ["opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] -rllib = ["dm-tree", "fsspec", "gymnasium (==0.28.1)", "lz4", "pandas", "pyarrow (>=6.0.1)", "pyyaml", "requests", "rich", "scikit-image", "scipy", "tensorboardX (>=1.9)", "typer"] -serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "memray ; sys_platform != \"win32\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "memray ; sys_platform != \"win32\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -train = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] -tune = ["fsspec", "pandas", "pyarrow (>=6.0.1)", "requests", "tensorboardX (>=1.9)"] +air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", 
"colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.44.1)", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +cgraph = ["cupy-cuda12x ; sys_platform != \"darwin\""] +client = ["grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\""] +cpp = ["ray-cpp (==2.44.1)"] +data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)"] +default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] +llm = ["aiohttp (>=3.7)", "aiohttp-cors", "async-timeout ; python_version < \"3.11\"", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "jsonref (>=1.1.0)", "jsonschema", "ninja", "numpy (>=1.20)", "opencensus", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "vllm (>=0.7.2)", "watchfiles"] +observability = ["memray ; sys_platform != \"win32\"", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] +rllib = ["dm-tree", "fsspec", "gymnasium (==1.0.0)", "lz4", "ormsgpack (==1.7.0)", "pandas", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pyyaml", "requests", "scipy", "tensorboardX (>=1.9)"] +serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pydantic 
(<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +train = ["fsspec", "pandas", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "tensorboardX (>=1.9)"] +tune = ["fsspec", "pandas", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "requests", "tensorboardX (>=1.9)"] [[package]] name = "referencing" -version = "0.35.1" +version = "0.36.2" description = "JSON Referencing + Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "test"] files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, + {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, + {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, ] [package.dependencies] attrs = ">=22.2.0" rpds-py = ">=0.7.0" +typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} [[package]] name = "requests" @@ -3518,127 +3883,138 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.20.0" +version = "0.24.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "test"] files = [ - {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"}, - {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"}, - {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"}, - {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"}, - {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = 
"sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"}, - {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"}, - {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"}, - {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"}, - {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"}, - {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"}, - {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"}, - {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"}, - {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"}, - {file = 
"rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"}, - {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"}, - {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"}, - {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"}, - {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"}, - {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"}, - {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"}, - {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"}, - {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"}, - {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"}, - {file = 
"rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"}, - {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"}, - {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"}, - {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"}, - {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"}, - {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"}, - {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"}, - {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"}, - {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"}, - {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"}, - {file = 
"rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"}, - {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"}, - {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"}, - {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"}, + {file = "rpds_py-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:006f4342fe729a368c6df36578d7a348c7c716be1da0a1a0f86e3021f8e98724"}, + {file = "rpds_py-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2d53747da70a4e4b17f559569d5f9506420966083a31c5fbd84e764461c4444b"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8acd55bd5b071156bae57b555f5d33697998752673b9de554dd82f5b5352727"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7e80d375134ddb04231a53800503752093dbb65dad8dabacce2c84cccc78e964"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60748789e028d2a46fc1c70750454f83c6bdd0d05db50f5ae83e2db500b34da5"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e1daf5bf6c2be39654beae83ee6b9a12347cb5aced9a29eecf12a2d25fff664"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b221c2457d92a1fb3c97bee9095c874144d196f47c038462ae6e4a14436f7bc"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:66420986c9afff67ef0c5d1e4cdc2d0e5262f53ad11e4f90e5e22448df485bf0"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:43dba99f00f1d37b2a0265a259592d05fcc8e7c19d140fe51c6e6f16faabeb1f"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a88c0d17d039333a41d9bf4616bd062f0bd7aa0edeb6cafe00a2fc2a804e944f"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc31e13ce212e14a539d430428cd365e74f8b2d534f8bc22dd4c9c55b277b875"}, + {file = "rpds_py-0.24.0-cp310-cp310-win32.whl", hash = "sha256:fc2c1e1b00f88317d9de6b2c2b39b012ebbfe35fe5e7bef980fd2a91f6100a07"}, + {file = "rpds_py-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0145295ca415668420ad142ee42189f78d27af806fcf1f32a18e51d47dd2052"}, + {file = "rpds_py-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2d3ee4615df36ab8eb16c2507b11e764dcc11fd350bbf4da16d09cda11fcedef"}, + {file = "rpds_py-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e13ae74a8a3a0c2f22f450f773e35f893484fcfacb00bb4344a7e0f4f48e1f97"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf86f72d705fc2ef776bb7dd9e5fbba79d7e1f3e258bf9377f8204ad0fc1c51e"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c43583ea8517ed2e780a345dd9960896afc1327e8cf3ac8239c167530397440d"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cd031e63bc5f05bdcda120646a0d32f6d729486d0067f09d79c8db5368f4586"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34d90ad8c045df9a4259c47d2e16a3f21fdb396665c94520dbfe8766e62187a4"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e838bf2bb0b91ee67bf2b889a1a841e5ecac06dd7a2b1ef4e6151e2ce155c7ae"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04ecf5c1ff4d589987b4d9882872f80ba13da7d42427234fce8f22efb43133bc"}, + {file = 
"rpds_py-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:630d3d8ea77eabd6cbcd2ea712e1c5cecb5b558d39547ac988351195db433f6c"}, + {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ebcb786b9ff30b994d5969213a8430cbb984cdd7ea9fd6df06663194bd3c450c"}, + {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:174e46569968ddbbeb8a806d9922f17cd2b524aa753b468f35b97ff9c19cb718"}, + {file = "rpds_py-0.24.0-cp311-cp311-win32.whl", hash = "sha256:5ef877fa3bbfb40b388a5ae1cb00636a624690dcb9a29a65267054c9ea86d88a"}, + {file = "rpds_py-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:e274f62cbd274359eff63e5c7e7274c913e8e09620f6a57aae66744b3df046d6"}, + {file = "rpds_py-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d8551e733626afec514b5d15befabea0dd70a343a9f23322860c4f16a9430205"}, + {file = "rpds_py-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e374c0ce0ca82e5b67cd61fb964077d40ec177dd2c4eda67dba130de09085c7"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69d003296df4840bd445a5d15fa5b6ff6ac40496f956a221c4d1f6f7b4bc4d9"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8212ff58ac6dfde49946bea57474a386cca3f7706fc72c25b772b9ca4af6b79e"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:528927e63a70b4d5f3f5ccc1fa988a35456eb5d15f804d276709c33fc2f19bda"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a824d2c7a703ba6daaca848f9c3d5cb93af0505be505de70e7e66829affd676e"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d51febb7a114293ffd56c6cf4736cb31cd68c0fddd6aa303ed09ea5a48e029"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fab5f4a2c64a8fb64fc13b3d139848817a64d467dd6ed60dcdd6b479e7febc9"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9be4f99bee42ac107870c61dfdb294d912bf81c3c6d45538aad7aecab468b6b7"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:564c96b6076a98215af52f55efa90d8419cc2ef45d99e314fddefe816bc24f91"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:75a810b7664c17f24bf2ffd7f92416c00ec84b49bb68e6a0d93e542406336b56"}, + {file = "rpds_py-0.24.0-cp312-cp312-win32.whl", hash = "sha256:f6016bd950be4dcd047b7475fdf55fb1e1f59fc7403f387be0e8123e4a576d30"}, + {file = "rpds_py-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:998c01b8e71cf051c28f5d6f1187abbdf5cf45fc0efce5da6c06447cba997034"}, + {file = "rpds_py-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2d8e4508e15fc05b31285c4b00ddf2e0eb94259c2dc896771966a163122a0c"}, + {file = "rpds_py-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f00c16e089282ad68a3820fd0c831c35d3194b7cdc31d6e469511d9bffc535c"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951cc481c0c395c4a08639a469d53b7d4afa252529a085418b82a6b43c45c240"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9ca89938dff18828a328af41ffdf3902405a19f4131c88e22e776a8e228c5a8"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed0ef550042a8dbcd657dfb284a8ee00f0ba269d3f2286b0493b15a5694f9fe8"}, + {file = 
"rpds_py-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2356688e5d958c4d5cb964af865bea84db29971d3e563fb78e46e20fe1848b"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78884d155fd15d9f64f5d6124b486f3d3f7fd7cd71a78e9670a0f6f6ca06fb2d"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a4a535013aeeef13c5532f802708cecae8d66c282babb5cd916379b72110cf7"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:84e0566f15cf4d769dade9b366b7b87c959be472c92dffb70462dd0844d7cbad"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:823e74ab6fbaa028ec89615ff6acb409e90ff45580c45920d4dfdddb069f2120"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c61a2cb0085c8783906b2f8b1f16a7e65777823c7f4d0a6aaffe26dc0d358dd9"}, + {file = "rpds_py-0.24.0-cp313-cp313-win32.whl", hash = "sha256:60d9b630c8025b9458a9d114e3af579a2c54bd32df601c4581bd054e85258143"}, + {file = "rpds_py-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:6eea559077d29486c68218178ea946263b87f1c41ae7f996b1f30a983c476a5a"}, + {file = "rpds_py-0.24.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:d09dc82af2d3c17e7dd17120b202a79b578d79f2b5424bda209d9966efeed114"}, + {file = "rpds_py-0.24.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5fc13b44de6419d1e7a7e592a4885b323fbc2f46e1f22151e3a8ed3b8b920405"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c347a20d79cedc0a7bd51c4d4b7dbc613ca4e65a756b5c3e57ec84bd43505b47"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20f2712bd1cc26a3cc16c5a1bfee9ed1abc33d4cdf1aabd297fe0eb724df4272"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad911555286884be1e427ef0dc0ba3929e6821cbeca2194b13dc415a462c7fd"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aeb3329c1721c43c58cae274d7d2ca85c1690d89485d9c63a006cb79a85771a"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a0f156e9509cee987283abd2296ec816225145a13ed0391df8f71bf1d789e2d"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa6800adc8204ce898c8a424303969b7aa6a5e4ad2789c13f8648739830323b7"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a18fc371e900a21d7392517c6f60fe859e802547309e94313cd8181ad9db004d"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9168764133fd919f8dcca2ead66de0105f4ef5659cbb4fa044f7014bed9a1797"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f6e3cec44ba05ee5cbdebe92d052f69b63ae792e7d05f1020ac5e964394080c"}, + {file = "rpds_py-0.24.0-cp313-cp313t-win32.whl", hash = "sha256:8ebc7e65ca4b111d928b669713865f021b7773350eeac4a31d3e70144297baba"}, + {file = "rpds_py-0.24.0-cp313-cp313t-win_amd64.whl", hash = "sha256:675269d407a257b8c00a6b58205b72eec8231656506c56fd429d924ca00bb350"}, + {file = "rpds_py-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a36b452abbf29f68527cf52e181fced56685731c86b52e852053e38d8b60bc8d"}, + {file = "rpds_py-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b3b397eefecec8e8e39fa65c630ef70a24b09141a6f9fc17b3c3a50bed6b50e"}, + {file = 
"rpds_py-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdabcd3beb2a6dca7027007473d8ef1c3b053347c76f685f5f060a00327b8b65"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5db385bacd0c43f24be92b60c857cf760b7f10d8234f4bd4be67b5b20a7c0b6b"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8097b3422d020ff1c44effc40ae58e67d93e60d540a65649d2cdaf9466030791"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493fe54318bed7d124ce272fc36adbf59d46729659b2c792e87c3b95649cdee9"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8aa362811ccdc1f8dadcc916c6d47e554169ab79559319ae9fae7d7752d0d60c"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d8f9a6e7fd5434817526815f09ea27f2746c4a51ee11bb3439065f5fc754db58"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8205ee14463248d3349131bb8099efe15cd3ce83b8ef3ace63c7e976998e7124"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:921ae54f9ecba3b6325df425cf72c074cd469dea843fb5743a26ca7fb2ccb149"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32bab0a56eac685828e00cc2f5d1200c548f8bc11f2e44abf311d6b548ce2e45"}, + {file = "rpds_py-0.24.0-cp39-cp39-win32.whl", hash = "sha256:f5c0ed12926dec1dfe7d645333ea59cf93f4d07750986a586f511c0bc61fe103"}, + {file = "rpds_py-0.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:afc6e35f344490faa8276b5f2f7cbf71f88bc2cda4328e00553bd451728c571f"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:619ca56a5468f933d940e1bf431c6f4e13bef8e688698b067ae68eb4f9b30e3a"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b28e5122829181de1898c2c97f81c0b3246d49f585f22743a1246420bb8d399"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e5ab32cf9eb3647450bc74eb201b27c185d3857276162c101c0f8c6374e098"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:208b3a70a98cf3710e97cabdc308a51cd4f28aa6e7bb11de3d56cd8b74bab98d"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbc4362e06f950c62cad3d4abf1191021b2ffaf0b31ac230fbf0526453eee75e"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebea2821cdb5f9fef44933617be76185b80150632736f3d76e54829ab4a3b4d1"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4df06c35465ef4d81799999bba810c68d29972bf1c31db61bfdb81dd9d5bb"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3aa13bdf38630da298f2e0d77aca967b200b8cc1473ea05248f6c5e9c9bdb44"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:041f00419e1da7a03c46042453598479f45be3d787eb837af382bfc169c0db33"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8754d872a5dfc3c5bf9c0e059e8107451364a30d9fd50f1f1a85c4fb9481164"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:896c41007931217a343eff197c34513c154267636c8056fb409eafd494c3dcdc"}, + {file = 
"rpds_py-0.24.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:92558d37d872e808944c3c96d0423b8604879a3d1c86fdad508d7ed91ea547d5"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f9e0057a509e096e47c87f753136c9b10d7a91842d8042c2ee6866899a717c0d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d6e109a454412ab82979c5b1b3aee0604eca4bbf9a02693bb9df027af2bfa91a"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1c892b1ec1f8cbd5da8de287577b455e388d9c328ad592eabbdcb6fc93bee5"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c39438c55983d48f4bb3487734d040e22dad200dab22c41e331cee145e7a50d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d7e8ce990ae17dda686f7e82fd41a055c668e13ddcf058e7fb5e9da20b57793"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ea7f4174d2e4194289cb0c4e172d83e79a6404297ff95f2875cf9ac9bced8ba"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb2954155bb8f63bb19d56d80e5e5320b61d71084617ed89efedb861a684baea"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04f2b712a2206e13800a8136b07aaedc23af3facab84918e7aa89e4be0260032"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:eda5c1e2a715a4cbbca2d6d304988460942551e4e5e3b7457b50943cd741626d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:9abc80fe8c1f87218db116016de575a7998ab1629078c90840e8d11ab423ee25"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a727fd083009bc83eb83d6950f0c32b3c94c8b80a9b667c87f4bd1274ca30ba"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e0f3ef95795efcd3b2ec3fe0a5bcfb5dadf5e3996ea2117427e524d4fbf309c6"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2c13777ecdbbba2077670285dd1fe50828c8742f6a4119dbef6f83ea13ad10fb"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e8d804c2ccd618417e96720ad5cd076a86fa3f8cb310ea386a3e6229bae7d1"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd822f019ccccd75c832deb7aa040bb02d70a92eb15a2f16c7987b7ad4ee8d83"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0047638c3aa0dbcd0ab99ed1e549bbf0e142c9ecc173b6492868432d8989a046"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5b66d1b201cc71bc3081bc2f1fc36b0c1f268b773e03bbc39066651b9e18391"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbcbb6db5582ea33ce46a5d20a5793134b5365110d84df4e30b9d37c6fd40ad3"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63981feca3f110ed132fd217bf7768ee8ed738a55549883628ee3da75bb9cb78"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3a55fc10fdcbf1a4bd3c018eea422c52cf08700cf99c28b5cb10fe97ab77a0d3"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:c30ff468163a48535ee7e9bf21bd14c7a81147c0e58a36c1078289a8ca7af0bd"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:369d9c6d4c714e36d4a03957b4783217a3ccd1e222cdd67d464a3a479fc17796"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:24795c099453e3721fda5d8ddd45f5dfcc8e5a547ce7b8e9da06fecc3832e26f"}, + {file = "rpds_py-0.24.0.tar.gz", hash = "sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e"}, ] [[package]] name = "rsa" -version = "4.9" +version = "4.9.1" description = "Pure-Python RSA implementation" optional = false -python-versions = ">=3.6,<4" +python-versions = "<4,>=3.6" groups = ["main"] files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, ] [package.dependencies] @@ -3663,47 +4039,47 @@ win32 = ["pywin32 ; sys_platform == \"win32\""] [[package]] name = "setuptools" -version = "75.1.0" +version = "80.4.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["test"] files = [ - {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"}, - {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"}, + {file = "setuptools-80.4.0-py3-none-any.whl", hash = "sha256:6cdc8cb9a7d590b237dbe4493614a9b75d0559b888047c1f67d49ba50fc3edb2"}, + {file = "setuptools-80.4.0.tar.gz", hash = "sha256:5a78f61820bc088c8e4add52932ae6b8cf423da2aff268c23f813cfbb13b4006"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.5.2) ; sys_platform != \"cygwin\""] -core = ["importlib-metadata (>=6) ; python_version < \"3.10\"", "importlib-resources (>=5.10.2) ; python_version < \"3.9\"", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", 
"jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.11.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main", "test"] files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] name = "smart-open" -version = "7.0.4" +version = "7.1.0" description = "Utils for streaming large files (S3, HDFS, GCS, Azure Blob Storage, gzip, bz2...)" optional = false python-versions = "<4.0,>=3.7" groups = ["main"] files = [ - {file = "smart_open-7.0.4-py3-none-any.whl", hash = "sha256:4e98489932b3372595cddc075e6033194775165702887216b65eba760dfd8d47"}, - {file = "smart_open-7.0.4.tar.gz", hash = "sha256:62b65852bdd1d1d516839fcb1f6bc50cd0f16e05b4ec44b52f43d38bcb838524"}, + {file = "smart_open-7.1.0-py3-none-any.whl", hash = "sha256:4b8489bb6058196258bafe901730c7db0dcf4f083f316e97269c66f45502055b"}, + {file = "smart_open-7.1.0.tar.gz", hash = "sha256:a4f09f84f0f6d3637c6543aca7b5487438877a21360e7368ccf1f704789752ba"}, ] [package.dependencies] @@ -3716,7 +4092,7 @@ gcs = ["google-cloud-storage (>=2.6.0)"] http = ["requests"] s3 = ["boto3"] ssh = ["paramiko"] -test = ["azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "paramiko", "pytest", "pytest-rerunfailures", "requests", "responses", "zstandard"] +test = ["awscli", "azure-common", "azure-core", "azure-storage-blob", "boto3", "google-cloud-storage (>=2.6.0)", "moto[server]", "numpy", "paramiko", "pyopenssl", "pytest", "pytest-benchmark", "pytest-rerunfailures", "requests", "responses", "zstandard"] webhdfs = ["requests"] zst = ["zstandard"] @@ -3734,26 +4110,26 @@ files = [ [[package]] name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+version = "3.0.1" +description = "This package provides 32 stemmers for 30 languages generated from Snowball algorithms." optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" groups = ["docs"] files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, + {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, + {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, ] [[package]] name = "soupsieve" -version = "2.6" +version = "2.7" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" groups = ["test"] files = [ - {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, - {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, + {file = "soupsieve-2.7-py3-none-any.whl", hash = "sha256:6e60cc5c1ffaf1cebcc12e8188320b72071e922c2e897f737cadce79ad5d30c4"}, + {file = "soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a"}, ] [[package]] @@ -3972,14 +4348,14 @@ typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] [[package]] name = "tinycss2" -version = "1.3.0" +version = "1.4.0" description = "A tiny CSS parser" optional = false python-versions = ">=3.8" groups = ["test"] files = [ - {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, - {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, + {file = "tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289"}, + {file = "tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7"}, ] [package.dependencies] @@ -3991,36 +4367,66 @@ test = ["pytest", "ruff"] [[package]] name = "tomli" -version = "2.0.1" +version = "2.2.1" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["docs", "test"] markers = "python_version < \"3.11\"" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file 
= "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "tornado" -version = "6.4.1" +version = "6.4.2" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." optional = false python-versions = ">=3.8" groups = ["test"] files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, + {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, + {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"}, + {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = 
"sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"}, + {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"}, + {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"}, + {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"}, + {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"}, ] [[package]] @@ -4041,39 +4447,38 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "types-python-dateutil" -version = "2.9.0.20240906" +version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" groups = ["test"] files = [ - {file = "types-python-dateutil-2.9.0.20240906.tar.gz", hash = "sha256:9706c3b68284c25adffc47319ecc7947e5bb86b3773f843c73906fd598bc176e"}, - {file = "types_python_dateutil-2.9.0.20240906-py3-none-any.whl", hash = "sha256:27c8cc2d058ccb14946eebcaaa503088f4f6dbc4fb6093d3d456a49aef2753f6"}, + {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, + {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, ] [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" groups = ["main", "test"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] -markers = {test = "python_version < \"3.11\""} [[package]] name = "tzdata" -version = "2024.2" +version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" groups = ["main"] files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, ] [[package]] @@ -4093,14 +4498,14 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" -version = "2.2.3" +version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "docs", "test"] files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] @@ -4111,14 +4516,14 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.6" +version = "20.31.2" description = "Virtual Python Environment builder" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, - {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, + {file = "virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11"}, + {file = "virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af"}, ] [package.dependencies] @@ -4128,7 +4533,7 @@ platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] [[package]] name = "wcwidth" @@ -4144,20 +4549,16 @@ files = [ [[package]] name = "webcolors" -version = "24.8.0" +version = "24.11.1" description = "A library for working with the color formats defined by HTML and CSS." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["test"] files = [ - {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"}, - {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"}, + {file = "webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9"}, + {file = "webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6"}, ] -[package.extras] -docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] -tests = ["coverage[toml]"] - [[package]] name = "webencodings" version = "0.5.1" @@ -4189,213 +4590,235 @@ test = ["websockets"] [[package]] name = "widgetsnbextension" -version = "4.0.13" +version = "4.0.14" description = "Jupyter interactive widgets for Jupyter Notebook" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"}, - {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"}, + {file = "widgetsnbextension-4.0.14-py3-none-any.whl", hash = "sha256:4875a9eaf72fbf5079dc372a51a9f268fc38d46f767cbf85c43a36da5cb9b575"}, + {file = "widgetsnbextension-4.0.14.tar.gz", hash = "sha256:a3629b04e3edb893212df862038c7232f62973373869db5084aed739b437b5af"}, ] [[package]] name = "wrapt" -version = "1.16.0" +version = "1.17.2" description = "Module for decorators, wrappers and monkey patching." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" groups = ["main"] files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = 
"wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3d57c572081fed831ad2d26fd430d565b76aa277ed1d30ff4d40670b1c0dd984"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5e251054542ae57ac7f3fba5d10bfff615b6c2fb09abeb37d2f1463f841ae22"}, + {file = "wrapt-1.17.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:80dd7db6a7cb57ffbc279c4394246414ec99537ae81ffd702443335a61dbf3a7"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a6e821770cf99cc586d33833b2ff32faebdbe886bd6322395606cf55153246c"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60fb58b90c6d63779cb0c0c54eeb38941bae3ecf7a73c764c52c88c2dcb9d72"}, + {file = "wrapt-1.17.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b870b5df5b71d8c3359d21be8f0d6c485fa0ebdb6477dda51a1ea54a9b558061"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4011d137b9955791f9084749cba9a367c68d50ab8d11d64c50ba1688c9b457f2"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1473400e5b2733e58b396a04eb7f35f541e1fb976d0c0724d0223dd607e0f74c"}, + {file = "wrapt-1.17.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash 
= "sha256:3cedbfa9c940fdad3e6e941db7138e26ce8aad38ab5fe9dcfadfed9db7a54e62"}, + {file = "wrapt-1.17.2-cp310-cp310-win32.whl", hash = "sha256:582530701bff1dec6779efa00c516496968edd851fba224fbd86e46cc6b73563"}, + {file = "wrapt-1.17.2-cp310-cp310-win_amd64.whl", hash = "sha256:58705da316756681ad3c9c73fd15499aa4d8c69f9fd38dc8a35e06c12468582f"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ff04ef6eec3eee8a5efef2401495967a916feaa353643defcc03fc74fe213b58"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4db983e7bca53819efdbd64590ee96c9213894272c776966ca6306b73e4affda"}, + {file = "wrapt-1.17.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9abc77a4ce4c6f2a3168ff34b1da9b0f311a8f1cfd694ec96b0603dff1c79438"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b929ac182f5ace000d459c59c2c9c33047e20e935f8e39371fa6e3b85d56f4a"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f09b286faeff3c750a879d336fb6d8713206fc97af3adc14def0cdd349df6000"}, + {file = "wrapt-1.17.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7ed2d9d039bd41e889f6fb9364554052ca21ce823580f6a07c4ec245c1f5d6"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129a150f5c445165ff941fc02ee27df65940fcb8a22a61828b1853c98763a64b"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fb5699e4464afe5c7e65fa51d4f99e0b2eadcc176e4aa33600a3df7801d6662"}, + {file = "wrapt-1.17.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9a2bce789a5ea90e51a02dfcc39e31b7f1e662bc3317979aa7e5538e3a034f72"}, + {file = "wrapt-1.17.2-cp311-cp311-win32.whl", hash = "sha256:4afd5814270fdf6380616b321fd31435a462019d834f83c8611a0ce7484c7317"}, + {file = "wrapt-1.17.2-cp311-cp311-win_amd64.whl", hash = "sha256:acc130bc0375999da18e3d19e5a86403667ac0c4042a094fefb7eec8ebac7cf3"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d5e2439eecc762cd85e7bd37161d4714aa03a33c5ba884e26c81559817ca0925"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fc7cb4c1c744f8c05cd5f9438a3caa6ab94ce8344e952d7c45a8ed59dd88392"}, + {file = "wrapt-1.17.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fdbdb757d5390f7c675e558fd3186d590973244fab0c5fe63d373ade3e99d40"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb1d0dbf99411f3d871deb6faa9aabb9d4e744d67dcaaa05399af89d847a91d"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d18a4865f46b8579d44e4fe1e2bcbc6472ad83d98e22a26c963d46e4c125ef0b"}, + {file = "wrapt-1.17.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc570b5f14a79734437cb7b0500376b6b791153314986074486e0b0fa8d71d98"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6d9187b01bebc3875bac9b087948a2bccefe464a7d8f627cf6e48b1bbae30f82"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e8659775f1adf02eb1e6f109751268e493c73716ca5761f8acb695e52a756ae"}, + {file = "wrapt-1.17.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8b2816ebef96d83657b56306152a93909a83f23994f4b30ad4573b00bd11bb9"}, + {file = 
"wrapt-1.17.2-cp312-cp312-win32.whl", hash = "sha256:468090021f391fe0056ad3e807e3d9034e0fd01adcd3bdfba977b6fdf4213ea9"}, + {file = "wrapt-1.17.2-cp312-cp312-win_amd64.whl", hash = "sha256:ec89ed91f2fa8e3f52ae53cd3cf640d6feff92ba90d62236a81e4e563ac0e991"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6ed6ffac43aecfe6d86ec5b74b06a5be33d5bb9243d055141e8cabb12aa08125"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:35621ae4c00e056adb0009f8e86e28eb4a41a4bfa8f9bfa9fca7d343fe94f998"}, + {file = "wrapt-1.17.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a604bf7a053f8362d27eb9fefd2097f82600b856d5abe996d623babd067b1ab5"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cbabee4f083b6b4cd282f5b817a867cf0b1028c54d445b7ec7cfe6505057cf8"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49703ce2ddc220df165bd2962f8e03b84c89fee2d65e1c24a7defff6f988f4d6"}, + {file = "wrapt-1.17.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8112e52c5822fc4253f3901b676c55ddf288614dc7011634e2719718eaa187dc"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fee687dce376205d9a494e9c121e27183b2a3df18037f89d69bd7b35bcf59e2"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:18983c537e04d11cf027fbb60a1e8dfd5190e2b60cc27bc0808e653e7b218d1b"}, + {file = "wrapt-1.17.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:703919b1633412ab54bcf920ab388735832fdcb9f9a00ae49387f0fe67dad504"}, + {file = "wrapt-1.17.2-cp313-cp313-win32.whl", hash = "sha256:abbb9e76177c35d4e8568e58650aa6926040d6a9f6f03435b7a522bf1c487f9a"}, + {file = "wrapt-1.17.2-cp313-cp313-win_amd64.whl", hash = "sha256:69606d7bb691b50a4240ce6b22ebb319c1cfb164e5f6569835058196e0f3a845"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4a721d3c943dae44f8e243b380cb645a709ba5bd35d3ad27bc2ed947e9c68192"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:766d8bbefcb9e00c3ac3b000d9acc51f1b399513f44d77dfe0eb026ad7c9a19b"}, + {file = "wrapt-1.17.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e496a8ce2c256da1eb98bd15803a79bee00fc351f5dfb9ea82594a3f058309e0"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d615e4fe22f4ad3528448c193b218e077656ca9ccb22ce2cb20db730f8d306"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5aaeff38654462bc4b09023918b7f21790efb807f54c000a39d41d69cf552cb"}, + {file = "wrapt-1.17.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7d15bbd2bc99e92e39f49a04653062ee6085c0e18b3b7512a4f2fe91f2d681"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e3890b508a23299083e065f435a492b5435eba6e304a7114d2f919d400888cc6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8c8b293cd65ad716d13d8dd3624e42e5a19cc2a2f1acc74b30c2c13f15cb61a6"}, + {file = "wrapt-1.17.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c82b8785d98cdd9fed4cac84d765d234ed3251bd6afe34cb7ac523cb93e8b4f"}, + {file = "wrapt-1.17.2-cp313-cp313t-win32.whl", hash = 
"sha256:13e6afb7fe71fe7485a4550a8844cc9ffbe263c0f1a1eea569bc7091d4898555"}, + {file = "wrapt-1.17.2-cp313-cp313t-win_amd64.whl", hash = "sha256:eaf675418ed6b3b31c7a989fd007fa7c3be66ce14e5c3b27336383604c9da85c"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c803c401ea1c1c18de70a06a6f79fcc9c5acfc79133e9869e730ad7f8ad8ef9"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f917c1180fdb8623c2b75a99192f4025e412597c50b2ac870f156de8fb101119"}, + {file = "wrapt-1.17.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ecc840861360ba9d176d413a5489b9a0aff6d6303d7e733e2c4623cfa26904a6"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb87745b2e6dc56361bfde481d5a378dc314b252a98d7dd19a651a3fa58f24a9"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58455b79ec2661c3600e65c0a716955adc2410f7383755d537584b0de41b1d8a"}, + {file = "wrapt-1.17.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4e42a40a5e164cbfdb7b386c966a588b1047558a990981ace551ed7e12ca9c2"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:91bd7d1773e64019f9288b7a5101f3ae50d3d8e6b1de7edee9c2ccc1d32f0c0a"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bb90fb8bda722a1b9d48ac1e6c38f923ea757b3baf8ebd0c82e09c5c1a0e7a04"}, + {file = "wrapt-1.17.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:08e7ce672e35efa54c5024936e559469436f8b8096253404faeb54d2a878416f"}, + {file = "wrapt-1.17.2-cp38-cp38-win32.whl", hash = "sha256:410a92fefd2e0e10d26210e1dfb4a876ddaf8439ef60d6434f21ef8d87efc5b7"}, + {file = "wrapt-1.17.2-cp38-cp38-win_amd64.whl", hash = "sha256:95c658736ec15602da0ed73f312d410117723914a5c91a14ee4cdd72f1d790b3"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99039fa9e6306880572915728d7f6c24a86ec57b0a83f6b2491e1d8ab0235b9a"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2696993ee1eebd20b8e4ee4356483c4cb696066ddc24bd70bcbb80fa56ff9061"}, + {file = "wrapt-1.17.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:612dff5db80beef9e649c6d803a8d50c409082f1fedc9dbcdfde2983b2025b82"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c2caa1585c82b3f7a7ab56afef7b3602021d6da34fbc1cf234ff139fed3cd9"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c958bcfd59bacc2d0249dcfe575e71da54f9dcf4a8bdf89c4cb9a68a1170d73f"}, + {file = "wrapt-1.17.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc78a84e2dfbc27afe4b2bd7c80c8db9bca75cc5b85df52bfe634596a1da846b"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba0f0eb61ef00ea10e00eb53a9129501f52385c44853dbd6c4ad3f403603083f"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1e1fe0e6ab7775fd842bc39e86f6dcfc4507ab0ffe206093e76d61cde37225c8"}, + {file = "wrapt-1.17.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c86563182421896d73858e08e1db93afdd2b947a70064b813d515d66549e15f9"}, + {file = "wrapt-1.17.2-cp39-cp39-win32.whl", hash = "sha256:f393cda562f79828f38a819f4788641ac7c4085f30f1ce1a68672baa686482bb"}, + {file = "wrapt-1.17.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:36ccae62f64235cf8ddb682073a60519426fdd4725524ae38874adf72b5f2aeb"}, + {file = "wrapt-1.17.2-py3-none-any.whl", hash = "sha256:b18f2d1533a71f069c7f82d524a52599053d4c7166e9dd374ae2136b7f40f7c8"}, + {file = "wrapt-1.17.2.tar.gz", hash = "sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, ] [[package]] name = "yarl" -version = "1.13.1" +version = "1.20.0" description = "Yet another URL library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:82e692fb325013a18a5b73a4fed5a1edaa7c58144dc67ad9ef3d604eccd451ad"}, - {file = "yarl-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df4e82e68f43a07735ae70a2d84c0353e58e20add20ec0af611f32cd5ba43fb4"}, - {file = "yarl-1.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec9dd328016d8d25702a24ee274932aebf6be9787ed1c28d021945d264235b3c"}, - {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5820bd4178e6a639b3ef1db8b18500a82ceab6d8b89309e121a6859f56585b05"}, - {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86c438ce920e089c8c2388c7dcc8ab30dfe13c09b8af3d306bcabb46a053d6f7"}, - {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3de86547c820e4f4da4606d1c8ab5765dd633189791f15247706a2eeabc783ae"}, - {file = "yarl-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca53632007c69ddcdefe1e8cbc3920dd88825e618153795b57e6ebcc92e752a"}, - {file = "yarl-1.13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4ee1d240b84e2f213565f0ec08caef27a0e657d4c42859809155cf3a29d1735"}, - {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c49f3e379177f4477f929097f7ed4b0622a586b0aa40c07ac8c0f8e40659a1ac"}, - {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5c5e32fef09ce101fe14acd0f498232b5710effe13abac14cd95de9c274e689e"}, - {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab9524e45ee809a083338a749af3b53cc7efec458c3ad084361c1dbf7aaf82a2"}, - {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b1481c048fe787f65e34cb06f7d6824376d5d99f1231eae4778bbe5c3831076d"}, - {file = "yarl-1.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:31497aefd68036d8e31bfbacef915826ca2e741dbb97a8d6c7eac66deda3b606"}, - {file = "yarl-1.13.1-cp310-cp310-win32.whl", hash = "sha256:1fa56f34b2236f5192cb5fceba7bbb09620e5337e0b6dfe2ea0ddbd19dd5b154"}, - {file = "yarl-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:1bbb418f46c7f7355084833051701b2301092e4611d9e392360c3ba2e3e69f88"}, - {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:216a6785f296169ed52cd7dcdc2612f82c20f8c9634bf7446327f50398732a51"}, - {file = "yarl-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40c6e73c03a6befb85b72da213638b8aaa80fe4136ec8691560cf98b11b8ae6e"}, - {file = "yarl-1.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2430cf996113abe5aee387d39ee19529327205cda975d2b82c0e7e96e5fdabdc"}, - {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fb4134cc6e005b99fa29dbc86f1ea0a298440ab6b07c6b3ee09232a3b48f495"}, - {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:309c104ecf67626c033845b860d31594a41343766a46fa58c3309c538a1e22b2"}, - {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f90575e9fe3aae2c1e686393a9689c724cd00045275407f71771ae5d690ccf38"}, - {file = "yarl-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d2e1626be8712333a9f71270366f4a132f476ffbe83b689dd6dc0d114796c74"}, - {file = "yarl-1.13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b66c87da3c6da8f8e8b648878903ca54589038a0b1e08dde2c86d9cd92d4ac9"}, - {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cf1ad338620249f8dd6d4b6a91a69d1f265387df3697ad5dc996305cf6c26fb2"}, - {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9915300fe5a0aa663c01363db37e4ae8e7c15996ebe2c6cce995e7033ff6457f"}, - {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:703b0f584fcf157ef87816a3c0ff868e8c9f3c370009a8b23b56255885528f10"}, - {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1d8e3ca29f643dd121f264a7c89f329f0fcb2e4461833f02de6e39fef80f89da"}, - {file = "yarl-1.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7055bbade838d68af73aea13f8c86588e4bcc00c2235b4b6d6edb0dbd174e246"}, - {file = "yarl-1.13.1-cp311-cp311-win32.whl", hash = "sha256:a3442c31c11088e462d44a644a454d48110f0588de830921fd201060ff19612a"}, - {file = "yarl-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:81bad32c8f8b5897c909bf3468bf601f1b855d12f53b6af0271963ee67fff0d2"}, - {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f452cc1436151387d3d50533523291d5f77c6bc7913c116eb985304abdbd9ec9"}, - {file = "yarl-1.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9cec42a20eae8bebf81e9ce23fb0d0c729fc54cf00643eb251ce7c0215ad49fe"}, - {file = "yarl-1.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d959fe96e5c2712c1876d69af0507d98f0b0e8d81bee14cfb3f6737470205419"}, - {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8c837ab90c455f3ea8e68bee143472ee87828bff19ba19776e16ff961425b57"}, - {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94a993f976cdcb2dc1b855d8b89b792893220db8862d1a619efa7451817c836b"}, - {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2442a415a5f4c55ced0fade7b72123210d579f7d950e0b5527fc598866e62c"}, - {file = "yarl-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fdbf0418489525231723cdb6c79e7738b3cbacbaed2b750cb033e4ea208f220"}, - {file = "yarl-1.13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b7f6e699304717fdc265a7e1922561b02a93ceffdaefdc877acaf9b9f3080b8"}, - {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bcd5bf4132e6a8d3eb54b8d56885f3d3a38ecd7ecae8426ecf7d9673b270de43"}, - {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2a93a4557f7fc74a38ca5a404abb443a242217b91cd0c4840b1ebedaad8919d4"}, - {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:22b739f99c7e4787922903f27a892744189482125cc7b95b747f04dd5c83aa9f"}, - {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2db874dd1d22d4c2c657807562411ffdfabec38ce4c5ce48b4c654be552759dc"}, - {file = "yarl-1.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:4feaaa4742517eaceafcbe74595ed335a494c84634d33961214b278126ec1485"}, - {file = "yarl-1.13.1-cp312-cp312-win32.whl", hash = "sha256:bbf9c2a589be7414ac4a534d54e4517d03f1cbb142c0041191b729c2fa23f320"}, - {file = "yarl-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:d07b52c8c450f9366c34aa205754355e933922c79135125541daae6cbf31c799"}, - {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:95c6737f28069153c399d875317f226bbdea939fd48a6349a3b03da6829fb550"}, - {file = "yarl-1.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cd66152561632ed4b2a9192e7f8e5a1d41e28f58120b4761622e0355f0fe034c"}, - {file = "yarl-1.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6a2acde25be0cf9be23a8f6cbd31734536a264723fca860af3ae5e89d771cd71"}, - {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a18595e6a2ee0826bf7dfdee823b6ab55c9b70e8f80f8b77c37e694288f5de1"}, - {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a31d21089894942f7d9a8df166b495101b7258ff11ae0abec58e32daf8088813"}, - {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45f209fb4bbfe8630e3d2e2052535ca5b53d4ce2d2026bed4d0637b0416830da"}, - {file = "yarl-1.13.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f722f30366474a99745533cc4015b1781ee54b08de73260b2bbe13316079851"}, - {file = "yarl-1.13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3bf60444269345d712838bb11cc4eadaf51ff1a364ae39ce87a5ca8ad3bb2c8"}, - {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:942c80a832a79c3707cca46bd12ab8aa58fddb34b1626d42b05aa8f0bcefc206"}, - {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:44b07e1690f010c3c01d353b5790ec73b2f59b4eae5b0000593199766b3f7a5c"}, - {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:396e59b8de7e4d59ff5507fb4322d2329865b909f29a7ed7ca37e63ade7f835c"}, - {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3bb83a0f12701c0b91112a11148b5217617982e1e466069d0555be9b372f2734"}, - {file = "yarl-1.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c92b89bffc660f1274779cb6fbb290ec1f90d6dfe14492523a0667f10170de26"}, - {file = "yarl-1.13.1-cp313-cp313-win32.whl", hash = "sha256:269c201bbc01d2cbba5b86997a1e0f73ba5e2f471cfa6e226bcaa7fd664b598d"}, - {file = "yarl-1.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:1d0828e17fa701b557c6eaed5edbd9098eb62d8838344486248489ff233998b8"}, - {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8be8cdfe20787e6a5fcbd010f8066227e2bb9058331a4eccddec6c0db2bb85b2"}, - {file = "yarl-1.13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08d7148ff11cb8e886d86dadbfd2e466a76d5dd38c7ea8ebd9b0e07946e76e4b"}, - {file = "yarl-1.13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4afdf84610ca44dcffe8b6c22c68f309aff96be55f5ea2fa31c0c225d6b83e23"}, - {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0d12fe78dcf60efa205e9a63f395b5d343e801cf31e5e1dda0d2c1fb618073d"}, - {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:298c1eecfd3257aa16c0cb0bdffb54411e3e831351cd69e6b0739be16b1bdaa8"}, - {file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c14c16831b565707149c742d87a6203eb5597f4329278446d5c0ae7a1a43928e"}, - 
{file = "yarl-1.13.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a9bacedbb99685a75ad033fd4de37129449e69808e50e08034034c0bf063f99"}, - {file = "yarl-1.13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:658e8449b84b92a4373f99305de042b6bd0d19bf2080c093881e0516557474a5"}, - {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:373f16f38721c680316a6a00ae21cc178e3a8ef43c0227f88356a24c5193abd6"}, - {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:45d23c4668d4925688e2ea251b53f36a498e9ea860913ce43b52d9605d3d8177"}, - {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f7917697bcaa3bc3e83db91aa3a0e448bf5cde43c84b7fc1ae2427d2417c0224"}, - {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5989a38ba1281e43e4663931a53fbf356f78a0325251fd6af09dd03b1d676a09"}, - {file = "yarl-1.13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:11b3ca8b42a024513adce810385fcabdd682772411d95bbbda3b9ed1a4257644"}, - {file = "yarl-1.13.1-cp38-cp38-win32.whl", hash = "sha256:dcaef817e13eafa547cdfdc5284fe77970b891f731266545aae08d6cce52161e"}, - {file = "yarl-1.13.1-cp38-cp38-win_amd64.whl", hash = "sha256:7addd26594e588503bdef03908fc207206adac5bd90b6d4bc3e3cf33a829f57d"}, - {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a0ae6637b173d0c40b9c1462e12a7a2000a71a3258fa88756a34c7d38926911c"}, - {file = "yarl-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:576365c9f7469e1f6124d67b001639b77113cfd05e85ce0310f5f318fd02fe85"}, - {file = "yarl-1.13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:78f271722423b2d4851cf1f4fa1a1c4833a128d020062721ba35e1a87154a049"}, - {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d74f3c335cfe9c21ea78988e67f18eb9822f5d31f88b41aec3a1ec5ecd32da5"}, - {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1891d69a6ba16e89473909665cd355d783a8a31bc84720902c5911dbb6373465"}, - {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb382fd7b4377363cc9f13ba7c819c3c78ed97c36a82f16f3f92f108c787cbbf"}, - {file = "yarl-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c8854b9f80693d20cec797d8e48a848c2fb273eb6f2587b57763ccba3f3bd4b"}, - {file = "yarl-1.13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbf2c3f04ff50f16404ce70f822cdc59760e5e2d7965905f0e700270feb2bbfc"}, - {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fb9f59f3848edf186a76446eb8bcf4c900fe147cb756fbbd730ef43b2e67c6a7"}, - {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ef9b85fa1bc91c4db24407e7c4da93a5822a73dd4513d67b454ca7064e8dc6a3"}, - {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:098b870c18f1341786f290b4d699504e18f1cd050ed179af8123fd8232513424"}, - {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8c723c91c94a3bc8033dd2696a0f53e5d5f8496186013167bddc3fb5d9df46a3"}, - {file = "yarl-1.13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44a4c40a6f84e4d5955b63462a0e2a988f8982fba245cf885ce3be7618f6aa7d"}, - {file = "yarl-1.13.1-cp39-cp39-win32.whl", hash = "sha256:84bbcdcf393139f0abc9f642bf03f00cac31010f3034faa03224a9ef0bb74323"}, - {file = "yarl-1.13.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:fc2931ac9ce9c61c9968989ec831d3a5e6fcaaff9474e7cfa8de80b7aff5a093"}, - {file = "yarl-1.13.1-py3-none-any.whl", hash = "sha256:6a5185ad722ab4dd52d5fb1f30dcc73282eb1ed494906a92d1a228d3f89607b0"}, - {file = "yarl-1.13.1.tar.gz", hash = "sha256:ec8cfe2295f3e5e44c51f57272afbd69414ae629ec7c6b27f5a410efc78b70a0"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f1f6670b9ae3daedb325fa55fbe31c22c8228f6e0b513772c2e1c623caa6ab22"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85a231fa250dfa3308f3c7896cc007a47bc76e9e8e8595c20b7426cac4884c62"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a06701b647c9939d7019acdfa7ebbfbb78ba6aa05985bb195ad716ea759a569"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19"}, + {file = "yarl-1.20.0-cp310-cp310-win32.whl", hash = "sha256:f4d3fa9b9f013f7050326e165c3279e22850d02ae544ace285674cb6174b5d6d"}, + {file = "yarl-1.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:bc906b636239631d42eb8a07df8359905da02704a868983265603887ed68c076"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fdb5204d17cb32b2de2d1e21c7461cabfacf17f3645e4b9039f210c5d3378bf3"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eaddd7804d8e77d67c28d154ae5fab203163bd0998769569861258e525039d2a"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:634b7ba6b4a85cf67e9df7c13a7fb2e44fa37b5d34501038d174a63eaac25ee2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e"}, + {file = 
"yarl-1.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5"}, + {file = "yarl-1.20.0-cp311-cp311-win32.whl", hash = "sha256:db243357c6c2bf3cd7e17080034ade668d54ce304d820c2a58514a4e51d0cfd6"}, + {file = "yarl-1.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c12cd754d9dbd14204c328915e23b0c361b88f3cffd124129955e60a4fbfcfb"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672"}, + {file = 
"yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b"}, + {file = "yarl-1.20.0-cp312-cp312-win32.whl", hash = "sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64"}, + {file = "yarl-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384"}, + {file = "yarl-1.20.0-cp313-cp313-win32.whl", hash = 
"sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62"}, + {file = "yarl-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f"}, + {file = "yarl-1.20.0-cp313-cp313t-win32.whl", hash = "sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac"}, + {file = "yarl-1.20.0-cp313-cp313t-win_amd64.whl", hash = "sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:119bca25e63a7725b0c9d20ac67ca6d98fa40e5a894bd5d4686010ff73397914"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35d20fb919546995f1d8c9e41f485febd266f60e55383090010f272aca93edcc"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:484e7a08f72683c0f160270566b4395ea5412b4359772b98659921411d32ad26"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0"}, + {file = "yarl-1.20.0-cp39-cp39-win32.whl", hash = "sha256:b7fa0cb9fd27ffb1211cde944b41f5c67ab1c13a13ebafe470b1e206b8459da8"}, + {file = "yarl-1.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4fad6e5189c847820288286732075f213eabf81be4d08d6cc309912e62be5b7"}, + {file = "yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124"}, + {file = "yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307"}, ] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" +propcache = ">=0.2.1" [[package]] name = "zipp" -version = "3.20.2" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["docs", "test"] markers = "python_version == \"3.9\"" files = [ - {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, - {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] @@ -4409,4 +4832,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "6e5e74183be00c02f89e0da419aec0b032025410f8d2ae8685263fb5bc38b911" +content-hash = "6dd8c3cff721090b386fbbe8d8fe7c3bea52570f79605dacb7b9193d546eb2a3" diff --git a/pyproject.toml b/pyproject.toml index d31ea2e1..f3bde976 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] python = "^3.9" openshift-client = "1.0.18" rich = ">=12.5,<14.0" -ray = {version = 
"2.35.0", extras = ["data", "default"]} +ray = {version = "2.44.1", extras = ["data", "default"]} kubernetes = ">= 27.2.0" cryptography = "43.0.3" executing = "1.2.0" diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 28a30381..bf437009 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -277,7 +277,7 @@ def get_expected_image(): if python_version.major == 3 and python_version.minor == 9: return "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06" else: - return "quay.io/modh/ray@sha256:db667df1bc437a7b0965e8031e905d3ab04b86390d764d120e05ea5a5c18d1b4" + return "quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31" def get_template_variables(): diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index 215ac32e..2b8180a2 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -94,7 +94,7 @@ SUPPORTED_PYTHON_VERSIONS = { "3.9": "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", - "3.11": "quay.io/modh/ray@sha256:db667df1bc437a7b0965e8031e905d3ab04b86390d764d120e05ea5a5c18d1b4", + "3.11": "quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31", } @@ -105,7 +105,7 @@ def build_ray_cluster(cluster: "codeflare_sdk.ray.cluster.Cluster"): The resource is a dict template which uses Kubernetes Objects for creating metadata, resource requests, specs and containers. The result is sanitised and returned either as a dict or written as a yaml file. 
""" - ray_version = "2.35.0" + ray_version = "2.44.1" # GPU related variables head_gpu_count, worker_gpu_count = head_worker_gpu_count_from_cluster(cluster) diff --git a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml index 9166eced..4a14f01f 100644 --- a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml +++ b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml @@ -66,7 +66,7 @@ spec: requests: cpu: 2 memory: 8G - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-unit-test-cluster maxReplicas: 2 diff --git a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml index 0977d659..825318a0 100644 --- a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml @@ -131,7 +131,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-aw-all-params maxReplicas: 10 diff --git a/tests/test_cluster_yamls/kueue/aw_kueue.yaml b/tests/test_cluster_yamls/kueue/aw_kueue.yaml index b5b5ae3f..fd78f070 100644 --- a/tests/test_cluster_yamls/kueue/aw_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/aw_kueue.yaml @@ -90,7 +90,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-unit-test-aw-kueue maxReplicas: 2 diff --git a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml index ad179a0b..a6dd81d7 100644 --- a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml @@ -90,7 +90,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-unit-test-cluster-kueue maxReplicas: 2 diff --git a/tests/test_cluster_yamls/ray/default-appwrapper.yaml b/tests/test_cluster_yamls/ray/default-appwrapper.yaml index eadfeaa0..6d1cdcd5 100644 --- a/tests/test_cluster_yamls/ray/default-appwrapper.yaml +++ b/tests/test_cluster_yamls/ray/default-appwrapper.yaml @@ -88,7 +88,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-default-appwrapper maxReplicas: 1 diff --git a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml index 056479e6..38e02f8f 100644 --- a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml +++ b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml @@ -80,7 +80,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-default-cluster maxReplicas: 1 diff --git a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml index 188319ab..16a703b8 100644 --- a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml @@ -122,7 +122,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-test-all-params maxReplicas: 10 diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml 
b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml index fe26900d..6600381f 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml @@ -91,7 +91,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-test-cluster-a maxReplicas: 1 diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml index eed571fe..e0e4ae94 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml @@ -91,7 +91,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-test-cluster-b maxReplicas: 1 diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml index 5f5d456c..51136cc7 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml @@ -82,7 +82,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-test-cluster-a maxReplicas: 1 diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml index 3bf894db..e180a442 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml @@ -82,7 +82,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.35.0 + rayVersion: 2.44.1 workerGroupSpecs: - groupName: small-group-test-rc-b maxReplicas: 1 From 82fa656fa8069dd32a03275ecefced46faf034f6 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Tue, 6 May 2025 08:23:26 +0100 Subject: [PATCH 462/496] chore: remove python 3.9 support --- .github/workflows/odh-notebooks-sync.yml | 2 +- .github/workflows/publish-documentation.yaml | 2 +- .../additional-demos/hf_interactive.ipynb | 1 - .../additional-demos/local_interactive.ipynb | 3 +- .../additional-demos/ray_job_client.ipynb | 3 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 3 +- .../guided-demos/1_cluster_job_client.ipynb | 3 +- .../guided-demos/2_basic_interactive.ipynb | 3 +- .../guided-demos/3_widget_example.ipynb | 3 +- .../notebook-ex-outputs/0_basic_ray.ipynb | 3 +- .../1_cluster_job_client.ipynb | 3 +- .../2_basic_interactive.ipynb | 3 +- .../notebook-ex-outputs/interactivetest.yaml | 4 +- .../notebook-ex-outputs/jobtest.yaml | 4 +- .../notebook-ex-outputs/raytest.yaml | 4 +- .../preview_nbs/0_basic_ray.ipynb | 3 +- .../preview_nbs/1_cluster_job_client.ipynb | 3 +- .../preview_nbs/2_basic_interactive.ipynb | 3 +- .../user-docs/cluster-configuration.rst | 5 +- poetry.lock | 315 +----------------- pyproject.toml | 2 +- .../common/utils/unit_test_support.py | 8 +- .../ray/cluster/build_ray_cluster.py | 5 +- .../ray/cluster/test_build_ray_cluster.py | 3 +- src/utils/constants.py | 1 + tests/e2e/support.py | 2 +- .../appwrapper/test-case-bad.yaml | 4 +- .../support_clusters/test-aw-a.yaml | 4 +- .../support_clusters/test-aw-b.yaml | 4 +- .../support_clusters/test-rc-a.yaml | 4 +- .../support_clusters/test-rc-b.yaml | 4 +- 31 files changed, 46 insertions(+), 368 deletions(-) create mode 100644 src/utils/constants.py diff --git 
a/.github/workflows/odh-notebooks-sync.yml b/.github/workflows/odh-notebooks-sync.yml index 0b55890b..91f5aecb 100644 --- a/.github/workflows/odh-notebooks-sync.yml +++ b/.github/workflows/odh-notebooks-sync.yml @@ -33,7 +33,7 @@ env: REPO_OWNER: ${{ github.event.inputs.codeflare-repository-organization }} REPO_NAME: notebooks GITHUB_TOKEN: ${{ secrets.CODEFLARE_MACHINE_ACCOUNT_TOKEN }} - MINIMUM_SUPPORTED_PYTHON_VERSION: 3.9 + MINIMUM_SUPPORTED_PYTHON_VERSION: 3.11 jobs: build: diff --git a/.github/workflows/publish-documentation.yaml b/.github/workflows/publish-documentation.yaml index 80afe7d6..a96891c3 100644 --- a/.github/workflows/publish-documentation.yaml +++ b/.github/workflows/publish-documentation.yaml @@ -19,7 +19,7 @@ jobs: - name: Install Python uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: 3.11 - name: Install Sphinx run: | sudo apt-get update diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index d75d96ec..fcb45bf6 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -70,7 +70,6 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index ed105b45..ed9e5648 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -37,8 +37,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 4b129190..8d28ad13 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -43,8 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 603a52a2..1e729cc1 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -49,8 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index f33fa2ec..73ef0b2a 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -43,8 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index fccca9d3..118720aa 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -46,8 +46,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/3_widget_example.ipynb b/demo-notebooks/guided-demos/3_widget_example.ipynb index ea5ad08a..3f44afe3 100644 --- a/demo-notebooks/guided-demos/3_widget_example.ipynb +++ b/demo-notebooks/guided-demos/3_widget_example.ipynb @@ -49,8 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index fb1a8d61..2ac5109e 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -49,8 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index 7e62f52a..ff8d53b7 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -43,8 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index fe8f3274..57538aa8 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -46,8 +46,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index fd6500a7..1ac7e603 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -81,7 +81,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.35.0-py39-cu121 + image: quay.io/modh/ray:2.44.1-py311-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -147,7 +147,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.35.0-py39-cu121 + image: quay.io/modh/ray:2.44.1-py311-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index a33a9cf5..840b4f81 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.35.0-py39-cu121 + image: quay.io/modh/ray:2.44.1-py311-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.35.0-py39-cu121 + image: quay.io/modh/ray:2.44.1-py311-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index 151d2e28..6dc0b8b6 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.35.0-py39-cu121 + image: quay.io/modh/ray:2.44.1-py311-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.35.0-py39-cu121 + image: quay.io/modh/ray:2.44.1-py311-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index fb1a8d61..2ac5109e 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -49,8 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index 34defbf9..d7113ae5 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -43,8 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 7f50437f..8fd9ce0d 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -46,8 +46,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.9: 'quay.io/modh/ray:2.35.0-py39-cu121'\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121 '\n", + "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index d60e9c92..e08b9f6e 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -35,13 +35,12 @@ requirements for creating the Ray Cluster. The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version: - - For Python 3.9: `quay.io/modh/ray:2.35.0-py39-cu121` - - For Python 3.11: `quay.io/modh/ray:2.44.1-py311-cu121 ` + - For Python 3.11: `quay.io/modh/ray:2.44.1-py311-cu121` If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default. If you are using ROCm compatible GPUs you - can use `quay.io/modh/ray:2.35.0-py311-rocm62` or `quay.io/modh/ray:2.35.0-py39-rocm62`. You can also find + can use `quay.io/modh/ray:2.35.0-py311-rocm62`. You can also find documentation on building a custom image `here `__. 
diff --git a/poetry.lock b/poetry.lock index 1e0d12ab..725ce7e0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -106,7 +106,6 @@ files = [ [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" @@ -171,7 +170,6 @@ files = [ ] [package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} @@ -301,22 +299,6 @@ files = [ {file = "async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb"}, ] -[package.dependencies] -typing_extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "async-timeout" -version = "5.0.1" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "python_version < \"3.11\"" -files = [ - {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, - {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, -] - [[package]] name = "attrs" version = "25.3.0" @@ -665,22 +647,6 @@ files = [ {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, ] -[[package]] -name = "click" -version = "8.1.8" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["main"] -markers = "python_version == \"3.9\"" -files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - [[package]] name = "click" version = "8.2.0" @@ -688,7 +654,6 @@ description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\"" files = [ {file = "click-8.2.0-py3-none-any.whl", hash = "sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c"}, {file = "click-8.2.0.tar.gz", hash = "sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d"}, @@ -964,25 +929,6 @@ files = [ {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, ] -[[package]] -name = "exceptiongroup" -version = "1.3.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -groups = ["main", "test"] -markers = "python_version < \"3.11\"" -files = [ - {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, - {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} - -[package.extras] -test = ["pytest (>=6)"] - [[package]] name = "executing" version = "1.2.0" @@ -1419,31 +1365,6 @@ files = [ {file = "imagesize-1.4.1.tar.gz", hash = 
"sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] -[[package]] -name = "importlib-metadata" -version = "8.7.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.9" -groups = ["docs", "test"] -markers = "python_version == \"3.9\"" -files = [ - {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, - {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - [[package]] name = "iniconfig" version = "2.1.0" @@ -1490,85 +1411,6 @@ pyqt5 = ["pyqt5"] pyside6 = ["pyside6"] test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] -[[package]] -name = "ipython" -version = "8.18.1" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.9" -groups = ["main", "test"] -markers = "python_version == \"3.9\"" -files = [ - {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, - {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -prompt-toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack-data = "*" -traitlets = ">=5" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} - -[package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.22)", "pandas", "pickleshare", "pytest (<7.1)", "pytest-asyncio (<0.22)", "testpath", "trio"] - -[[package]] -name = "ipython" -version = "8.36.0" -description = "IPython: Productive Interactive Computing" 
-optional = false -python-versions = ">=3.10" -groups = ["main", "test"] -markers = "python_version == \"3.10\"" -files = [ - {file = "ipython-8.36.0-py3-none-any.whl", hash = "sha256:12b913914d010dcffa2711505ec8be4bf0180742d97f1e5175e51f22086428c1"}, - {file = "ipython-8.36.0.tar.gz", hash = "sha256:24658e9fe5c5c819455043235ba59cfffded4a35936eefceceab6b192f7092ff"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt_toolkit = ">=3.0.41,<3.1.0" -pygments = ">=2.4.0" -stack_data = "*" -traitlets = ">=5.13.0" -typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} - -[package.extras] -all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] -kernel = ["ipykernel"] -matplotlib = ["matplotlib"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["ipywidgets", "notebook"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] - [[package]] name = "ipython" version = "9.2.0" @@ -1576,7 +1418,6 @@ description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.11" groups = ["main", "test"] -markers = "python_version >= \"3.11\"" files = [ {file = "ipython-9.2.0-py3-none-any.whl", hash = "sha256:fef5e33c4a1ae0759e0bba5917c9db4eb8c53fee917b6a526bd973e1ca5159f6"}, {file = "ipython-9.2.0.tar.gz", hash = "sha256:62a9373dbc12f28f9feaf4700d052195bf89806279fc8ca11f3f54017d04751b"}, @@ -1610,7 +1451,6 @@ description = "Defines a variety of Pygments lexers for highlighting IPython cod optional = false python-versions = ">=3.8" groups = ["main", "test"] -markers = "python_version >= \"3.11\"" files = [ {file = "ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c"}, {file = "ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81"}, @@ -1779,7 +1619,6 @@ files = [ ] [package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" python-dateutil = ">=2.8.2" pyzmq = ">=23.0" @@ -1851,7 +1690,6 @@ files = [ ] [package.dependencies] -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jupyter-server = ">=1.1.2" [[package]] @@ -1926,7 +1764,6 @@ files = [ [package.dependencies] async-lru = ">=1.0.0" httpx = ">=0.25.0" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} ipykernel = ">=6.5.0" jinja2 = ">=3.0.3" jupyter-core = "*" @@ -1936,7 +1773,6 @@ jupyterlab-server = ">=2.27.1,<3" notebook-shim = ">=0.2" packaging = "*" setuptools = ">=40.1.0" -tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} tornado = ">=6.2.0" 
traitlets = "*" @@ -1973,7 +1809,6 @@ files = [ [package.dependencies] babel = ">=2.10" -importlib-metadata = {version = ">=4.8.3", markers = "python_version < \"3.10\""} jinja2 = ">=3.0.3" json5 = ">=0.9.0" jsonschema = ">=4.18.0" @@ -2161,9 +1996,6 @@ files = [ {file = "mistune-3.1.3.tar.gz", hash = "sha256:a7035c21782b2becb6be62f8f25d3df81ccb4d6fa477a6525b15af06539f02a0"}, ] -[package.dependencies] -typing-extensions = {version = "*", markers = "python_version < \"3.11\""} - [[package]] name = "msgpack" version = "1.1.0" @@ -2352,9 +2184,6 @@ files = [ {file = "multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, ] -[package.dependencies] -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} - [[package]] name = "nbclient" version = "0.10.2" @@ -2394,7 +2223,6 @@ files = [ beautifulsoup4 = "*" bleach = {version = "!=5.0.0", extras = ["css"]} defusedxml = "*" -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} jinja2 = ">=3.0" jupyter-core = ">=4.7" jupyterlab-pygments = "*" @@ -2468,62 +2296,6 @@ jupyter-server = ">=1.8,<3" [package.extras] test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] -[[package]] -name = "numpy" -version = "2.0.2" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -groups = ["main"] -markers = "python_version == \"3.9\"" -files = [ - {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, - {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, - {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, - {file = 
"numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, - {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, - {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, - {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, - {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, - {file = 
"numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, - {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, - {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, -] - [[package]] name = "numpy" version = "2.2.5" @@ -2531,7 +2303,6 @@ description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\"" files = [ {file = "numpy-2.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f4a922da1729f4c40932b2af4fe84909c7a6e167e6e99f71838ce3a29f3fe26"}, {file = "numpy-2.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6f91524d31b34f4a5fee24f5bc16dcd1491b668798b6d85585d836c1e633a6a"}, @@ -2731,7 +2502,6 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, {version = ">=1.23.2", markers = "python_version == \"3.11\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] @@ -2821,7 +2591,7 @@ description = "Pexpect allows easy control of interactive console applications." 
optional = false python-versions = "*" groups = ["main", "test"] -markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\" or python_version == \"3.9\" and sys_platform != \"win32\"" +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -3073,7 +2843,7 @@ files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] -markers = {main = "sys_platform != \"win32\" and sys_platform != \"emscripten\" or python_version == \"3.9\" and sys_platform != \"win32\"", test = "sys_platform != \"win32\" and sys_platform != \"emscripten\" or python_version == \"3.9\" and sys_platform != \"win32\" or os_name != \"nt\""} +markers = {main = "sys_platform != \"win32\" and sys_platform != \"emscripten\"", test = "sys_platform != \"win32\" and sys_platform != \"emscripten\" or os_name != \"nt\""} [[package]] name = "pure-eval" @@ -3392,11 +3162,9 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -3461,9 +3229,6 @@ files = [ {file = "python_json_logger-3.3.0.tar.gz", hash = "sha256:12b7e74b17775e7d565129296105bbe3910842d9d0eb083fc83a6a617aa8df84"}, ] -[package.dependencies] -typing_extensions = {version = "*", markers = "python_version < \"3.10\""} - [package.extras] dev = ["backports.zoneinfo ; python_version < \"3.9\"", "black", "build", "freezegun", "mdx_truly_sane_lists", "mike", "mkdocs", "mkdocs-awesome-pages-plugin", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-material (>=8.5)", "mkdocstrings[python]", "msgspec ; implementation_name != \"pypy\"", "mypy", "orjson ; implementation_name != \"pypy\"", "pylint", "pytest", "tzdata", "validate-pyproject[all]"] @@ -3732,10 +3497,7 @@ colorful = {version = "*", optional = true, markers = "extra == \"default\""} filelock = "*" frozenlist = "*" fsspec = {version = "*", optional = true, markers = "extra == \"data\""} -grpcio = [ - {version = ">=1.32.0", optional = true, markers = "python_version < \"3.10\" and extra == \"default\""}, - {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""}, -] +grpcio = {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""} jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" numpy = {version = ">=1.20", optional = true, markers = "extra == \"data\""} @@ -3876,7 +3638,6 @@ files = [ [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] @@ -4150,7 +3911,6 @@ babel = ">=2.13" colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} docutils = ">=0.20,<0.22" 
imagesize = ">=1.3" -importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} Jinja2 = ">=3.1" packaging = ">=23.0" Pygments = ">=2.17" @@ -4162,7 +3922,6 @@ sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" sphinxcontrib-serializinghtml = ">=1.1.9" -tomli = {version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] @@ -4365,49 +4124,6 @@ webencodings = ">=0.4" doc = ["sphinx", "sphinx_rtd_theme"] test = ["pytest", "ruff"] -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.8" -groups = ["docs", "test"] -markers = "python_version < \"3.11\"" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = 
"tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - [[package]] name = "tornado" version = "6.4.2" @@ -4808,28 +4524,7 @@ idna = ">=2.0" multidict = ">=4.0" propcache = ">=0.2.1" -[[package]] -name = "zipp" -version = "3.21.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.9" -groups = ["docs", "test"] -markers = "python_version == \"3.9\"" -files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - [metadata] lock-version = "2.1" -python-versions = "^3.9" -content-hash = "6dd8c3cff721090b386fbbe8d8fe7c3bea52570f79605dacb7b9193d546eb2a3" +python-versions = "^3.11" +content-hash = "d04590af4fd433110720e9c0a70760c6127ee6d8e40a4b2dfc513673bc91850d" diff --git a/pyproject.toml b/pyproject.toml index f3bde976..8e383671 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,7 +20,7 @@ homepage = 
"https://github.com/project-codeflare/codeflare-sdk" keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] [tool.poetry.dependencies] -python = "^3.9" +python = "^3.11" openshift-client = "1.0.18" rich = ">=12.5,<14.0" ray = {version = "2.44.1", extras = ["data", "default"]} diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index bf437009..53fbc696 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -68,7 +68,7 @@ def create_cluster_wrong_type(): worker_extended_resource_requests={"nvidia.com/gpu": 7}, appwrapper=True, image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", + image="quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31", write_to_file=True, labels={1: 1}, ) @@ -273,11 +273,9 @@ def apply_template(yaml_file_path, variables): def get_expected_image(): + # TODO: Add Python 3.12 support python_version = sys.version_info - if python_version.major == 3 and python_version.minor == 9: - return "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06" - else: - return "quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31" + return "quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31" def get_template_variables(): diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index 2b8180a2..e4e02f76 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -20,6 +20,7 @@ from ...common import _kube_api_error_handling from ...common.kubernetes_cluster import get_api_client, config_check from kubernetes.client.exceptions import ApiException +from utils.constants import RAY_VERSION import codeflare_sdk import os @@ -93,7 +94,6 @@ ] SUPPORTED_PYTHON_VERSIONS = { - "3.9": "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06", "3.11": "quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31", } @@ -105,7 +105,6 @@ def build_ray_cluster(cluster: "codeflare_sdk.ray.cluster.Cluster"): The resource is a dict template which uses Kubernetes Objects for creating metadata, resource requests, specs and containers. The result is sanitised and returned either as a dict or written as a yaml file. 
""" - ray_version = "2.44.1" # GPU related variables head_gpu_count, worker_gpu_count = head_worker_gpu_count_from_cluster(cluster) @@ -123,7 +122,7 @@ def build_ray_cluster(cluster: "codeflare_sdk.ray.cluster.Cluster"): "kind": "RayCluster", "metadata": get_metadata(cluster), "spec": { - "rayVersion": ray_version, + "rayVersion": RAY_VERSION, "enableInTreeAutoscaling": False, "autoscalerOptions": { "upscalingMode": "Default", diff --git a/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py index 7d6d3d0a..42e1e157 100644 --- a/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py @@ -41,7 +41,6 @@ def test_update_image_without_supported_python_version(mocker): mocker.patch.dict( "codeflare_sdk.ray.cluster.build_ray_cluster.SUPPORTED_PYTHON_VERSIONS", { - "3.9": "ray-py3.9", "3.11": "ray-py3.11", }, ) @@ -60,7 +59,7 @@ def test_update_image_without_supported_python_version(mocker): # Assert that the warning was called with the expected message warn_mock.assert_called_once_with( - "No default Ray image defined for 3.8. Please provide your own image or use one of the following python versions: 3.9, 3.11." + "No default Ray image defined for 3.8. Please provide your own image or use one of the following python versions: 3.11." ) # Assert that no image was set since the Python version is not supported diff --git a/src/utils/constants.py b/src/utils/constants.py new file mode 100644 index 00000000..178f6ac3 --- /dev/null +++ b/src/utils/constants.py @@ -0,0 +1 @@ +RAY_VERSION = "2.44.1" diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 5e4ddbdf..d7bee805 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -28,7 +28,7 @@ def get_ray_cluster(cluster_name, namespace): def get_ray_image(): - default_ray_image = "quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06" + default_ray_image = "quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31" return os.getenv("RAY_IMAGE", default_ray_image) diff --git a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml index 4a14f01f..cb1ed471 100644 --- a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml +++ b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml @@ -42,7 +42,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -89,7 +89,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml index 6600381f..9c89b593 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: 
quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -109,7 +109,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml index e0e4ae94..b2701f20 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -109,7 +109,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml index 51136cc7..9ece6661 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml @@ -29,7 +29,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -100,7 +100,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml index e180a442..9d52e300 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml @@ -29,7 +29,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -100,7 +100,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:0d715f92570a2997381b7cafc0e224cfa25323f18b9545acfd23bc2b71576d06 + - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 lifecycle: preStop: exec: From 53425bbc24a7dbc6be2445d5338950da2cfdc856 Mon Sep 17 00:00:00 2001 From: kramaranya Date: Mon, 14 Apr 2025 12:58:37 +0100 Subject: [PATCH 463/496] feat: enable Ray cluster head pod persistency Signed-off-by: kramaranya --- .../user-docs/cluster-configuration.rst | 42 ++++++++++++++++++ .../ray/cluster/build_ray_cluster.py | 25 +++++++++++ src/codeflare_sdk/ray/cluster/config.py | 44 +++++++++++++++++-- 3 files changed, 107 insertions(+), 4 deletions(-) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst 
b/docs/sphinx/user-docs/cluster-configuration.rst index e08b9f6e..7ca871e7 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -97,6 +97,48 @@ Custom Volumes/Volume Mounts | For more information on creating Volumes and Volume Mounts with Python check out the Python Kubernetes docs (`Volumes `__, `Volume Mounts `__). | You can also find further information on Volumes and Volume Mounts by visiting the Kubernetes `documentation `__. +GCS Fault Tolerance +------------------ +By default, the state of the Ray cluster is transient to the head Pod. Whatever triggers a restart of the head Pod results in losing that state, including Ray Cluster history. To make Ray cluster state persistent you can enable Global Control Service (GCS) fault tolerance with an external Redis storage. + +To configure GCS fault tolerance you need to set the following parameters: + +.. list-table:: + :header-rows: 1 + :widths: auto + + * - Parameter + - Description + * - ``enable_gcs_ft`` + - Boolean to enable GCS fault tolerance + * - ``redis_address`` + - Address of the external Redis service, ex: "redis:6379" + * - ``redis_password_secret`` + - Dictionary with 'name' and 'key' fields specifying the Kubernetes secret for Redis password + * - ``external_storage_namespace`` + - Custom storage namespace for GCS fault tolerance (by default, KubeRay sets it to the RayCluster's UID) + +Example configuration: + +.. code:: python + + from codeflare_sdk import Cluster, ClusterConfiguration + + cluster = Cluster(ClusterConfiguration( + name='ray-cluster-with-persistence', + num_workers=2, + enable_gcs_ft=True, + redis_address="redis:6379", + redis_password_secret={ + "name": "redis-password-secret", + "key": "password" + }, + # external_storage_namespace="my-custom-namespace" # Optional: Custom namespace for GCS data in Redis + )) + +.. note:: + You need to have a Redis instance deployed in your Kubernetes cluster before using this feature. 
+ Deprecating Parameters ---------------------- diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index e4e02f76..9c626200 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -169,6 +169,31 @@ def build_ray_cluster(cluster: "codeflare_sdk.ray.cluster.Cluster"): }, } + if cluster.config.enable_gcs_ft: + if not cluster.config.redis_address: + raise ValueError( + "redis_address must be provided when enable_gcs_ft is True" + ) + + gcs_ft_options = {"redisAddress": cluster.config.redis_address} + + if cluster.config.external_storage_namespace: + gcs_ft_options[ + "externalStorageNamespace" + ] = cluster.config.external_storage_namespace + + if cluster.config.redis_password_secret: + gcs_ft_options["redisPassword"] = { + "valueFrom": { + "secretKeyRef": { + "name": cluster.config.redis_password_secret["name"], + "key": cluster.config.redis_password_secret["key"], + } + } + } + + resource["spec"]["gcsFaultToleranceOptions"] = gcs_ft_options + config_check() k8s_client = get_api_client() or client.ApiClient() diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index ab64be83..4f646baa 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -100,6 +100,14 @@ class ClusterConfiguration: A list of V1Volume objects to add to the Cluster volume_mounts: A list of V1VolumeMount objects to add to the Cluster + enable_gcs_ft: + A boolean indicating whether to enable GCS fault tolerance. + redis_address: + The address of the Redis server to use for GCS fault tolerance, required when enable_gcs_ft is True. + redis_password_secret: + Kubernetes secret reference containing Redis password. ex: {"name": "secret-name", "key": "password-key"} + external_storage_namespace: + The storage namespace to use for GCS fault tolerance. By default, KubeRay sets it to the UID of RayCluster. 
""" name: str @@ -142,6 +150,10 @@ class ClusterConfiguration: annotations: Dict[str, str] = field(default_factory=dict) volumes: list[V1Volume] = field(default_factory=list) volume_mounts: list[V1VolumeMount] = field(default_factory=list) + enable_gcs_ft: bool = False + redis_address: Optional[str] = None + redis_password_secret: Optional[Dict[str, str]] = None + external_storage_namespace: Optional[str] = None def __post_init__(self): if not self.verify_tls: @@ -149,6 +161,27 @@ def __post_init__(self): "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) + if self.enable_gcs_ft: + if not self.redis_address: + raise ValueError( + "redis_address must be provided when enable_gcs_ft is True" + ) + + if self.redis_password_secret and not isinstance( + self.redis_password_secret, dict + ): + raise ValueError( + "redis_password_secret must be a dictionary with 'name' and 'key' fields" + ) + + if self.redis_password_secret and ( + "name" not in self.redis_password_secret + or "key" not in self.redis_password_secret + ): + raise ValueError( + "redis_password_secret must contain both 'name' and 'key' fields" + ) + self._validate_types() self._memory_to_resource() self._memory_to_string() @@ -283,10 +316,13 @@ def check_type(value, expected_type): else: return True if origin_type is dict: - return all( - check_type(k, args[0]) and check_type(v, args[1]) - for k, v in value.items() - ) + if value is not None: + return all( + check_type(k, args[0]) and check_type(v, args[1]) + for k, v in value.items() + ) + else: + return True if origin_type is tuple: return all(check_type(elem, etype) for elem, etype in zip(value, args)) if expected_type is int: From e46ee4c71b1c7c9ad92d21a3317dd61eb40994ec Mon Sep 17 00:00:00 2001 From: kramaranya Date: Thu, 24 Apr 2025 15:17:59 +0100 Subject: [PATCH 464/496] test: add unit tests for gcs fault tolerance Signed-off-by: kramaranya --- .../ray/cluster/test_build_ray_cluster.py | 45 +++++++++++++++- src/codeflare_sdk/ray/cluster/test_config.py | 51 +++++++++++++++++++ 2 files changed, 95 insertions(+), 1 deletion(-) diff --git a/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py index 42e1e157..6d322b5f 100644 --- a/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py @@ -13,8 +13,9 @@ # limitations under the License. 
from collections import namedtuple import sys -from .build_ray_cluster import gen_names, update_image +from .build_ray_cluster import gen_names, update_image, build_ray_cluster import uuid +from codeflare_sdk.ray.cluster.cluster import ClusterConfiguration, Cluster def test_gen_names_with_name(mocker): @@ -64,3 +65,45 @@ def test_update_image_without_supported_python_version(mocker): # Assert that no image was set since the Python version is not supported assert image is None + + +def test_build_ray_cluster_with_gcs_ft(mocker): + mocker.patch("kubernetes.config.load_kube_config", return_value="ignore") + mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") + + cluster = Cluster( + ClusterConfiguration( + name="test", + namespace="ns", + enable_gcs_ft=True, + redis_address="redis:6379", + redis_password_secret={"name": "redis-password-secret", "key": "password"}, + external_storage_namespace="new-ns", + ) + ) + + mocker.patch("codeflare_sdk.ray.cluster.build_ray_cluster.config_check") + mocker.patch( + "codeflare_sdk.ray.cluster.build_ray_cluster.get_api_client", return_value=None + ) + mocker.patch( + "codeflare_sdk.ray.cluster.build_ray_cluster.update_image", return_value=None + ) + + resource = build_ray_cluster(cluster) + + assert "spec" in resource + assert "gcsFaultToleranceOptions" in resource["spec"] + + gcs_ft_options = resource["spec"]["gcsFaultToleranceOptions"] + + assert gcs_ft_options["redisAddress"] == "redis:6379" + assert gcs_ft_options["externalStorageNamespace"] == "new-ns" + assert ( + gcs_ft_options["redisPassword"]["valueFrom"]["secretKeyRef"]["name"] + == "redis-password-secret" + ) + assert ( + gcs_ft_options["redisPassword"]["valueFrom"]["secretKeyRef"]["key"] + == "password" + ) diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index 34cc4237..e6bcb8ba 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -153,6 +153,57 @@ def test_cluster_config_deprecation_conversion(mocker): assert config.worker_cpu_limits == 2 +def test_gcs_fault_tolerance_config_validation(): + config = ClusterConfiguration( + name="test", + namespace="ns", + enable_gcs_ft=True, + redis_address="redis:6379", + redis_password_secret={"name": "redis-password-secret", "key": "password"}, + external_storage_namespace="new-ns", + ) + + assert config.enable_gcs_ft is True + assert config.redis_address == "redis:6379" + assert config.redis_password_secret == { + "name": "redis-password-secret", + "key": "password", + } + assert config.external_storage_namespace == "new-ns" + + try: + ClusterConfiguration(name="test", namespace="ns", enable_gcs_ft=True) + except ValueError as e: + assert str(e) in "redis_address must be provided when enable_gcs_ft is True" + + try: + ClusterConfiguration( + name="test", + namespace="ns", + enable_gcs_ft=True, + redis_address="redis:6379", + redis_password_secret={"secret"}, + ) + except ValueError as e: + assert ( + str(e) + in "redis_password_secret must be a dictionary with 'name' and 'key' fields" + ) + + try: + ClusterConfiguration( + name="test", + namespace="ns", + enable_gcs_ft=True, + redis_address="redis:6379", + redis_password_secret={"wrong": "format"}, + ) + except ValueError as e: + assert ( + str(e) in "redis_password_secret must contain both 'name' and 'key' fields" + ) + + # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}test-all-params.yaml") From 
340f97312e756ce3964c131e046e61f0d36139ed Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Thu, 6 Mar 2025 13:40:26 +0000 Subject: [PATCH 465/496] fix(RHOAIENG-20531): propagate annotations to ray pods Signed-off-by: Pat O'Connor --- .../common/utils/unit_test_support.py | 5 ++++- .../ray/cluster/build_ray_cluster.py | 19 +++++++++++++------ src/codeflare_sdk/ray/cluster/test_config.py | 2 ++ .../appwrapper/unit-test-all-params.yaml | 10 ++++++++++ .../ray/unit-test-all-params.yaml | 10 ++++++++++ 5 files changed, 39 insertions(+), 7 deletions(-) diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 53fbc696..79c7bf01 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -496,7 +496,10 @@ def create_cluster_all_config_params(mocker, cluster_name, is_appwrapper) -> Clu extended_resource_mapping={"example.com/gpu": "GPU", "intel.com/gpu": "TPU"}, overwrite_default_resource_mapping=True, local_queue="local-queue-default", - annotations={"key1": "value1", "key2": "value2"}, + annotations={ + "key1": "value1", + "key2": "value2", + }, volumes=volumes, volume_mounts=volume_mounts, ) diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index 9c626200..317091a8 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -138,13 +138,16 @@ def build_ray_cluster(cluster: "codeflare_sdk.ray.cluster.Cluster"): "num-gpus": str(head_gpu_count), "resources": head_resources, }, - "template": { - "spec": get_pod_spec( + "template": V1PodTemplateSpec( + metadata=V1ObjectMeta(cluster.config.annotations) + if cluster.config.annotations + else None, + spec=get_pod_spec( cluster, [get_head_container_spec(cluster)], cluster.config.head_tolerations, - ) - }, + ), + ), }, "workerGroupSpecs": [ { @@ -158,11 +161,14 @@ def build_ray_cluster(cluster: "codeflare_sdk.ray.cluster.Cluster"): "resources": worker_resources, }, "template": V1PodTemplateSpec( + metadata=V1ObjectMeta(cluster.config.annotations) + if cluster.config.annotations + else None, spec=get_pod_spec( cluster, [get_worker_container_spec(cluster)], cluster.config.worker_tolerations, - ) + ), ), } ], @@ -215,7 +221,7 @@ def build_ray_cluster(cluster: "codeflare_sdk.ray.cluster.Cluster"): # Metadata related functions def get_metadata(cluster: "codeflare_sdk.ray.cluster.Cluster"): """ - The get_metadata() function builds and returns a V1ObjectMeta Object using cluster configurtation parameters + The get_metadata() function builds and returns a V1ObjectMeta Object using cluster configuration parameters """ object_meta = V1ObjectMeta( name=cluster.config.name, @@ -227,6 +233,7 @@ def get_metadata(cluster: "codeflare_sdk.ray.cluster.Cluster"): annotations = with_nb_annotations(cluster.config.annotations) if annotations != {}: object_meta.annotations = annotations # As annotations are not a guarantee they are appended to the metadata after creation. 
+ return object_meta diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index e6bcb8ba..6007f60b 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -41,6 +41,7 @@ def test_default_cluster_creation(mocker): f"{expected_clusters_dir}/ray/default-ray-cluster.yaml", get_template_variables(), ) + assert cluster.resource_yaml == expected_rc @@ -114,6 +115,7 @@ def test_config_creation_all_parameters(mocker): @pytest.mark.filterwarnings("ignore::UserWarning") def test_all_config_params_aw(mocker): create_cluster_all_config_params(mocker, "aw-all-params", True) + assert filecmp.cmp( f"{aw_dir}aw-all-params.yaml", f"{expected_clusters_dir}/appwrapper/unit-test-all-params.yaml", diff --git a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml index 825318a0..f1b75410 100644 --- a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml @@ -42,6 +42,11 @@ spec: resources: '"{\"TPU\": 2}"' serviceType: ClusterIP template: + metadata: + annotations: + app.kubernetes.io/managed-by: test-prefix + key1: value1 + key2: value2 spec: containers: - env: @@ -142,6 +147,11 @@ spec: resources: '"{}"' replicas: 10 template: + metadata: + annotations: + app.kubernetes.io/managed-by: test-prefix + key1: value1 + key2: value2 spec: containers: - env: diff --git a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml index 16a703b8..d5d8059d 100644 --- a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml @@ -33,6 +33,11 @@ spec: resources: '"{\"TPU\": 2}"' serviceType: ClusterIP template: + metadata: + annotations: + app.kubernetes.io/managed-by: test-prefix + key1: value1 + key2: value2 spec: containers: - env: @@ -133,6 +138,11 @@ spec: resources: '"{}"' replicas: 10 template: + metadata: + annotations: + app.kubernetes.io/managed-by: test-prefix + key1: value1 + key2: value2 spec: containers: - env: From 028d0d5511246a5ddd586b5d1cc2950288c3ca20 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Tue, 13 May 2025 16:03:30 +0100 Subject: [PATCH 466/496] fix: fix broken utils directory for constants.py --- src/{ => codeflare_sdk/common}/utils/constants.py | 0 src/codeflare_sdk/ray/cluster/build_ray_cluster.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename src/{ => codeflare_sdk/common}/utils/constants.py (100%) diff --git a/src/utils/constants.py b/src/codeflare_sdk/common/utils/constants.py similarity index 100% rename from src/utils/constants.py rename to src/codeflare_sdk/common/utils/constants.py diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index 317091a8..2a3436b2 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -20,7 +20,7 @@ from ...common import _kube_api_error_handling from ...common.kubernetes_cluster import get_api_client, config_check from kubernetes.client.exceptions import ApiException -from utils.constants import RAY_VERSION +from ...common.utils.constants import RAY_VERSION import codeflare_sdk import os From 93e9890da1ce8e6fabab16924458546acfdd1ce1 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Tue, 20 May 2025 09:21:43 +0100 Subject: [PATCH 467/496] fix: port forward 
for local interactive tests --- .github/workflows/e2e_tests.yaml | 8 ++- tests/e2e/local_interactive_sdk_kind_test.py | 59 +++++++++++++++----- tests/e2e/support.py | 2 - 3 files changed, 52 insertions(+), 17 deletions(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index fca6d6e7..cb6d4b8d 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -104,8 +104,12 @@ jobs: kubectl create clusterrolebinding sdk-user-localqueue-creator --clusterrole=localqueue-creator --user=sdk-user kubectl create clusterrole list-secrets --verb=get,list --resource=secrets kubectl create clusterrolebinding sdk-user-list-secrets --clusterrole=list-secrets --user=sdk-user - kubectl create clusterrole pod-creator --verb=get,list --resource=pods + kubectl create clusterrole pod-creator --verb=get,list,watch --resource=pods kubectl create clusterrolebinding sdk-user-pod-creator --clusterrole=pod-creator --user=sdk-user + kubectl create clusterrole service-reader --verb=get,list,watch --resource=services + kubectl create clusterrolebinding sdk-user-service-reader --clusterrole=service-reader --user=sdk-user + kubectl create clusterrole port-forward-pods --verb=create --resource=pods/portforward + kubectl create clusterrolebinding sdk-user-port-forward-pods-binding --clusterrole=port-forward-pods --user=sdk-user kubectl config use-context sdk-user - name: Run e2e tests @@ -117,7 +121,7 @@ jobs: pip install poetry poetry install --with test,docs echo "Running e2e tests..." - poetry run pytest -v -s ./tests/e2e -m 'kind and nvidia_gpu' > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 + poetry run pytest -v -s --log-cli-level=INFO ./tests/e2e/local_interactive_sdk_kind_test.py::TestRayLocalInteractiveKind::test_local_interactives_nvidia_gpu > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 env: GRPC_DNS_RESOLVER: "native" diff --git a/tests/e2e/local_interactive_sdk_kind_test.py b/tests/e2e/local_interactive_sdk_kind_test.py index c20fd879..eb5066db 100644 --- a/tests/e2e/local_interactive_sdk_kind_test.py +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -1,23 +1,31 @@ from codeflare_sdk import ( Cluster, ClusterConfiguration, - TokenAuthentication, generate_cert, ) import pytest import ray import math +import subprocess from support import * @pytest.mark.kind -class TestRayLocalInteractiveOauth: +class TestRayLocalInteractiveKind: def setup_method(self): initialize_kubernetes_client(self) + self.port_forward_process = None + + def cleanup_port_forward(self): + if self.port_forward_process: + self.port_forward_process.terminate() + self.port_forward_process.wait(timeout=10) + self.port_forward_process = None def teardown_method(self): + self.cleanup_port_forward() delete_namespace(self) delete_kueue_resources(self) @@ -39,6 +47,8 @@ def run_local_interactives( ): cluster_name = "test-ray-cluster-li" + ray.shutdown() + cluster = Cluster( ClusterConfiguration( name=cluster_name, @@ -49,25 +59,24 @@ def run_local_interactives( head_memory_requests=2, head_memory_limits=2, worker_cpu_requests="500m", - worker_cpu_limits=1, + worker_cpu_limits="500m", worker_memory_requests=1, worker_memory_limits=4, worker_extended_resource_requests={gpu_resource_name: number_of_gpus}, - write_to_file=True, verify_tls=False, ) ) + cluster.up() + cluster.wait_ready() + cluster.status() generate_cert.generate_tls_cert(cluster_name, self.namespace) generate_cert.export_env(cluster_name, self.namespace) print(cluster.local_client_url()) - ray.shutdown() - 
ray.init(address=cluster.local_client_url(), logging_level="DEBUG") - @ray.remote(num_gpus=number_of_gpus / 2) def heavy_calculation_part(num_iterations): result = 0.0 @@ -84,10 +93,34 @@ def heavy_calculation(num_iterations): ) return sum(results) - ref = heavy_calculation.remote(3000) - result = ray.get(ref) - assert result == 1789.4644387076714 - ray.cancel(ref) - ray.shutdown() + # Attempt to port forward + try: + local_port = "20001" + ray_client_port = "10001" + + port_forward_cmd = [ + "kubectl", + "port-forward", + "-n", + self.namespace, + f"svc/{cluster_name}-head-svc", + f"{local_port}:{ray_client_port}", + ] + self.port_forward_process = subprocess.Popen( + port_forward_cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL + ) + + client_url = f"ray://localhost:{local_port}" + cluster.status() + + ray.init(address=client_url, logging_level="INFO") + + ref = heavy_calculation.remote(3000) + result = ray.get(ref) + assert result == 1789.4644387076714 + ray.cancel(ref) + ray.shutdown() - cluster.down() + cluster.down() + finally: + self.cleanup_port_forward() diff --git a/tests/e2e/support.py b/tests/e2e/support.py index d7bee805..46dd4e70 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -1,11 +1,9 @@ -import json import os import random import string import subprocess from codeflare_sdk import get_cluster from kubernetes import client, config -import kubernetes.client from codeflare_sdk.common.kubernetes_cluster.kube_api_helpers import ( _kube_api_error_handling, ) From 78ec732c49772edaf7544b6171b23aa07e89e9c3 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Tue, 3 Jun 2025 16:27:18 +0100 Subject: [PATCH 468/496] fix: run all E2Es --- .github/workflows/e2e_tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index cb6d4b8d..fc80af56 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -121,7 +121,7 @@ jobs: pip install poetry poetry install --with test,docs echo "Running e2e tests..." - poetry run pytest -v -s --log-cli-level=INFO ./tests/e2e/local_interactive_sdk_kind_test.py::TestRayLocalInteractiveKind::test_local_interactives_nvidia_gpu > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 + poetry run pytest -v -s ./tests/e2e -m 'kind and nvidia_gpu' > ${CODEFLARE_TEST_OUTPUT_DIR}/pytest_output.log 2>&1 env: GRPC_DNS_RESOLVER: "native" From b6af6f53abb879bdc594fa5cdc8de5ccd24533b2 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Thu, 15 May 2025 17:51:03 +0100 Subject: [PATCH 469/496] feat(RHOAIENG-25241): disable ray usage metrics by default --- .../user-docs/cluster-configuration.rst | 15 ++++++ src/codeflare_sdk/ray/cluster/config.py | 4 ++ src/codeflare_sdk/ray/cluster/test_cluster.py | 18 ++++--- src/codeflare_sdk/ray/cluster/test_config.py | 47 ++++++++++++++++++- .../appwrapper/unit-test-all-params.yaml | 4 ++ tests/test_cluster_yamls/kueue/aw_kueue.yaml | 6 +++ .../kueue/ray_cluster_kueue.yaml | 6 +++ .../ray/default-appwrapper.yaml | 6 +++ .../ray/default-ray-cluster.yaml | 6 +++ .../ray/unit-test-all-params.yaml | 4 ++ 10 files changed, 105 insertions(+), 11 deletions(-) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index 7ca871e7..0dc54930 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -44,6 +44,21 @@ requirements for creating the Ray Cluster. 
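The port-forwarding flow exercised by the test above can be condensed into a short standalone sketch. This is only an illustration of the same pattern, assuming kubectl access plus the RBAC granted in the workflow above (get/list/watch on pods and services, create on pods/portforward); the namespace, cluster name and local port are placeholders taken from the test:

.. code:: python

    import subprocess
    import ray

    # Forward the Ray client port (10001) on the head service to a local port.
    # The service name follows the "<cluster-name>-head-svc" convention used in the test.
    pf = subprocess.Popen(
        [
            "kubectl", "port-forward", "-n", "default",
            "svc/test-ray-cluster-li-head-svc", "20001:10001",
        ],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    try:
        ray.init(address="ray://localhost:20001", logging_level="INFO")
        # ... submit remote work here ...
        ray.shutdown()
    finally:
        pf.terminate()
        pf.wait(timeout=10)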
documentation on building a custom image `here `__. +Ray Usage Statistics +------------------- + +By default, Ray usage statistics collection is disabled in CodeFlare SDK clusters. This stops statistics from being sent to AnyScale. If you want to enable usage statistics collection, you can set the ``RAY_USAGE_STATS_ENABLED`` environment variable to ``1`` in your cluster configuration: + +.. code:: python + + from codeflare_sdk import Cluster, ClusterConfiguration + + cluster = Cluster(ClusterConfiguration( + name='ray-example', + namespace='default', + envs={'RAY_USAGE_STATS_ENABLED': '1'} # Enable usage statistics + )) + The ``labels={"exampleLabel": "example"}`` parameter can be used to apply additional labels to the RayCluster resource. diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index 4f646baa..8ea97d55 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -161,6 +161,10 @@ def __post_init__(self): "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) + # Set default environment variable to disable Ray usage stats if not already set + if "RAY_USAGE_STATS_ENABLED" not in self.envs: + self.envs["RAY_USAGE_STATS_ENABLED"] = "0" + if self.enable_gcs_ft: if not self.redis_address: raise ValueError( diff --git a/src/codeflare_sdk/ray/cluster/test_cluster.py b/src/codeflare_sdk/ray/cluster/test_cluster.py index 298c416e..ce684607 100644 --- a/src/codeflare_sdk/ray/cluster/test_cluster.py +++ b/src/codeflare_sdk/ray/cluster/test_cluster.py @@ -465,11 +465,10 @@ def test_get_cluster_no_appwrapper(mocker): return_value=expected_rc, ) get_cluster("test-all-params", "ns", write_to_file=True) - assert filecmp.cmp( - f"{aw_dir}test-all-params.yaml", - f"{expected_clusters_dir}/ray/unit-test-all-params.yaml", - shallow=True, - ) + + with open(f"{aw_dir}test-all-params.yaml") as f: + generated_rc = yaml.load(f, Loader=yaml.FullLoader) + assert generated_rc == expected_rc def test_get_cluster_with_appwrapper(mocker): @@ -487,11 +486,10 @@ def test_get_cluster_with_appwrapper(mocker): return_value=expected_aw, ) get_cluster("aw-all-params", "ns", write_to_file=True) - assert filecmp.cmp( - f"{aw_dir}aw-all-params.yaml", - f"{expected_clusters_dir}/appwrapper/unit-test-all-params.yaml", - shallow=True, - ) + + with open(f"{aw_dir}aw-all-params.yaml") as f: + generated_aw = yaml.load(f, Loader=yaml.FullLoader) + assert generated_aw == expected_aw def test_wait_ready(mocker, capsys): diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index 6007f60b..6c990c19 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -24,6 +24,7 @@ import filecmp import pytest import os +import yaml parent = Path(__file__).resolve().parents[4] # project directory expected_clusters_dir = f"{parent}/tests/test_cluster_yamls" @@ -85,7 +86,11 @@ def test_config_creation_all_parameters(mocker): assert cluster.config.worker_memory_requests == "12G" assert cluster.config.worker_memory_limits == "16G" assert cluster.config.appwrapper == False - assert cluster.config.envs == {"key1": "value1", "key2": "value2"} + assert cluster.config.envs == { + "key1": "value1", + "key2": "value2", + "RAY_USAGE_STATS_ENABLED": "0", + } assert cluster.config.image == "example/ray:tag" assert cluster.config.image_pull_secrets == ["secret1", "secret2"] assert cluster.config.write_to_file == True @@ -206,6 +211,46 @@ def 
test_gcs_fault_tolerance_config_validation(): ) +def test_ray_usage_stats_default(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") + + cluster = Cluster( + ClusterConfiguration(name="default-usage-stats-cluster", namespace="ns") + ) + + # Verify that usage stats are disabled by default + assert cluster.config.envs["RAY_USAGE_STATS_ENABLED"] == "0" + + # Check that the environment variable is set in the YAML + head_container = cluster.resource_yaml["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0] + env_vars = {env["name"]: env["value"] for env in head_container["env"]} + assert env_vars["RAY_USAGE_STATS_ENABLED"] == "0" + + +def test_ray_usage_stats_enabled(mocker): + mocker.patch("kubernetes.client.ApisApi.get_api_versions") + mocker.patch("kubernetes.client.CustomObjectsApi.list_namespaced_custom_object") + + cluster = Cluster( + ClusterConfiguration( + name="usage-stats-enabled-cluster", + namespace="ns", + envs={"RAY_USAGE_STATS_ENABLED": "1"}, + ) + ) + + assert cluster.config.envs["RAY_USAGE_STATS_ENABLED"] == "1" + + head_container = cluster.resource_yaml["spec"]["headGroupSpec"]["template"]["spec"][ + "containers" + ][0] + env_vars = {env["name"]: env["value"] for env in head_container["env"]} + assert env_vars["RAY_USAGE_STATS_ENABLED"] == "1" + + # Make sure to always keep this function last def test_cleanup(): os.remove(f"{aw_dir}test-all-params.yaml") diff --git a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml index f1b75410..af5acbad 100644 --- a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml @@ -54,6 +54,8 @@ spec: value: value1 - name: key2 value: value2 + - name: RAY_USAGE_STATS_ENABLED + value: '0' image: example/ray:tag imagePullPolicy: Always lifecycle: @@ -159,6 +161,8 @@ spec: value: value1 - name: key2 value: value2 + - name: RAY_USAGE_STATS_ENABLED + value: '0' image: example/ray:tag imagePullPolicy: Always lifecycle: diff --git a/tests/test_cluster_yamls/kueue/aw_kueue.yaml b/tests/test_cluster_yamls/kueue/aw_kueue.yaml index fd78f070..7101f6a8 100644 --- a/tests/test_cluster_yamls/kueue/aw_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/aw_kueue.yaml @@ -75,6 +75,9 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt + env: + - name: RAY_USAGE_STATS_ENABLED + value: '0' volumes: - configMap: items: @@ -133,6 +136,9 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt + env: + - name: RAY_USAGE_STATS_ENABLED + value: '0' volumes: - configMap: items: diff --git a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml index a6dd81d7..f8b3aa46 100644 --- a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml @@ -75,6 +75,9 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt + env: + - name: RAY_USAGE_STATS_ENABLED + value: '0' volumes: - configMap: items: @@ -133,6 +136,9 @@ spec: - mountPath: /etc/ssl/certs/odh-ca-bundle.crt name: odh-ca-cert subPath: odh-ca-bundle.crt + env: + - name: RAY_USAGE_STATS_ENABLED + value: '0' volumes: - configMap: items: diff --git a/tests/test_cluster_yamls/ray/default-appwrapper.yaml 
b/tests/test_cluster_yamls/ray/default-appwrapper.yaml index 6d1cdcd5..1532c0e8 100644 --- a/tests/test_cluster_yamls/ray/default-appwrapper.yaml +++ b/tests/test_cluster_yamls/ray/default-appwrapper.yaml @@ -53,6 +53,9 @@ spec: name: dashboard - containerPort: 10001 name: client + env: + - name: RAY_USAGE_STATS_ENABLED + value: '0' resources: limits: cpu: 2 @@ -111,6 +114,9 @@ spec: - -c - ray stop name: machine-learning + env: + - name: RAY_USAGE_STATS_ENABLED + value: '0' resources: limits: cpu: 1 diff --git a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml index 38e02f8f..db401026 100644 --- a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml +++ b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml @@ -45,6 +45,9 @@ spec: name: dashboard - containerPort: 10001 name: client + env: + - name: RAY_USAGE_STATS_ENABLED + value: '0' resources: limits: cpu: 2 @@ -110,6 +113,9 @@ spec: requests: cpu: 1 memory: 2G + env: + - name: RAY_USAGE_STATS_ENABLED + value: '0' volumeMounts: - mountPath: /etc/pki/tls/certs/odh-trusted-ca-bundle.crt name: odh-trusted-ca-cert diff --git a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml index d5d8059d..6900b058 100644 --- a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml @@ -45,6 +45,8 @@ spec: value: value1 - name: key2 value: value2 + - name: RAY_USAGE_STATS_ENABLED + value: '0' image: example/ray:tag imagePullPolicy: Always lifecycle: @@ -150,6 +152,8 @@ spec: value: value1 - name: key2 value: value2 + - name: RAY_USAGE_STATS_ENABLED + value: '0' image: example/ray:tag imagePullPolicy: Always lifecycle: From 68d1f6cc968c33f85d04fff45bd27a49fda757a8 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Wed, 28 May 2025 11:27:03 +0100 Subject: [PATCH 470/496] feat: change usage stats enablement to cluster config param --- docs/sphinx/user-docs/cluster-configuration.rst | 6 ++++-- src/codeflare_sdk/ray/cluster/config.py | 8 ++++++-- src/codeflare_sdk/ray/cluster/test_config.py | 2 +- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index 0dc54930..dee5359d 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -47,7 +47,7 @@ requirements for creating the Ray Cluster. Ray Usage Statistics ------------------- -By default, Ray usage statistics collection is disabled in CodeFlare SDK clusters. This stops statistics from being sent to AnyScale. If you want to enable usage statistics collection, you can set the ``RAY_USAGE_STATS_ENABLED`` environment variable to ``1`` in your cluster configuration: +By default, Ray usage statistics collection is **disabled** in Ray Clusters created with the Codeflare SDK. This prevents statistics from being captured and sent externally. If you want to enable usage statistics collection, you can simply set the ``enable_usage_stats`` parameter to ``True`` in your cluster configuration: .. 
code:: python @@ -56,9 +56,11 @@ By default, Ray usage statistics collection is disabled in CodeFlare SDK cluster cluster = Cluster(ClusterConfiguration( name='ray-example', namespace='default', - envs={'RAY_USAGE_STATS_ENABLED': '1'} # Enable usage statistics + enable_usage_stats=True )) +This will automatically set the ``RAY_USAGE_STATS_ENABLED`` environment variable to ``1`` for all Ray pods in the cluster. If you do not set this parameter, usage statistics will remain disabled (``RAY_USAGE_STATS_ENABLED=0``). + The ``labels={"exampleLabel": "example"}`` parameter can be used to apply additional labels to the RayCluster resource. diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index 8ea97d55..36fdbbc7 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -102,6 +102,8 @@ class ClusterConfiguration: A list of V1VolumeMount objects to add to the Cluster enable_gcs_ft: A boolean indicating whether to enable GCS fault tolerance. + enable_usage_stats: + A boolean indicating whether to capture and send Ray usage stats externally. redis_address: The address of the Redis server to use for GCS fault tolerance, required when enable_gcs_ft is True. redis_password_secret: @@ -151,6 +153,7 @@ class ClusterConfiguration: volumes: list[V1Volume] = field(default_factory=list) volume_mounts: list[V1VolumeMount] = field(default_factory=list) enable_gcs_ft: bool = False + enable_usage_stats: bool = False redis_address: Optional[str] = None redis_password_secret: Optional[Dict[str, str]] = None external_storage_namespace: Optional[str] = None @@ -161,8 +164,9 @@ def __post_init__(self): "Warning: TLS verification has been disabled - Endpoint checks will be bypassed" ) - # Set default environment variable to disable Ray usage stats if not already set - if "RAY_USAGE_STATS_ENABLED" not in self.envs: + if self.enable_usage_stats: + self.envs["RAY_USAGE_STATS_ENABLED"] = "1" + else: self.envs["RAY_USAGE_STATS_ENABLED"] = "0" if self.enable_gcs_ft: diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index 6c990c19..6e8b2669 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -238,7 +238,7 @@ def test_ray_usage_stats_enabled(mocker): ClusterConfiguration( name="usage-stats-enabled-cluster", namespace="ns", - envs={"RAY_USAGE_STATS_ENABLED": "1"}, + enable_usage_stats=True, ) ) From 5c2495d37f9a607f18c1dec88de10ce56a510292 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Tue, 10 Jun 2025 15:08:19 +0100 Subject: [PATCH 471/496] feat: add comment about tearing down ray clusters to cluster.apply() --- src/codeflare_sdk/ray/cluster/cluster.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/codeflare_sdk/ray/cluster/cluster.py b/src/codeflare_sdk/ray/cluster/cluster.py index b42efd87..d03d78d8 100644 --- a/src/codeflare_sdk/ray/cluster/cluster.py +++ b/src/codeflare_sdk/ray/cluster/cluster.py @@ -219,7 +219,7 @@ def apply(self, force=False): force_conflicts=force, ) print( - f"AppWrapper: '{name}' configuration has successfully been applied" + f"AppWrapper: '{name}' configuration has successfully been applied. For optimal resource management, you should delete this Ray Cluster when no longer in use." 
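To confirm what the flag does to the generated spec, the rendered resource can be inspected directly, mirroring the ``test_ray_usage_stats_enabled`` unit test above. A minimal sketch, assuming an authenticated Kubernetes context (the unit tests mock these API calls) and placeholder cluster and namespace names:

.. code:: python

    from codeflare_sdk import Cluster, ClusterConfiguration

    cluster = Cluster(ClusterConfiguration(
        name="usage-stats-check",   # placeholder name
        namespace="default",        # placeholder namespace
        enable_usage_stats=True,
    ))

    # The head group's container should now carry RAY_USAGE_STATS_ENABLED=1;
    # with enable_usage_stats left at its default, the value stays "0".
    head = cluster.resource_yaml["spec"]["headGroupSpec"]["template"]["spec"]["containers"][0]
    env = {e["name"]: e["value"] for e in head["env"]}
    assert env["RAY_USAGE_STATS_ENABLED"] == "1"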
) else: api_version = "ray.io/v1" @@ -227,7 +227,9 @@ def apply(self, force=False): self._component_resources_apply( namespace=namespace, api_instance=api_instance ) - print(f"Ray Cluster: '{name}' has successfully been applied") + print( + f"Ray Cluster: '{name}' has successfully been applied. For optimal resource management, you should delete this Ray Cluster when no longer in use." + ) except AttributeError as e: raise RuntimeError(f"Failed to initialize DynamicClient: {e}") except Exception as e: # pragma: no cover From cb7051093b9834cedddec76da1b5436de78da7a5 Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Tue, 10 Jun 2025 10:23:03 +0100 Subject: [PATCH 472/496] task(RHOAIENG-26192): Removed dep fields head_gpus + num_gpus Signed-off-by: Pat O'Connor --- .../user-docs/cluster-configuration.rst | 4 --- src/codeflare_sdk/ray/cluster/config.py | 27 ------------------- src/codeflare_sdk/ray/cluster/test_config.py | 4 --- 3 files changed, 35 deletions(-) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index dee5359d..f9749f59 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -180,7 +180,3 @@ deprecated. - ``worker_memory_requests`` * - ``max_memory`` - ``worker_memory_limits`` - * - ``head_gpus`` - - ``head_extended_resource_requests`` - * - ``num_gpus`` - - ``worker_extended_resource_requests`` diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index 36fdbbc7..bce54c32 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -54,8 +54,6 @@ class ClusterConfiguration: The number of CPUs to allocate to the head node. head_memory: The amount of memory to allocate to the head node. - head_gpus: - The number of GPUs to allocate to the head node. (Deprecated, use head_extended_resource_requests) head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} head_tolerations: @@ -70,8 +68,6 @@ class ClusterConfiguration: The minimum amount of memory to allocate to each worker. max_memory: The maximum amount of memory to allocate to each worker. - num_gpus: - The number of GPUs to allocate to each worker. (Deprecated, use worker_extended_resource_requests) worker_tolerations: List of tolerations for worker nodes. 
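For configurations that still pass the removed fields, the replacement is the extended-resource form that the deprecation table above mapped them to. A hedged before/after sketch (GPU counts and names are arbitrary):

.. code:: python

    from codeflare_sdk import ClusterConfiguration

    # Previously: ClusterConfiguration(head_gpus=1, num_gpus=2, ...)
    config = ClusterConfiguration(
        name="gpu-example",      # placeholder name
        namespace="default",     # placeholder namespace
        head_extended_resource_requests={"nvidia.com/gpu": 1},
        worker_extended_resource_requests={"nvidia.com/gpu": 2},
    )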
appwrapper: @@ -120,7 +116,6 @@ class ClusterConfiguration: head_memory_requests: Union[int, str] = 8 head_memory_limits: Union[int, str] = 8 head_memory: Optional[Union[int, str]] = None # Deprecating - head_gpus: Optional[int] = None # Deprecating head_extended_resource_requests: Dict[str, Union[str, int]] = field( default_factory=dict ) @@ -134,7 +129,6 @@ class ClusterConfiguration: worker_memory_limits: Union[int, str] = 2 min_memory: Optional[Union[int, str]] = None # Deprecating max_memory: Optional[Union[int, str]] = None # Deprecating - num_gpus: Optional[int] = None # Deprecating worker_tolerations: Optional[List[V1Toleration]] = None appwrapper: bool = False envs: Dict[str, str] = field(default_factory=dict) @@ -195,7 +189,6 @@ def __post_init__(self): self._memory_to_string() self._str_mem_no_unit_add_GB() self._cpu_to_resource() - self._gpu_to_resource() self._combine_extended_resource_mapping() self._validate_extended_resource_requests(self.head_extended_resource_requests) self._validate_extended_resource_requests( @@ -227,26 +220,6 @@ def _validate_extended_resource_requests(self, extended_resources: Dict[str, int f"extended resource '{k}' not found in extended_resource_mapping, available resources are {list(self.extended_resource_mapping.keys())}, to add more supported resources use extended_resource_mapping. i.e. extended_resource_mapping = {{'{k}': 'FOO_BAR'}}" ) - def _gpu_to_resource(self): - if self.head_gpus: - warnings.warn( - f"head_gpus is being deprecated, replacing with head_extended_resource_requests['nvidia.com/gpu'] = {self.head_gpus}" - ) - if "nvidia.com/gpu" in self.head_extended_resource_requests: - raise ValueError( - "nvidia.com/gpu already exists in head_extended_resource_requests" - ) - self.head_extended_resource_requests["nvidia.com/gpu"] = self.head_gpus - if self.num_gpus: - warnings.warn( - f"num_gpus is being deprecated, replacing with worker_extended_resource_requests['nvidia.com/gpu'] = {self.num_gpus}" - ) - if "nvidia.com/gpu" in self.worker_extended_resource_requests: - raise ValueError( - "nvidia.com/gpu already exists in worker_extended_resource_requests" - ) - self.worker_extended_resource_requests["nvidia.com/gpu"] = self.num_gpus - def _str_mem_no_unit_add_GB(self): if isinstance(self.head_memory, str) and self.head_memory.isdecimal(): self.head_memory = f"{self.head_memory}G" diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index 6e8b2669..23da97c3 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -139,8 +139,6 @@ def test_config_creation_wrong_type(): def test_cluster_config_deprecation_conversion(mocker): config = ClusterConfiguration( name="test", - num_gpus=2, - head_gpus=1, head_cpus=3, head_memory=16, min_memory=3, @@ -152,8 +150,6 @@ def test_cluster_config_deprecation_conversion(mocker): assert config.head_cpu_limits == 3 assert config.head_memory_requests == "16G" assert config.head_memory_limits == "16G" - assert config.worker_extended_resource_requests == {"nvidia.com/gpu": 2} - assert config.head_extended_resource_requests == {"nvidia.com/gpu": 1} assert config.worker_memory_requests == "3G" assert config.worker_memory_limits == "4G" assert config.worker_cpu_requests == 1 From c9f7313bdd3fcc66bc263ca89f892ebe81c5cef9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Tue, 10 Jun 2025 14:51:58 +0000 Subject: [PATCH 473/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index a8c7e72a..59d64b37 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 92% - 92% + 93% + 93% From e967fb71c157dbeb0b3dae3d31a67bc4675e19e1 Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Wed, 28 May 2025 08:54:58 +0100 Subject: [PATCH 474/496] fix(RHOAIENG-25120): remove kueue as mandatory in RayCluster Signed-off-by: Pat O'Connor --- .../ray/cluster/build_ray_cluster.py | 4 +++- src/codeflare_sdk/ray/cluster/cluster.py | 16 ++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index 2a3436b2..89c51324 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -488,9 +488,11 @@ def add_queue_label(cluster: "codeflare_sdk.ray.cluster.Cluster", labels: dict): if lq_name == None: return elif not local_queue_exists(cluster): - raise ValueError( + # ValueError removed to pass validation to validating admission policy + print( "local_queue provided does not exist or is not in this namespace. Please provide the correct local_queue name in Cluster Configuration" ) + return labels.update({"kueue.x-k8s.io/queue-name": lq_name}) diff --git a/src/codeflare_sdk/ray/cluster/cluster.py b/src/codeflare_sdk/ray/cluster/cluster.py index d03d78d8..86ed7c4d 100644 --- a/src/codeflare_sdk/ray/cluster/cluster.py +++ b/src/codeflare_sdk/ray/cluster/cluster.py @@ -183,6 +183,14 @@ def up(self): f"Ray Cluster: '{self.config.name}' has successfully been created" ) except Exception as e: # pragma: no cover + if e.status == 422: + print( + "WARNING: RayCluster creation rejected due to invalid Kueue configuration. Please contact your administrator." + ) + else: + print( + "WARNING: Failed to create RayCluster due to unexpected error. Please contact your administrator." + ) return _kube_api_error_handling(e) # Applies a new cluster with the provided or default spec @@ -233,6 +241,14 @@ def apply(self, force=False): except AttributeError as e: raise RuntimeError(f"Failed to initialize DynamicClient: {e}") except Exception as e: # pragma: no cover + if e.status == 422: + print( + "WARNING: RayCluster creation rejected due to invalid Kueue configuration. Please contact your administrator." + ) + else: + print( + "WARNING: Failed to create RayCluster due to unexpected error. Please contact your administrator." 
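With this change a missing or misnamed local queue no longer raises in the SDK; the queue label is skipped and validation is left to the cluster-side admission policy. A hedged illustration using the ``local_queue`` field referenced by the warning message (queue, cluster and namespace names are placeholders):

.. code:: python

    from codeflare_sdk import Cluster, ClusterConfiguration

    cluster = Cluster(ClusterConfiguration(
        name="kueue-example",        # placeholder name
        namespace="default",         # placeholder namespace
        local_queue="team-a-queue",  # if this queue is not found, a warning is printed
                                     # and no kueue.x-k8s.io/queue-name label is added
    ))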
+ ) return _kube_api_error_handling(e) def _throw_for_no_raycluster(self): From 62f8a9a090ce289b487d823cc525d6f43d600843 Mon Sep 17 00:00:00 2001 From: Pawel Paszki Date: Wed, 11 Jun 2025 09:13:38 +0100 Subject: [PATCH 475/496] conf: Update Ray to v2.46.0 and update Runtime Image refs (#836) * conf: Update Ray to v2.46.0 and update Runtime Image refs * test: unify kind cluster config --- .../additional-demos/local_interactive.ipynb | 2 +- .../additional-demos/ray_job_client.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- .../guided-demos/1_cluster_job_client.ipynb | 2 +- .../guided-demos/2_basic_interactive.ipynb | 2 +- .../guided-demos/3_widget_example.ipynb | 2 +- .../notebook-ex-outputs/0_basic_ray.ipynb | 2 +- .../1_cluster_job_client.ipynb | 2 +- .../2_basic_interactive.ipynb | 2 +- .../notebook-ex-outputs/interactivetest.yaml | 4 +- .../notebook-ex-outputs/jobtest.yaml | 4 +- .../notebook-ex-outputs/raytest.yaml | 4 +- .../preview_nbs/0_basic_ray.ipynb | 2 +- .../preview_nbs/1_cluster_job_client.ipynb | 2 +- .../preview_nbs/2_basic_interactive.ipynb | 2 +- .../user-docs/cluster-configuration.rst | 2 +- poetry.lock | 58 ++++++++++--------- pyproject.toml | 2 +- src/codeflare_sdk/common/utils/constants.py | 2 +- .../common/utils/unit_test_support.py | 4 +- .../ray/cluster/build_ray_cluster.py | 2 +- tests/e2e/local_interactive_sdk_kind_test.py | 4 +- .../e2e/mnist_raycluster_sdk_aw_kind_test.py | 2 - tests/e2e/support.py | 2 +- .../appwrapper/test-case-bad.yaml | 6 +- .../appwrapper/unit-test-all-params.yaml | 2 +- tests/test_cluster_yamls/kueue/aw_kueue.yaml | 2 +- .../kueue/ray_cluster_kueue.yaml | 2 +- .../ray/default-appwrapper.yaml | 2 +- .../ray/default-ray-cluster.yaml | 2 +- .../ray/unit-test-all-params.yaml | 2 +- .../support_clusters/test-aw-a.yaml | 6 +- .../support_clusters/test-aw-b.yaml | 6 +- .../support_clusters/test-rc-a.yaml | 6 +- .../support_clusters/test-rc-b.yaml | 6 +- 35 files changed, 77 insertions(+), 79 deletions(-) diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index ed9e5648..be30ba40 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -37,7 +37,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 8d28ad13..599b2425 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -43,7 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 1e729cc1..7e3bbd08 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -49,7 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 73ef0b2a..2aef3184 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -43,7 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 118720aa..df7728b3 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -46,7 +46,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/3_widget_example.ipynb b/demo-notebooks/guided-demos/3_widget_example.ipynb index 3f44afe3..56bd0a5e 100644 --- a/demo-notebooks/guided-demos/3_widget_example.ipynb +++ b/demo-notebooks/guided-demos/3_widget_example.ipynb @@ -49,7 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 2ac5109e..8d97751a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -49,7 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index ff8d53b7..b1f0d5f6 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -43,7 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 57538aa8..bf0b49de 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -46,7 +46,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index 1ac7e603..5182b92a 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -81,7 +81,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.44.1-py311-cu121 + image: quay.io/modh/ray:2.46.0-py311-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -147,7 +147,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.44.1-py311-cu121 + image: quay.io/modh/ray:2.46.0-py311-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index 840b4f81..5df084fd 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.44.1-py311-cu121 + image: quay.io/modh/ray:2.46.0-py311-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.44.1-py311-cu121 + image: quay.io/modh/ray:2.46.0-py311-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index 6dc0b8b6..ae81b795 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -70,7 +70,7 @@ 
spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.44.1-py311-cu121 + image: quay.io/modh/ray:2.46.0-py311-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.44.1-py311-cu121 + image: quay.io/modh/ray:2.46.0-py311-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 2ac5109e..8d97751a 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -49,7 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index d7113ae5..58cb4958 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -43,7 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 8fd9ce0d..4538c8d5 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -46,7 +46,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.44.1-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index f9749f59..ad4e248e 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -35,7 +35,7 @@ requirements for creating the Ray Cluster. The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version: - - For Python 3.11: `quay.io/modh/ray:2.44.1-py311-cu121` + - For Python 3.11: `quay.io/modh/ray:2.46.0-py311-cu121` If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default. 
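With the default image now tracking Ray 2.46.0, overriding it is still a single field on the cluster configuration. A minimal sketch (the tag shown is the Python 3.11 default referenced above; the cluster name and namespace are placeholders, and any compatible custom Ray image can be substituted):

.. code:: python

    from codeflare_sdk import Cluster, ClusterConfiguration

    cluster = Cluster(ClusterConfiguration(
        name="custom-image-example",   # placeholder name
        namespace="default",           # placeholder namespace
        image="quay.io/modh/ray:2.46.0-py311-cu121",  # an explicit image overrides the default
    ))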
diff --git a/poetry.lock b/poetry.lock index 725ce7e0..384e8088 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" @@ -3460,42 +3460,44 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "ray" -version = "2.44.1" +version = "2.46.0" description = "Ray provides a simple, universal API for building distributed applications." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "ray-2.44.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:7d83999ead98bed7e70afddcda870cc1684773cb888e90768ce1a4c72c5fe009"}, - {file = "ray-2.44.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f221b1426d8657ce0c24508d5ff11cabc4a8c40a833c8c5bb63e2217f37cfd9"}, - {file = "ray-2.44.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:3a3db83c54085535bed9b4861d871f6527c5df61532bf7d2285701485de8968f"}, - {file = "ray-2.44.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:18073261c09811d5faefc918abbd8a6bf5f4968277eeae6fbc0992e3724d1496"}, - {file = "ray-2.44.1-cp310-cp310-win_amd64.whl", hash = "sha256:9decf32c1402f44b5f7a54f29bd422e0ae9a45afc0a10090acf9ba6011ca12a7"}, - {file = "ray-2.44.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:949dbd735e5edec80f6140fa6bb536248c7c97535fe5a11acd279295b7bd1a6d"}, - {file = "ray-2.44.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c21a452227eeecfa3d89b50480d1f9bab11b15c9b3695af41421ab8e7e608cfd"}, - {file = "ray-2.44.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:0c6d395c05542a882d14e31abec1dd1a1808a9a0c0dcf94200a827d2d04c08a1"}, - {file = "ray-2.44.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:6e6bd0430d2eb664ae632c96e74c01e4a1bf14ab2a15102e1809b05ea9e0c2c7"}, - {file = "ray-2.44.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e94bd887898dc08db7f87c0429bc41219aceb552af0b1cd4924c01718fc6a77"}, - {file = "ray-2.44.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:b6c7b677035c08141ae01adc25eade20a979eb7c9cabfe9ad1c99396e157ed59"}, - {file = "ray-2.44.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:865a83eaf06d5e988c441bc2607b8d1f326d952d139f66c18ea21f077fedbff4"}, - {file = "ray-2.44.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:3d9807c9c31d42793ca309747b9c7affdd7488a532979aa346d4c889b828783a"}, - {file = "ray-2.44.1-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:a4c0175cc40e6b065391bc8be0f208bacf8cee7ee61392c7791004f17622e7bd"}, - {file = "ray-2.44.1-cp312-cp312-win_amd64.whl", hash = "sha256:2d62f875c36432b6d5ee666ec23280d23a8de44c0a14a56959aa9b75e644b49f"}, - {file = "ray-2.44.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:12e94c27fedd703566de016314c8a5b164a96c5802313c64e7b643157c3930eb"}, - {file = "ray-2.44.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:16ecb31e1156a8952ad7a27da6e91704a67e8f37cb5519b0afc76fb5b21515d6"}, - {file = "ray-2.44.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:72d09a7bd2803979c322f0820f1138257b2fe7f557a615b6521441a1a14a044a"}, - {file = "ray-2.44.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:1cadfa4ec992d3f9c56ffc5ce22b4a937546954a3f5ea10f32d3cf870e0a6c37"}, - {file = "ray-2.44.1-cp39-cp39-win_amd64.whl", hash = "sha256:35028f39090de977374f7ecdd10fdbd6cffd1dece22c31a9143b5be1da76ac50"}, + {file = "ray-2.46.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = 
"sha256:719244b84df79502e5f09497f256618d94d78d66fbaf229422008a0568d3a0ff"}, + {file = "ray-2.46.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4378a86919e6643238a1094f711b87fa8dc1a18b998d4190f69ab33c64a22a8c"}, + {file = "ray-2.46.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:396b912a4dbf64966e2fdfca9facbcafe57b792ca4842ac5ae17507fdbdfe89f"}, + {file = "ray-2.46.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:c12850608c57c8afd9613a9f757d77663c50d4bd4e77ba2f181425052520c01a"}, + {file = "ray-2.46.0-cp310-cp310-win_amd64.whl", hash = "sha256:bc953aa4879c7a77893f921905df5cf65227cafd94fbc8273bec65ea393eacdd"}, + {file = "ray-2.46.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:942ba51de6f9cd7fb2ed17618181af48ce6b9517743d3235d846ec32295eca76"}, + {file = "ray-2.46.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af84f3ed0854bb6de28192ca9e0a3bfa1eb34d69f118ae6348522198896480c8"}, + {file = "ray-2.46.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:81c8ce8b7ba33cb607ec78f5eb2555470e3046bb317732d8282e8189bb58ccbd"}, + {file = "ray-2.46.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:d4ddedc3f4d48df564bcee7b131c98c9f898fef0a57483f4ba335f47f951a62f"}, + {file = "ray-2.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:130415c4d231830156f37ce70acbdb5fdee10f6886adc4e85bdc4533d51c24c6"}, + {file = "ray-2.46.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:d1f37ead29299637144726f809c2e0ff958dd9c0e75930ef614156d6a0a3a57f"}, + {file = "ray-2.46.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7a064acfeee7f0677d9e3f25daef9c59593559faea764b44a3e2c5331d5d832"}, + {file = "ray-2.46.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:006cbe1a8fdc37664114aa218773100ee891399785e256c202e48958d2dac167"}, + {file = "ray-2.46.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:5cec1edda93f618ffd2301f81d5398037f03fa9b16825e7e4d8a00ae7a9a4381"}, + {file = "ray-2.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:7d3160f8d187baaea91a86d16a9fd81136cf8607419c94b7a74d66fce774b5c2"}, + {file = "ray-2.46.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:b2fc2c43ea0a37521193c61ef9a27b6fca8dbab116a58a52fd44344cd73e1ece"}, + {file = "ray-2.46.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4296dd8c0174256a04ee4b54abe013b6802a45fb85fb7cfdb1375231965d6d4d"}, + {file = "ray-2.46.0-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:808daece1f12bd8924b9c6382a0f98da6f5c6886cfb271ed8d89407a89413cd5"}, + {file = "ray-2.46.0-cp313-cp313-manylinux2014_x86_64.whl", hash = "sha256:a5a28c0a311d2c3221dcf729c40898a6df82466bb5af21e81be0453e09856adf"}, + {file = "ray-2.46.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:e0ec198c16d0e9af7f03242ef7ad7d548eee37a918193917278a124ddd57410a"}, + {file = "ray-2.46.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e31568818973efa4f8ce18b82bce03089395a62ac9fe639e94d755959f607fe9"}, + {file = "ray-2.46.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:7c44a98cb24f4905e898d05b787cbe9f267a9f66c1e1f8cda50814f8b3673be2"}, + {file = "ray-2.46.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:91ea998a49578b1450cbef60705f6ece8622a262a3d764d5c99ba89b741de5d0"}, + {file = "ray-2.46.0-cp39-cp39-win_amd64.whl", hash = "sha256:018e98c9745eae53b53ad14fef1ca1c43bb64c39c3cceb9e6d4517729396003b"}, ] [package.dependencies] aiohttp = {version = ">=3.7", optional = true, markers = "extra == \"default\""} aiohttp-cors = {version = "*", optional = true, markers = "extra == \"default\""} -aiosignal = "*" click = ">=7.0" colorful = 
{version = "*", optional = true, markers = "extra == \"default\""} filelock = "*" -frozenlist = "*" fsspec = {version = "*", optional = true, markers = "extra == \"data\""} grpcio = {version = ">=1.42.0", optional = true, markers = "python_version >= \"3.10\" and extra == \"default\""} jsonschema = "*" @@ -3524,13 +3526,13 @@ virtualenv = {version = ">=20.0.24,<20.21.1 || >20.21.1", optional = true, marke adag = ["cupy-cuda12x ; sys_platform != \"darwin\""] air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.44.1)", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", 
"prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.46.0)", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] cgraph = ["cupy-cuda12x ; sys_platform != \"darwin\""] client = ["grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\""] -cpp = ["ray-cpp (==2.44.1)"] +cpp = ["ray-cpp (==2.46.0)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)"] default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] -llm = ["aiohttp (>=3.7)", "aiohttp-cors", "async-timeout ; python_version < \"3.11\"", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "jsonref (>=1.1.0)", "jsonschema", "ninja", "numpy (>=1.20)", "opencensus", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "vllm (>=0.7.2)", "watchfiles"] +llm = ["aiohttp (>=3.7)", "aiohttp-cors", "async-timeout ; python_version < \"3.11\"", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "jsonref (>=1.1.0)", "jsonschema", "ninja", "numpy (>=1.20)", "opencensus", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "vllm (>=0.8.5)", "watchfiles"] observability = ["memray ; sys_platform != \"win32\"", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] rllib = ["dm-tree", "fsspec", "gymnasium (==1.0.0)", "lz4", "ormsgpack (==1.7.0)", "pandas", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pyyaml", "requests", "scipy", "tensorboardX (>=1.9)"] serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] @@ -4527,4 +4529,4 @@ propcache = ">=0.2.1" [metadata] lock-version = 
"2.1" python-versions = "^3.11" -content-hash = "d04590af4fd433110720e9c0a70760c6127ee6d8e40a4b2dfc513673bc91850d" +content-hash = "7cdd47cb60807e3032d9746b1c3ace156a10f9b6b430c740f0201a8aed7ac860" diff --git a/pyproject.toml b/pyproject.toml index 8e383671..03c390c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] python = "^3.11" openshift-client = "1.0.18" rich = ">=12.5,<14.0" -ray = {version = "2.44.1", extras = ["data", "default"]} +ray = {version = "2.46.0", extras = ["data", "default"]} kubernetes = ">= 27.2.0" cryptography = "43.0.3" executing = "1.2.0" diff --git a/src/codeflare_sdk/common/utils/constants.py b/src/codeflare_sdk/common/utils/constants.py index 178f6ac3..a10e3e0e 100644 --- a/src/codeflare_sdk/common/utils/constants.py +++ b/src/codeflare_sdk/common/utils/constants.py @@ -1 +1 @@ -RAY_VERSION = "2.44.1" +RAY_VERSION = "2.46.0" diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index 79c7bf01..eee8d933 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -68,7 +68,7 @@ def create_cluster_wrong_type(): worker_extended_resource_requests={"nvidia.com/gpu": 7}, appwrapper=True, image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31", + image="quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707", write_to_file=True, labels={1: 1}, ) @@ -275,7 +275,7 @@ def apply_template(yaml_file_path, variables): def get_expected_image(): # TODO: Add Python 3.12 support python_version = sys.version_info - return "quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31" + return "quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707" def get_template_variables(): diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index 89c51324..11b10fc3 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -94,7 +94,7 @@ ] SUPPORTED_PYTHON_VERSIONS = { - "3.11": "quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31", + "3.11": "quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707", } diff --git a/tests/e2e/local_interactive_sdk_kind_test.py b/tests/e2e/local_interactive_sdk_kind_test.py index eb5066db..6f025d22 100644 --- a/tests/e2e/local_interactive_sdk_kind_test.py +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -56,10 +56,8 @@ def run_local_interactives( num_workers=1, head_cpu_requests="500m", head_cpu_limits="500m", - head_memory_requests=2, - head_memory_limits=2, worker_cpu_requests="500m", - worker_cpu_limits="500m", + worker_cpu_limits=1, worker_memory_requests=1, worker_memory_limits=4, worker_extended_resource_requests={gpu_resource_name: number_of_gpus}, diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py index b94fe252..49f0888d 100644 --- a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -44,8 +44,6 @@ def run_mnist_raycluster_sdk_kind( num_workers=1, head_cpu_requests="500m", head_cpu_limits="500m", - head_memory_requests=2, - head_memory_limits=2, 
worker_cpu_requests="500m", worker_cpu_limits=1, worker_memory_requests=1, diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 46dd4e70..c81f5b73 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -26,7 +26,7 @@ def get_ray_cluster(cluster_name, namespace): def get_ray_image(): - default_ray_image = "quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31" + default_ray_image = "quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707" return os.getenv("RAY_IMAGE", default_ray_image) diff --git a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml index cb1ed471..4aa4d3dd 100644 --- a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml +++ b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml @@ -42,7 +42,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 + image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -66,7 +66,7 @@ spec: requests: cpu: 2 memory: 8G - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster maxReplicas: 2 @@ -89,7 +89,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 + image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml index af5acbad..bfe23893 100644 --- a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml @@ -138,7 +138,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-aw-all-params maxReplicas: 10 diff --git a/tests/test_cluster_yamls/kueue/aw_kueue.yaml b/tests/test_cluster_yamls/kueue/aw_kueue.yaml index 7101f6a8..08b6d5cf 100644 --- a/tests/test_cluster_yamls/kueue/aw_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/aw_kueue.yaml @@ -93,7 +93,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-unit-test-aw-kueue maxReplicas: 2 diff --git a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml index f8b3aa46..bb8e8494 100644 --- a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml @@ -93,7 +93,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-unit-test-cluster-kueue maxReplicas: 2 diff --git a/tests/test_cluster_yamls/ray/default-appwrapper.yaml b/tests/test_cluster_yamls/ray/default-appwrapper.yaml index 1532c0e8..22fba282 100644 --- a/tests/test_cluster_yamls/ray/default-appwrapper.yaml +++ b/tests/test_cluster_yamls/ray/default-appwrapper.yaml @@ -91,7 +91,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-default-appwrapper maxReplicas: 1 diff --git 
a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml index db401026..75713321 100644 --- a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml +++ b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml @@ -83,7 +83,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-default-cluster maxReplicas: 1 diff --git a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml index 6900b058..98da45b1 100644 --- a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml @@ -129,7 +129,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-test-all-params maxReplicas: 10 diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml index 9c89b593..4ffbef98 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 + - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -91,7 +91,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-test-cluster-a maxReplicas: 1 @@ -109,7 +109,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 + - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml index b2701f20..56d2a8e2 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 + - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -91,7 +91,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-test-cluster-b maxReplicas: 1 @@ -109,7 +109,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 + - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml index 9ece6661..e504566a 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml @@ -29,7 +29,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 + - image: 
quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -82,7 +82,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-test-cluster-a maxReplicas: 1 @@ -100,7 +100,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 + - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml index 9d52e300..22162295 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml @@ -29,7 +29,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 + - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -82,7 +82,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.44.1 + rayVersion: 2.46.0 workerGroupSpecs: - groupName: small-group-test-rc-b maxReplicas: 1 @@ -100,7 +100,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:ac401c35d29cbd920ef982775f20e86d948b81eb67e83adbbbba8b29ad33ca31 + - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 lifecycle: preStop: exec: From 07bdade8d37b2461a7f4c7588ba1ec67ba5c7a2d Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Tue, 10 Jun 2025 16:02:20 +0100 Subject: [PATCH 476/496] chore(deps): update dependencies --- poetry.lock | 1723 +++++++++++++++++++++++++-------------------------- 1 file changed, 861 insertions(+), 862 deletions(-) diff --git a/poetry.lock b/poetry.lock index 384e8088..08d51b6f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -14,97 +14,102 @@ files = [ [[package]] name = "aiohttp" -version = "3.11.18" +version = "3.12.12" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:96264854fedbea933a9ca4b7e0c745728f01380691687b7365d18d9e977179c4"}, - {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9602044ff047043430452bc3a2089743fa85da829e6fc9ee0025351d66c332b6"}, - {file = "aiohttp-3.11.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5691dc38750fcb96a33ceef89642f139aa315c8a193bbd42a0c33476fd4a1609"}, - {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554c918ec43f8480b47a5ca758e10e793bd7410b83701676a4782672d670da55"}, - {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a4076a2b3ba5b004b8cffca6afe18a3b2c5c9ef679b4d1e9859cf76295f8d4f"}, - {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:767a97e6900edd11c762be96d82d13a1d7c4fc4b329f054e88b57cdc21fded94"}, - {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ddc9337a0fb0e727785ad4f41163cc314376e82b31846d3835673786420ef1"}, - {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f414f37b244f2a97e79b98d48c5ff0789a0b4b4609b17d64fa81771ad780e415"}, - {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fdb239f47328581e2ec7744ab5911f97afb10752332a6dd3d98e14e429e1a9e7"}, - {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f2c50bad73ed629cc326cc0f75aed8ecfb013f88c5af116f33df556ed47143eb"}, - {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a8d8f20c39d3fa84d1c28cdb97f3111387e48209e224408e75f29c6f8e0861d"}, - {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:106032eaf9e62fd6bc6578c8b9e6dc4f5ed9a5c1c7fb2231010a1b4304393421"}, - {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b491e42183e8fcc9901d8dcd8ae644ff785590f1727f76ca86e731c61bfe6643"}, - {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad8c745ff9460a16b710e58e06a9dec11ebc0d8f4dd82091cefb579844d69868"}, - {file = "aiohttp-3.11.18-cp310-cp310-win32.whl", hash = "sha256:8e57da93e24303a883146510a434f0faf2f1e7e659f3041abc4e3fb3f6702a9f"}, - {file = "aiohttp-3.11.18-cp310-cp310-win_amd64.whl", hash = "sha256:cc93a4121d87d9f12739fc8fab0a95f78444e571ed63e40bfc78cd5abe700ac9"}, - {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:427fdc56ccb6901ff8088544bde47084845ea81591deb16f957897f0f0ba1be9"}, - {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c828b6d23b984255b85b9b04a5b963a74278b7356a7de84fda5e3b76866597b"}, - {file = "aiohttp-3.11.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c2eaa145bb36b33af1ff2860820ba0589e165be4ab63a49aebfd0981c173b66"}, - {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d518ce32179f7e2096bf4e3e8438cf445f05fedd597f252de9f54c728574756"}, - {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:0700055a6e05c2f4711011a44364020d7a10fbbcd02fbf3e30e8f7e7fddc8717"}, - {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8bd1cde83e4684324e6ee19adfc25fd649d04078179890be7b29f76b501de8e4"}, - {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73b8870fe1c9a201b8c0d12c94fe781b918664766728783241a79e0468427e4f"}, - {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25557982dd36b9e32c0a3357f30804e80790ec2c4d20ac6bcc598533e04c6361"}, - {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e889c9df381a2433802991288a61e5a19ceb4f61bd14f5c9fa165655dcb1fd1"}, - {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9ea345fda05bae217b6cce2acf3682ce3b13d0d16dd47d0de7080e5e21362421"}, - {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9f26545b9940c4b46f0a9388fd04ee3ad7064c4017b5a334dd450f616396590e"}, - {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3a621d85e85dccabd700294494d7179ed1590b6d07a35709bb9bd608c7f5dd1d"}, - {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9c23fd8d08eb9c2af3faeedc8c56e134acdaf36e2117ee059d7defa655130e5f"}, - {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9e6b0e519067caa4fd7fb72e3e8002d16a68e84e62e7291092a5433763dc0dd"}, - {file = "aiohttp-3.11.18-cp311-cp311-win32.whl", hash = "sha256:122f3e739f6607e5e4c6a2f8562a6f476192a682a52bda8b4c6d4254e1138f4d"}, - {file = "aiohttp-3.11.18-cp311-cp311-win_amd64.whl", hash = "sha256:e6f3c0a3a1e73e88af384b2e8a0b9f4fb73245afd47589df2afcab6b638fa0e6"}, - {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2"}, - {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508"}, - {file = "aiohttp-3.11.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e"}, - {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f"}, - {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f"}, - {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec"}, - {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6"}, - {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009"}, - {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4"}, - {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9"}, - {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb"}, - {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda"}, - {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1"}, - {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea"}, - {file = "aiohttp-3.11.18-cp312-cp312-win32.whl", hash = "sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8"}, - {file = "aiohttp-3.11.18-cp312-cp312-win_amd64.whl", hash = "sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8"}, - {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811"}, - {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804"}, - {file = "aiohttp-3.11.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd"}, - {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c"}, - {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118"}, - {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1"}, - {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000"}, - {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137"}, - {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93"}, - {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3"}, - {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8"}, - {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2"}, - {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261"}, - {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7"}, - {file = "aiohttp-3.11.18-cp313-cp313-win32.whl", hash = "sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78"}, - {file = "aiohttp-3.11.18-cp313-cp313-win_amd64.whl", hash = "sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01"}, - {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:469ac32375d9a716da49817cd26f1916ec787fc82b151c1c832f58420e6d3533"}, - {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:3cec21dd68924179258ae14af9f5418c1ebdbba60b98c667815891293902e5e0"}, - {file = "aiohttp-3.11.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b426495fb9140e75719b3ae70a5e8dd3a79def0ae3c6c27e012fc59f16544a4a"}, - {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2f41203e2808616292db5d7170cccf0c9f9c982d02544443c7eb0296e8b0c7"}, - {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc0ae0a5e9939e423e065a3e5b00b24b8379f1db46046d7ab71753dfc7dd0e1"}, - {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe7cdd3f7d1df43200e1c80f1aed86bb36033bf65e3c7cf46a2b97a253ef8798"}, - {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5199be2a2f01ffdfa8c3a6f5981205242986b9e63eb8ae03fd18f736e4840721"}, - {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ccec9e72660b10f8e283e91aa0295975c7bd85c204011d9f5eb69310555cf30"}, - {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1596ebf17e42e293cbacc7a24c3e0dc0f8f755b40aff0402cb74c1ff6baec1d3"}, - {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:eab7b040a8a873020113ba814b7db7fa935235e4cbaf8f3da17671baa1024863"}, - {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5d61df4a05476ff891cff0030329fee4088d40e4dc9b013fac01bc3c745542c2"}, - {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:46533e6792e1410f9801d09fd40cbbff3f3518d1b501d6c3c5b218f427f6ff08"}, - {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c1b90407ced992331dd6d4f1355819ea1c274cc1ee4d5b7046c6761f9ec11829"}, - {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a2fd04ae4971b914e54fe459dd7edbbd3f2ba875d69e057d5e3c8e8cac094935"}, - {file = "aiohttp-3.11.18-cp39-cp39-win32.whl", hash = "sha256:b2f317d1678002eee6fe85670039fb34a757972284614638f82b903a03feacdc"}, - {file = "aiohttp-3.11.18-cp39-cp39-win_amd64.whl", hash = "sha256:5e7007b8d1d09bce37b54111f593d173691c530b80f27c6493b928dabed9e6ef"}, - {file = "aiohttp-3.11.18.tar.gz", hash = "sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a"}, + {file = "aiohttp-3.12.12-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6f25e9d274d6abbb15254f76f100c3984d6b9ad6e66263cc60a465dd5c7e48f5"}, + {file = "aiohttp-3.12.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b8ec3c1a1c13d24941b5b913607e57b9364e4c0ea69d5363181467492c4b2ba6"}, + {file = "aiohttp-3.12.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81ef2f9253c327c211cb7b06ea2edd90e637cf21c347b894d540466b8d304e08"}, + {file = "aiohttp-3.12.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28ded835c3663fd41c9ad44685811b11e34e6ac9a7516a30bfce13f6abba4496"}, + {file = "aiohttp-3.12.12-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a4b78ccf254fc10605b263996949a94ca3f50e4f9100e05137d6583e266b711e"}, + {file = "aiohttp-3.12.12-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f4a5af90d5232c41bb857568fe7d11ed84408653ec9da1ff999cc30258b9bd1"}, + {file = "aiohttp-3.12.12-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffa5205c2f53f1120e93fdf2eca41b0f6344db131bc421246ee82c1e1038a14a"}, + {file = 
"aiohttp-3.12.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68301660f0d7a3eddfb84f959f78a8f9db98c76a49b5235508fa16edaad0f7c"}, + {file = "aiohttp-3.12.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db874d3b0c92fdbb553751af9d2733b378c25cc83cd9dfba87f12fafd2dc9cd5"}, + {file = "aiohttp-3.12.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5e53cf9c201b45838a2d07b1f2d5f7fec9666db7979240002ce64f9b8a1e0cf2"}, + {file = "aiohttp-3.12.12-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:8687cc5f32b4e328c233acd387d09a1b477007896b2f03c1c823a0fd05f63883"}, + {file = "aiohttp-3.12.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5ee537ad29de716a3d8dc46c609908de0c25ffeebf93cd94a03d64cdc07d66d0"}, + {file = "aiohttp-3.12.12-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:411f821be5af6af11dc5bed6c6c1dc6b6b25b91737d968ec2756f9baa75e5f9b"}, + {file = "aiohttp-3.12.12-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f90319d94cf5f9786773237f24bd235a7b5959089f1af8ec1154580a3434b503"}, + {file = "aiohttp-3.12.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:73b148e606f34e9d513c451fd65efe1091772659ca5703338a396a99f60108ff"}, + {file = "aiohttp-3.12.12-cp310-cp310-win32.whl", hash = "sha256:d40e7bfd577fdc8a92b72f35dfbdd3ec90f1bc8a72a42037fefe34d4eca2d4a1"}, + {file = "aiohttp-3.12.12-cp310-cp310-win_amd64.whl", hash = "sha256:65c7804a2343893d6dea9fce69811aea0a9ac47f68312cf2e3ee1668cd9a387f"}, + {file = "aiohttp-3.12.12-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:38823fe0d8bc059b3eaedb263fe427d887c7032e72b4ef92c472953285f0e658"}, + {file = "aiohttp-3.12.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:10237f2c34711215d04ed21da63852ce023608299554080a45c576215d9df81c"}, + {file = "aiohttp-3.12.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:563ec477c0dc6d56fc7f943a3475b5acdb399c7686c30f5a98ada24bb7562c7a"}, + {file = "aiohttp-3.12.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3d05c46a61aca7c47df74afff818bc06a251ab95d95ff80b53665edfe1e0bdf"}, + {file = "aiohttp-3.12.12-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:277c882916759b4a6b6dc7e2ceb124aad071b3c6456487808d9ab13e1b448d57"}, + {file = "aiohttp-3.12.12-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:216abf74b324b0f4e67041dd4fb2819613909a825904f8a51701fbcd40c09cd7"}, + {file = "aiohttp-3.12.12-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65d6cefad286459b68e7f867b9586a821fb7f121057b88f02f536ef570992329"}, + {file = "aiohttp-3.12.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:feaaaff61966b5f4b4eae0b79fc79427f49484e4cfa5ab7d138ecd933ab540a8"}, + {file = "aiohttp-3.12.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a05917780b7cad1755784b16cfaad806bc16029a93d15f063ca60185b7d9ba05"}, + {file = "aiohttp-3.12.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:082c5ec6d262c1b2ee01c63f4fb9152c17f11692bf16f0f100ad94a7a287d456"}, + {file = "aiohttp-3.12.12-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:b265a3a8b379b38696ac78bdef943bdc4f4a5d6bed1a3fb5c75c6bab1ecea422"}, + {file = "aiohttp-3.12.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2e0f2e208914ecbc4b2a3b7b4daa759d0c587d9a0b451bb0835ac47fae7fa735"}, + {file = 
"aiohttp-3.12.12-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9923b025845b72f64d167bca221113377c8ffabd0a351dc18fb839d401ee8e22"}, + {file = "aiohttp-3.12.12-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1ebb213445900527831fecc70e185bf142fdfe5f2a691075f22d63c65ee3c35a"}, + {file = "aiohttp-3.12.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6fc369fb273a8328077d37798b77c1e65676709af5c182cb74bd169ca9defe81"}, + {file = "aiohttp-3.12.12-cp311-cp311-win32.whl", hash = "sha256:58ecd10fda6a44c311cd3742cfd2aea8c4c600338e9f27cb37434d9f5ca9ddaa"}, + {file = "aiohttp-3.12.12-cp311-cp311-win_amd64.whl", hash = "sha256:b0066e88f30be00badffb5ef8f2281532b9a9020863d873ae15f7c147770b6ec"}, + {file = "aiohttp-3.12.12-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:98451ce9ce229d092f278a74a7c2a06b3aa72984673c87796126d7ccade893e9"}, + {file = "aiohttp-3.12.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:adbac7286d89245e1aff42e948503fdc6edf6d5d65c8e305a67c40f6a8fb95f4"}, + {file = "aiohttp-3.12.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0728882115bfa85cbd8d0f664c8ccc0cfd5bd3789dd837596785450ae52fac31"}, + {file = "aiohttp-3.12.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf3b9d9e767f9d0e09fb1a31516410fc741a62cc08754578c40abc497d09540"}, + {file = "aiohttp-3.12.12-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c944860e86b9f77a462321a440ccf6fa10f5719bb9d026f6b0b11307b1c96c7b"}, + {file = "aiohttp-3.12.12-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b1979e1f0c98c06fd0cd940988833b102fa3aa56751f6c40ffe85cabc51f6fd"}, + {file = "aiohttp-3.12.12-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:120b7dd084e96cfdad85acea2ce1e7708c70a26db913eabb8d7b417c728f5d84"}, + {file = "aiohttp-3.12.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e58f5ae79649ffa247081c2e8c85e31d29623cf2a3137dda985ae05c9478aae"}, + {file = "aiohttp-3.12.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aa5f049e3e2745b0141f13e5a64e7c48b1a1427ed18bbb7957b348f282fee56"}, + {file = "aiohttp-3.12.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7163cc9cf3722d90f1822f8a38b211e3ae2fc651c63bb55449f03dc1b3ff1d44"}, + {file = "aiohttp-3.12.12-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ef97c4d035b721de6607f3980fa3e4ef0ec3aca76474b5789b7fac286a8c4e23"}, + {file = "aiohttp-3.12.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1c14448d6a86acadc3f7b2f4cc385d1fb390acb6f37dce27f86fe629410d92e3"}, + {file = "aiohttp-3.12.12-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a1b6df6255cfc493454c79221183d64007dd5080bcda100db29b7ff181b8832c"}, + {file = "aiohttp-3.12.12-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:60fc7338dfb0626c2927bfbac4785de3ea2e2bbe3d328ba5f3ece123edda4977"}, + {file = "aiohttp-3.12.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d2afc72207ef4c9d4ca9fcd00689a6a37ef2d625600c3d757b5c2b80c9d0cf9a"}, + {file = "aiohttp-3.12.12-cp312-cp312-win32.whl", hash = "sha256:8098a48f93b2cbcdb5778e7c9a0e0375363e40ad692348e6e65c3b70d593b27c"}, + {file = "aiohttp-3.12.12-cp312-cp312-win_amd64.whl", hash = "sha256:d1c1879b2e0fc337d7a1b63fe950553c2b9e93c071cf95928aeea1902d441403"}, + {file = "aiohttp-3.12.12-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ea5d604318234427929d486954e3199aded65f41593ac57aa0241ab93dda3d15"}, + 
{file = "aiohttp-3.12.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e03ff38250b8b572dce6fcd7b6fb6ee398bb8a59e6aa199009c5322d721df4fc"}, + {file = "aiohttp-3.12.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:71125b1fc2b6a94bccc63bbece620906a4dead336d2051f8af9cbf04480bc5af"}, + {file = "aiohttp-3.12.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:784a66f9f853a22c6b8c2bd0ff157f9b879700f468d6d72cfa99167df08c5c9c"}, + {file = "aiohttp-3.12.12-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a5be0b58670b54301404bd1840e4902570a1c3be00358e2700919cb1ea73c438"}, + {file = "aiohttp-3.12.12-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8f13566fc7bf5a728275b434bc3bdea87a7ed3ad5f734102b02ca59d9b510f"}, + {file = "aiohttp-3.12.12-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d736e57d1901683bc9be648aa308cb73e646252c74b4c639c35dcd401ed385ea"}, + {file = "aiohttp-3.12.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2007eaa7aae9102f211c519d1ec196bd3cecb1944a095db19eeaf132b798738"}, + {file = "aiohttp-3.12.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a813e61583cab6d5cdbaa34bc28863acdb92f9f46e11de1b3b9251a1e8238f6"}, + {file = "aiohttp-3.12.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e408293aa910b0aea48b86a28eace41d497a85ba16c20f619f0c604597ef996c"}, + {file = "aiohttp-3.12.12-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:f3d31faf290f5a30acba46b388465b67c6dbe8655d183e9efe2f6a1d594e6d9d"}, + {file = "aiohttp-3.12.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0b84731697325b023902aa643bd1726d999f5bc7854bc28b17ff410a81151d4b"}, + {file = "aiohttp-3.12.12-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a324c6852b6e327811748446e56cc9bb6eaa58710557922183175816e82a4234"}, + {file = "aiohttp-3.12.12-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:22fd867fbd72612dcf670c90486dbcbaf702cb807fb0b42bc0b7a142a573574a"}, + {file = "aiohttp-3.12.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3e092f1a970223794a4bf620a26c0e4e4e8e36bccae9b0b5da35e6d8ee598a03"}, + {file = "aiohttp-3.12.12-cp313-cp313-win32.whl", hash = "sha256:7f5f5eb8717ef8ba15ab35fcde5a70ad28bbdc34157595d1cddd888a985f5aae"}, + {file = "aiohttp-3.12.12-cp313-cp313-win_amd64.whl", hash = "sha256:ace2499bdd03c329c054dc4b47361f2b19d5aa470f7db5c7e0e989336761b33c"}, + {file = "aiohttp-3.12.12-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0d0b1c27c05a7d39a50e946ec5f94c3af4ffadd33fa5f20705df42fb0a72ca14"}, + {file = "aiohttp-3.12.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5928847e6f7b7434921fbabf73fa5609d1f2bf4c25d9d4522b1fcc3b51995cb"}, + {file = "aiohttp-3.12.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7678147c3c85a7ae61559b06411346272ed40a08f54bc05357079a63127c9718"}, + {file = "aiohttp-3.12.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f50057f36f2a1d8e750b273bb966bec9f69ee1e0a20725ae081610501f25d555"}, + {file = "aiohttp-3.12.12-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5e834f0f11ff5805d11f0f22b627c75eadfaf91377b457875e4e3affd0b924f"}, + {file = "aiohttp-3.12.12-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f94b2e2dea19d09745ef02ed483192260750f18731876a5c76f1c254b841443a"}, + {file = 
"aiohttp-3.12.12-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b434bfb49564dc1c318989a0ab1d3000d23e5cfd00d8295dc9d5a44324cdd42d"}, + {file = "aiohttp-3.12.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ed76bc80177ddb7c5c93e1a6440b115ed2c92a3063420ac55206fd0832a6459"}, + {file = "aiohttp-3.12.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1282a9acd378f2aed8dc79c01e702b1d5fd260ad083926a88ec7e987c4e0ade"}, + {file = "aiohttp-3.12.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:09a213c13fba321586edab1528b530799645b82bd64d79b779eb8d47ceea155a"}, + {file = "aiohttp-3.12.12-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:72eae16a9233561d315e72ae78ed9fc65ab3db0196e56cb2d329c755d694f137"}, + {file = "aiohttp-3.12.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f25990c507dbbeefd5a6a17df32a4ace634f7b20a38211d1b9609410c7f67a24"}, + {file = "aiohttp-3.12.12-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:3a2aa255417c8ccf1b39359cd0a3d63ae3b5ced83958dbebc4d9113327c0536a"}, + {file = "aiohttp-3.12.12-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a4c53b89b3f838e9c25f943d1257efff10b348cb56895f408ddbcb0ec953a2ad"}, + {file = "aiohttp-3.12.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b5a49c2dcb32114455ad503e8354624d85ab311cbe032da03965882492a9cb98"}, + {file = "aiohttp-3.12.12-cp39-cp39-win32.whl", hash = "sha256:74fddc0ba8cea6b9c5bd732eb9d97853543586596b86391f8de5d4f6c2a0e068"}, + {file = "aiohttp-3.12.12-cp39-cp39-win_amd64.whl", hash = "sha256:ddf40ba4a1d0b4d232dc47d2b98ae7e937dcbc40bb5f2746bce0af490a64526f"}, + {file = "aiohttp-3.12.12.tar.gz", hash = "sha256:05875595d2483d96cb61fa9f64e75262d7ac6251a7e3c811d8e26f7d721760bd"}, ] [package.dependencies] -aiohappyeyeballs = ">=2.3.0" +aiohappyeyeballs = ">=2.5.0" aiosignal = ">=1.1.2" attrs = ">=17.3.0" frozenlist = ">=1.1.1" @@ -113,7 +118,7 @@ propcache = ">=0.2.0" yarl = ">=1.17.0,<2.0" [package.extras] -speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiohttp-cors" @@ -194,25 +199,19 @@ files = [ [[package]] name = "argon2-cffi" -version = "23.1.0" +version = "25.1.0" description = "Argon2 for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" groups = ["test"] files = [ - {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, - {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, + {file = "argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741"}, + {file = "argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1"}, ] [package.dependencies] argon2-cffi-bindings = "*" -[package.extras] -dev = ["argon2-cffi[tests,typing]", "tox (>4)"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] -tests = ["hypothesis", "pytest"] -typing = ["mypy"] - [[package]] name = "argon2-cffi-bindings" version = "21.2.0" @@ -649,14 +648,14 @@ files = [ [[package]] name = "click" 
-version = "8.2.0" +version = "8.2.1" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" groups = ["main"] files = [ - {file = "click-8.2.0-py3-none-any.whl", hash = "sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c"}, - {file = "click-8.2.0.tar.gz", hash = "sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d"}, + {file = "click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b"}, + {file = "click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202"}, ] [package.dependencies] @@ -919,14 +918,14 @@ files = [ [[package]] name = "durationpy" -version = "0.9" +version = "0.10" description = "Module for converting between datetime.timedelta and Go's Duration strings." optional = false python-versions = "*" groups = ["main"] files = [ - {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, - {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, + {file = "durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286"}, + {file = "durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba"}, ] [[package]] @@ -990,128 +989,128 @@ files = [ [[package]] name = "frozenlist" -version = "1.6.0" +version = "1.7.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e6e558ea1e47fd6fa8ac9ccdad403e5dd5ecc6ed8dda94343056fa4277d5c65e"}, - {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4b3cd7334a4bbc0c472164f3744562cb72d05002cc6fcf58adb104630bbc352"}, - {file = "frozenlist-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9799257237d0479736e2b4c01ff26b5c7f7694ac9692a426cb717f3dc02fff9b"}, - {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a7bb0fe1f7a70fb5c6f497dc32619db7d2cdd53164af30ade2f34673f8b1fc"}, - {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:36d2fc099229f1e4237f563b2a3e0ff7ccebc3999f729067ce4e64a97a7f2869"}, - {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f27a9f9a86dcf00708be82359db8de86b80d029814e6693259befe82bb58a106"}, - {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75ecee69073312951244f11b8627e3700ec2bfe07ed24e3a685a5979f0412d24"}, - {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2c7d5aa19714b1b01a0f515d078a629e445e667b9da869a3cd0e6fe7dec78bd"}, - {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69bbd454f0fb23b51cadc9bdba616c9678e4114b6f9fa372d462ff2ed9323ec8"}, - {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7daa508e75613809c7a57136dec4871a21bca3080b3a8fc347c50b187df4f00c"}, - {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = 
"sha256:89ffdb799154fd4d7b85c56d5fa9d9ad48946619e0eb95755723fffa11022d75"}, - {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:920b6bd77d209931e4c263223381d63f76828bec574440f29eb497cf3394c249"}, - {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d3ceb265249fb401702fce3792e6b44c1166b9319737d21495d3611028d95769"}, - {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:52021b528f1571f98a7d4258c58aa8d4b1a96d4f01d00d51f1089f2e0323cb02"}, - {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0f2ca7810b809ed0f1917293050163c7654cefc57a49f337d5cd9de717b8fad3"}, - {file = "frozenlist-1.6.0-cp310-cp310-win32.whl", hash = "sha256:0e6f8653acb82e15e5443dba415fb62a8732b68fe09936bb6d388c725b57f812"}, - {file = "frozenlist-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a39819a5a3e84304cd286e3dc62a549fe60985415851b3337b6f5cc91907f1"}, - {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae8337990e7a45683548ffb2fee1af2f1ed08169284cd829cdd9a7fa7470530d"}, - {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c952f69dd524558694818a461855f35d36cc7f5c0adddce37e962c85d06eac0"}, - {file = "frozenlist-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f5fef13136c4e2dee91bfb9a44e236fff78fc2cd9f838eddfc470c3d7d90afe"}, - {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:716bbba09611b4663ecbb7cd022f640759af8259e12a6ca939c0a6acd49eedba"}, - {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7b8c4dc422c1a3ffc550b465090e53b0bf4839047f3e436a34172ac67c45d595"}, - {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b11534872256e1666116f6587a1592ef395a98b54476addb5e8d352925cb5d4a"}, - {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6eceb88aaf7221f75be6ab498dc622a151f5f88d536661af3ffc486245a626"}, - {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62c828a5b195570eb4b37369fcbbd58e96c905768d53a44d13044355647838ff"}, - {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c6bd2c6399920c9622362ce95a7d74e7f9af9bfec05fff91b8ce4b9647845a"}, - {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49ba23817781e22fcbd45fd9ff2b9b8cdb7b16a42a4851ab8025cae7b22e96d0"}, - {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:431ef6937ae0f853143e2ca67d6da76c083e8b1fe3df0e96f3802fd37626e606"}, - {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9d124b38b3c299ca68433597ee26b7819209cb8a3a9ea761dfe9db3a04bba584"}, - {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:118e97556306402e2b010da1ef21ea70cb6d6122e580da64c056b96f524fbd6a"}, - {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb3b309f1d4086b5533cf7bbcf3f956f0ae6469664522f1bde4feed26fba60f1"}, - {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54dece0d21dce4fdb188a1ffc555926adf1d1c516e493c2914d7c370e454bc9e"}, - {file = "frozenlist-1.6.0-cp311-cp311-win32.whl", hash = "sha256:654e4ba1d0b2154ca2f096bed27461cf6160bc7f504a7f9a9ef447c293caf860"}, 
- {file = "frozenlist-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e911391bffdb806001002c1f860787542f45916c3baf764264a52765d5a5603"}, - {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1"}, - {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29"}, - {file = "frozenlist-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25"}, - {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576"}, - {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8"}, - {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9"}, - {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e"}, - {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590"}, - {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103"}, - {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c"}, - {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821"}, - {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70"}, - {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f"}, - {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046"}, - {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770"}, - {file = "frozenlist-1.6.0-cp312-cp312-win32.whl", hash = "sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc"}, - {file = "frozenlist-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878"}, - {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e"}, - {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117"}, - {file = "frozenlist-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4"}, - {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3"}, - {file = 
"frozenlist-1.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1"}, - {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c"}, - {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45"}, - {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f"}, - {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85"}, - {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8"}, - {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f"}, - {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f"}, - {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6"}, - {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188"}, - {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e"}, - {file = "frozenlist-1.6.0-cp313-cp313-win32.whl", hash = "sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4"}, - {file = "frozenlist-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd"}, - {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64"}, - {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91"}, - {file = "frozenlist-1.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd"}, - {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2"}, - {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506"}, - {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0"}, - {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0"}, - {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e"}, - {file = 
"frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c"}, - {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b"}, - {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad"}, - {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215"}, - {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2"}, - {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911"}, - {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497"}, - {file = "frozenlist-1.6.0-cp313-cp313t-win32.whl", hash = "sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f"}, - {file = "frozenlist-1.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348"}, - {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:536a1236065c29980c15c7229fbb830dedf809708c10e159b8136534233545f0"}, - {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ed5e3a4462ff25ca84fb09e0fada8ea267df98a450340ead4c91b44857267d70"}, - {file = "frozenlist-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e19c0fc9f4f030fcae43b4cdec9e8ab83ffe30ec10c79a4a43a04d1af6c5e1ad"}, - {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c608f833897501dac548585312d73a7dca028bf3b8688f0d712b7acfaf7fb3"}, - {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0dbae96c225d584f834b8d3cc688825911960f003a85cb0fd20b6e5512468c42"}, - {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:625170a91dd7261a1d1c2a0c1a353c9e55d21cd67d0852185a5fef86587e6f5f"}, - {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1db8b2fc7ee8a940b547a14c10e56560ad3ea6499dc6875c354e2335812f739d"}, - {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4da6fc43048b648275a220e3a61c33b7fff65d11bdd6dcb9d9c145ff708b804c"}, - {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef8e7e8f2f3820c5f175d70fdd199b79e417acf6c72c5d0aa8f63c9f721646f"}, - {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa733d123cc78245e9bb15f29b44ed9e5780dc6867cfc4e544717b91f980af3b"}, - {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ba7f8d97152b61f22d7f59491a781ba9b177dd9f318486c5fbc52cde2db12189"}, - {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:56a0b8dd6d0d3d971c91f1df75e824986667ccce91e20dca2023683814344791"}, - {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5c9e89bf19ca148efcc9e3c44fd4c09d5af85c8a7dd3dbd0da1cb83425ef4983"}, - {file = 
"frozenlist-1.6.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1330f0a4376587face7637dfd245380a57fe21ae8f9d360c1c2ef8746c4195fa"}, - {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2187248203b59625566cac53572ec8c2647a140ee2738b4e36772930377a533c"}, - {file = "frozenlist-1.6.0-cp39-cp39-win32.whl", hash = "sha256:2b8cf4cfea847d6c12af06091561a89740f1f67f331c3fa8623391905e878530"}, - {file = "frozenlist-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:1255d5d64328c5a0d066ecb0f02034d086537925f1f04b50b1ae60d37afbf572"}, - {file = "frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191"}, - {file = "frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68"}, + {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a"}, + {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61"}, + {file = "frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718"}, + {file = "frozenlist-1.7.0-cp310-cp310-win32.whl", hash = "sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e"}, + {file = "frozenlist-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464"}, + 
{file = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a"}, + {file = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750"}, + {file = "frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56"}, + {file = "frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7"}, + {file = "frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d"}, + {file = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2"}, + {file = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb"}, + {file = "frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43"}, + {file = "frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3"}, + {file = "frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a"}, + {file = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee"}, + {file = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d"}, + {file = "frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e"}, + {file = "frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1"}, + {file = "frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba"}, + {file = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d"}, + {file = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d"}, + {file = "frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf"}, + {file = "frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81"}, + {file = "frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e"}, + {file = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630"}, + {file = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71"}, + {file = "frozenlist-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb"}, + {file = "frozenlist-1.7.0-cp39-cp39-win32.whl", hash = "sha256:b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e"}, + {file = "frozenlist-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63"}, + {file = "frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e"}, + {file = "frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f"}, ] [[package]] name = "fsspec" -version = "2025.3.2" +version = "2025.5.1" description = 
"File-system specification" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "fsspec-2025.3.2-py3-none-any.whl", hash = "sha256:2daf8dc3d1dfa65b6aa37748d112773a7a08416f6c70d96b264c96476ecaf711"}, - {file = "fsspec-2025.3.2.tar.gz", hash = "sha256:e52c77ef398680bbd6a98c0e628fbc469491282981209907bbc8aea76a04fdc6"}, + {file = "fsspec-2025.5.1-py3-none-any.whl", hash = "sha256:24d3a2e663d5fc735ab256263c4075f374a174c3410c0b25e5bd1970bceaa462"}, + {file = "fsspec-2025.5.1.tar.gz", hash = "sha256:2e55e47a540b91843b755e83ded97c6e897fa0942b11490113f09e9c443c2475"}, ] [package.extras] @@ -1144,14 +1143,14 @@ tqdm = ["tqdm"] [[package]] name = "google-api-core" -version = "2.24.2" +version = "2.25.0" description = "Google API client core library" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "google_api_core-2.24.2-py3-none-any.whl", hash = "sha256:810a63ac95f3c441b7c0e43d344e372887f62ce9071ba972eacf32672e072de9"}, - {file = "google_api_core-2.24.2.tar.gz", hash = "sha256:81718493daf06d96d6bc76a91c23874dbf2fac0adbbf542831b805ee6e974696"}, + {file = "google_api_core-2.25.0-py3-none-any.whl", hash = "sha256:1db79d1281dcf9f3d10023283299ba38f3dc9f639ec41085968fd23e5bcf512e"}, + {file = "google_api_core-2.25.0.tar.gz", hash = "sha256:9b548e688702f82a34ed8409fb8a6961166f0b7795032f0be8f48308dff4333a"}, ] [package.dependencies] @@ -1165,21 +1164,21 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 requests = ">=2.18.0,<3.0.0" [package.extras] -async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.dev0)"] -grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0) ; python_version >= \"3.11\""] -grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] -grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +async-rest = ["google-auth[aiohttp] (>=2.35.0,<3.0.0)"] +grpc = ["grpcio (>=1.33.2,<2.0.0)", "grpcio (>=1.49.1,<2.0.0) ; python_version >= \"3.11\"", "grpcio-status (>=1.33.2,<2.0.0)", "grpcio-status (>=1.49.1,<2.0.0) ; python_version >= \"3.11\""] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"] [[package]] name = "google-auth" -version = "2.40.1" +version = "2.40.3" description = "Google Authentication Library" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "google_auth-2.40.1-py2.py3-none-any.whl", hash = "sha256:ed4cae4f5c46b41bae1d19c036e06f6c371926e97b19e816fc854eff811974ee"}, - {file = "google_auth-2.40.1.tar.gz", hash = "sha256:58f0e8416a9814c1d86c9b7f6acf6816b51aba167b2c76821965271bac275540"}, + {file = "google_auth-2.40.3-py2.py3-none-any.whl", hash = "sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca"}, + {file = "google_auth-2.40.3.tar.gz", hash = "sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77"}, ] [package.dependencies] @@ -1217,67 +1216,67 @@ grpc = ["grpcio (>=1.44.0,<2.0.0)"] [[package]] name = "grpcio" -version = "1.71.0" +version = "1.73.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, - {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, - {file = 
"grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, - {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, - {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, - {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, - {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, - {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, - {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, - {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, - {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, - {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, - {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, - {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, - {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, - {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, - {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, - {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, - {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, - {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, - {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, - {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, - {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, - {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, - {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, - {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, - {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, - {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, - {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, - {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, - {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, - {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, - {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, - {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, - {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, - {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, - {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, - {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, - {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, - {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, - {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, - {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, - {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, - {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, - {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, - {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, - {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, - {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, - {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, - {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, - {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, + {file = "grpcio-1.73.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d050197eeed50f858ef6c51ab09514856f957dba7b1f7812698260fc9cc417f6"}, + {file = "grpcio-1.73.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:ebb8d5f4b0200916fb292a964a4d41210de92aba9007e33d8551d85800ea16cb"}, + {file = "grpcio-1.73.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:c0811331b469e3f15dda5f90ab71bcd9681189a83944fd6dc908e2c9249041ef"}, + {file = "grpcio-1.73.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12787c791c3993d0ea1cc8bf90393647e9a586066b3b322949365d2772ba965b"}, + {file = "grpcio-1.73.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c17771e884fddf152f2a0df12478e8d02853e5b602a10a9a9f1f52fa02b1d32"}, + {file = "grpcio-1.73.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:275e23d4c428c26b51857bbd95fcb8e528783597207ec592571e4372b300a29f"}, + {file = "grpcio-1.73.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9ffc972b530bf73ef0f948f799482a1bf12d9b6f33406a8e6387c0ca2098a833"}, + {file = "grpcio-1.73.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d269df64aff092b2cec5e015d8ae09c7e90888b5c35c24fdca719a2c9f35"}, + {file = "grpcio-1.73.0-cp310-cp310-win32.whl", hash = "sha256:072d8154b8f74300ed362c01d54af8b93200c1a9077aeaea79828d48598514f1"}, + {file = "grpcio-1.73.0-cp310-cp310-win_amd64.whl", hash = "sha256:ce953d9d2100e1078a76a9dc2b7338d5415924dc59c69a15bf6e734db8a0f1ca"}, + {file = "grpcio-1.73.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:51036f641f171eebe5fa7aaca5abbd6150f0c338dab3a58f9111354240fe36ec"}, + {file = "grpcio-1.73.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:d12bbb88381ea00bdd92c55aff3da3391fd85bc902c41275c8447b86f036ce0f"}, + {file = "grpcio-1.73.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:483c507c2328ed0e01bc1adb13d1eada05cc737ec301d8e5a8f4a90f387f1790"}, + {file = "grpcio-1.73.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c201a34aa960c962d0ce23fe5f423f97e9d4b518ad605eae6d0a82171809caaa"}, + {file = "grpcio-1.73.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:859f70c8e435e8e1fa060e04297c6818ffc81ca9ebd4940e180490958229a45a"}, + {file = "grpcio-1.73.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e2459a27c6886e7e687e4e407778425f3c6a971fa17a16420227bda39574d64b"}, + {file = "grpcio-1.73.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e0084d4559ee3dbdcce9395e1bc90fdd0262529b32c417a39ecbc18da8074ac7"}, + {file = "grpcio-1.73.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef5fff73d5f724755693a464d444ee0a448c6cdfd3c1616a9223f736c622617d"}, + {file = "grpcio-1.73.0-cp311-cp311-win32.whl", hash = "sha256:965a16b71a8eeef91fc4df1dc40dc39c344887249174053814f8a8e18449c4c3"}, + {file = "grpcio-1.73.0-cp311-cp311-win_amd64.whl", hash = "sha256:b71a7b4483d1f753bbc11089ff0f6fa63b49c97a9cc20552cded3fcad466d23b"}, + {file = "grpcio-1.73.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:fb9d7c27089d9ba3746f18d2109eb530ef2a37452d2ff50f5a6696cd39167d3b"}, + {file = "grpcio-1.73.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:128ba2ebdac41e41554d492b82c34586a90ebd0766f8ebd72160c0e3a57b9155"}, + {file = "grpcio-1.73.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = 
"sha256:068ecc415f79408d57a7f146f54cdf9f0acb4b301a52a9e563973dc981e82f3d"}, + {file = "grpcio-1.73.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ddc1cfb2240f84d35d559ade18f69dcd4257dbaa5ba0de1a565d903aaab2968"}, + {file = "grpcio-1.73.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e53007f70d9783f53b41b4cf38ed39a8e348011437e4c287eee7dd1d39d54b2f"}, + {file = "grpcio-1.73.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4dd8d8d092efede7d6f48d695ba2592046acd04ccf421436dd7ed52677a9ad29"}, + {file = "grpcio-1.73.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:70176093d0a95b44d24baa9c034bb67bfe2b6b5f7ebc2836f4093c97010e17fd"}, + {file = "grpcio-1.73.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:085ebe876373ca095e24ced95c8f440495ed0b574c491f7f4f714ff794bbcd10"}, + {file = "grpcio-1.73.0-cp312-cp312-win32.whl", hash = "sha256:cfc556c1d6aef02c727ec7d0016827a73bfe67193e47c546f7cadd3ee6bf1a60"}, + {file = "grpcio-1.73.0-cp312-cp312-win_amd64.whl", hash = "sha256:bbf45d59d090bf69f1e4e1594832aaf40aa84b31659af3c5e2c3f6a35202791a"}, + {file = "grpcio-1.73.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:da1d677018ef423202aca6d73a8d3b2cb245699eb7f50eb5f74cae15a8e1f724"}, + {file = "grpcio-1.73.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:36bf93f6a657f37c131d9dd2c391b867abf1426a86727c3575393e9e11dadb0d"}, + {file = "grpcio-1.73.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:d84000367508ade791d90c2bafbd905574b5ced8056397027a77a215d601ba15"}, + {file = "grpcio-1.73.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c98ba1d928a178ce33f3425ff823318040a2b7ef875d30a0073565e5ceb058d9"}, + {file = "grpcio-1.73.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a73c72922dfd30b396a5f25bb3a4590195ee45ecde7ee068acb0892d2900cf07"}, + {file = "grpcio-1.73.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:10e8edc035724aba0346a432060fd192b42bd03675d083c01553cab071a28da5"}, + {file = "grpcio-1.73.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f5cdc332b503c33b1643b12ea933582c7b081957c8bc2ea4cc4bc58054a09288"}, + {file = "grpcio-1.73.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:07ad7c57233c2109e4ac999cb9c2710c3b8e3f491a73b058b0ce431f31ed8145"}, + {file = "grpcio-1.73.0-cp313-cp313-win32.whl", hash = "sha256:0eb5df4f41ea10bda99a802b2a292d85be28958ede2a50f2beb8c7fc9a738419"}, + {file = "grpcio-1.73.0-cp313-cp313-win_amd64.whl", hash = "sha256:38cf518cc54cd0c47c9539cefa8888549fcc067db0b0c66a46535ca8032020c4"}, + {file = "grpcio-1.73.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:1284850607901cfe1475852d808e5a102133461ec9380bc3fc9ebc0686ee8e32"}, + {file = "grpcio-1.73.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:0e092a4b28eefb63eec00d09ef33291cd4c3a0875cde29aec4d11d74434d222c"}, + {file = "grpcio-1.73.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:33577fe7febffe8ebad458744cfee8914e0c10b09f0ff073a6b149a84df8ab8f"}, + {file = "grpcio-1.73.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:60813d8a16420d01fa0da1fc7ebfaaa49a7e5051b0337cd48f4f950eb249a08e"}, + {file = "grpcio-1.73.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9c957dc65e5d474378d7bcc557e9184576605d4b4539e8ead6e351d7ccce20"}, + {file = "grpcio-1.73.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3902b71407d021163ea93c70c8531551f71ae742db15b66826cf8825707d2908"}, + {file = 
"grpcio-1.73.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1dd7fa7276dcf061e2d5f9316604499eea06b1b23e34a9380572d74fe59915a8"}, + {file = "grpcio-1.73.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2d1510c4ea473110cb46a010555f2c1a279d1c256edb276e17fa571ba1e8927c"}, + {file = "grpcio-1.73.0-cp39-cp39-win32.whl", hash = "sha256:d0a1517b2005ba1235a1190b98509264bf72e231215dfeef8db9a5a92868789e"}, + {file = "grpcio-1.73.0-cp39-cp39-win_amd64.whl", hash = "sha256:6228f7eb6d9f785f38b589d49957fca5df3d5b5349e77d2d89b14e390165344c"}, + {file = "grpcio-1.73.0.tar.gz", hash = "sha256:3af4c30918a7f0d39de500d11255f8d9da4f30e94a2033e70fe2a720e184bd8e"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.71.0)"] +protobuf = ["grpcio-tools (>=1.73.0)"] [[package]] name = "h11" @@ -1413,14 +1412,14 @@ test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio [[package]] name = "ipython" -version = "9.2.0" +version = "9.3.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.11" groups = ["main", "test"] files = [ - {file = "ipython-9.2.0-py3-none-any.whl", hash = "sha256:fef5e33c4a1ae0759e0bba5917c9db4eb8c53fee917b6a526bd973e1ca5159f6"}, - {file = "ipython-9.2.0.tar.gz", hash = "sha256:62a9373dbc12f28f9feaf4700d052195bf89806279fc8ca11f3f54017d04751b"}, + {file = "ipython-9.3.0-py3-none-any.whl", hash = "sha256:1a0b6dd9221a1f5dddf725b57ac0cb6fddc7b5f470576231ae9162b9b3455a04"}, + {file = "ipython-9.3.0.tar.gz", hash = "sha256:79eb896f9f23f50ad16c3bc205f686f6e030ad246cc309c6279a242b14afe9d8"}, ] [package.dependencies] @@ -1563,14 +1562,14 @@ files = [ [[package]] name = "jsonschema" -version = "4.23.0" +version = "4.24.0" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "test"] files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, + {file = "jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"}, + {file = "jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196"}, ] [package.dependencies] @@ -1631,14 +1630,14 @@ test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko ; sys_platform == \" [[package]] name = "jupyter-core" -version = "5.7.2" +version = "5.8.1" description = "Jupyter core package. A base package on which Jupyter projects rely." 
optional = false python-versions = ">=3.8" groups = ["test"] files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, + {file = "jupyter_core-5.8.1-py3-none-any.whl", hash = "sha256:c28d268fc90fb53f1338ded2eb410704c5449a358406e8a948b75706e24863d0"}, + {file = "jupyter_core-5.8.1.tar.gz", hash = "sha256:0a5f9706f70e64786b75acba995988915ebd4601c8a52e534a40b51c95f59941"}, ] [package.dependencies] @@ -1647,8 +1646,8 @@ pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_ traitlets = ">=5.3" [package.extras] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] -test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] +docs = ["intersphinx-registry", "myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<9)", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyter-events" @@ -1694,14 +1693,14 @@ jupyter-server = ">=1.1.2" [[package]] name = "jupyter-server" -version = "2.15.0" +version = "2.16.0" description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." optional = false python-versions = ">=3.9" groups = ["test"] files = [ - {file = "jupyter_server-2.15.0-py3-none-any.whl", hash = "sha256:872d989becf83517012ee669f09604aa4a28097c0bd90b2f424310156c2cdae3"}, - {file = "jupyter_server-2.15.0.tar.gz", hash = "sha256:9d446b8697b4f7337a1b7cdcac40778babdd93ba614b6d68ab1c0c918f1c4084"}, + {file = "jupyter_server-2.16.0-py3-none-any.whl", hash = "sha256:3d8db5be3bc64403b1c65b400a1d7f4647a5ce743f3b20dbdefe8ddb7b55af9e"}, + {file = "jupyter_server-2.16.0.tar.gz", hash = "sha256:65d4b44fdf2dcbbdfe0aa1ace4a842d4aaf746a2b7b168134d5aaed35621b7f6"}, ] [package.dependencies] @@ -1835,14 +1834,14 @@ files = [ [[package]] name = "kubernetes" -version = "32.0.1" +version = "33.1.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" groups = ["main"] files = [ - {file = "kubernetes-32.0.1-py2.py3-none-any.whl", hash = "sha256:35282ab8493b938b08ab5526c7ce66588232df00ef5e1dbe88a419107dc10998"}, - {file = "kubernetes-32.0.1.tar.gz", hash = "sha256:42f43d49abd437ada79a79a16bd48a604d3471a117a8347e87db693f2ba0ba28"}, + {file = "kubernetes-33.1.0-py2.py3-none-any.whl", hash = "sha256:544de42b24b64287f7e0aa9513c93cb503f7f40eea39b20f66810011a86eabc5"}, + {file = "kubernetes-33.1.0.tar.gz", hash = "sha256:f64d829843a54c251061a8e7a14523b521f2dc5c896cf6d65ccf348648a88993"}, ] [package.dependencies] @@ -2072,116 +2071,116 @@ files = [ [[package]] name = "multidict" -version = "6.4.3" +version = "6.4.4" description = "multidict implementation" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5"}, - {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188"}, - {file = "multidict-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7"}, - {file = 
"multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291"}, - {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685"}, - {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf"}, - {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1"}, - {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef"}, - {file = "multidict-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9"}, - {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078"}, - {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7"}, - {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451"}, - {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666"}, - {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c"}, - {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5"}, - {file = "multidict-6.4.3-cp310-cp310-win32.whl", hash = "sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e"}, - {file = "multidict-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887"}, - {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd"}, - {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8"}, - {file = "multidict-6.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad"}, - {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852"}, - {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08"}, - {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229"}, - {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508"}, - {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7"}, - {file = "multidict-6.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8"}, - {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56"}, - {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0"}, - {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777"}, - {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2"}, - {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618"}, - {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7"}, - {file = "multidict-6.4.3-cp311-cp311-win32.whl", hash = "sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378"}, - {file = "multidict-6.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589"}, - {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676"}, - {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1"}, - {file = "multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a"}, - {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054"}, - {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc"}, - {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07"}, - {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde"}, - {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c"}, - {file = "multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae"}, - {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3"}, - {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507"}, - {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427"}, - {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731"}, - {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713"}, - {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a"}, - {file = "multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124"}, - {file = "multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db"}, - {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474"}, - {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd"}, - {file = "multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b"}, - {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3"}, - {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac"}, - {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790"}, - {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb"}, - {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0"}, - {file = "multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9"}, - {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8"}, - {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1"}, - {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817"}, - {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d"}, - {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9"}, - {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8"}, - {file = "multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3"}, - {file = "multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5"}, - {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6"}, - {file = 
"multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c"}, - {file = "multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756"}, - {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375"}, - {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be"}, - {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea"}, - {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8"}, - {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02"}, - {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124"}, - {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44"}, - {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b"}, - {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504"}, - {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf"}, - {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4"}, - {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4"}, - {file = "multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5"}, - {file = "multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208"}, - {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21"}, - {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b"}, - {file = "multidict-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459"}, - {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840"}, - {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39"}, - {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f"}, - {file = 
"multidict-6.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343"}, - {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2"}, - {file = "multidict-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6"}, - {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e"}, - {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1"}, - {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8"}, - {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7"}, - {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752"}, - {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df"}, - {file = "multidict-6.4.3-cp39-cp39-win32.whl", hash = "sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f"}, - {file = "multidict-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897"}, - {file = "multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9"}, - {file = "multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, + {file = "multidict-6.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8adee3ac041145ffe4488ea73fa0a622b464cc25340d98be76924d0cda8545ff"}, + {file = "multidict-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b61e98c3e2a861035aaccd207da585bdcacef65fe01d7a0d07478efac005e028"}, + {file = "multidict-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75493f28dbadecdbb59130e74fe935288813301a8554dc32f0c631b6bdcdf8b0"}, + {file = "multidict-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffc3c6a37e048b5395ee235e4a2a0d639c2349dffa32d9367a42fc20d399772"}, + {file = "multidict-6.4.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:87cb72263946b301570b0f63855569a24ee8758aaae2cd182aae7d95fbc92ca7"}, + {file = "multidict-6.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bbf7bd39822fd07e3609b6b4467af4c404dd2b88ee314837ad1830a7f4a8299"}, + {file = "multidict-6.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1f7cbd4f1f44ddf5fd86a8675b7679176eae770f2fc88115d6dddb6cefb59bc"}, + {file = "multidict-6.4.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5ac9e5bfce0e6282e7f59ff7b7b9a74aa8e5c60d38186a4637f5aa764046ad"}, + {file = "multidict-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4efc31dfef8c4eeb95b6b17d799eedad88c4902daba39ce637e23a17ea078915"}, + {file = "multidict-6.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:9fcad2945b1b91c29ef2b4050f590bfcb68d8ac8e0995a74e659aa57e8d78e01"}, + {file = "multidict-6.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d877447e7368c7320832acb7159557e49b21ea10ffeb135c1077dbbc0816b598"}, + {file = "multidict-6.4.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:33a12ebac9f380714c298cbfd3e5b9c0c4e89c75fe612ae496512ee51028915f"}, + {file = "multidict-6.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0f14ea68d29b43a9bf37953881b1e3eb75b2739e896ba4a6aa4ad4c5b9ffa145"}, + {file = "multidict-6.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0327ad2c747a6600e4797d115d3c38a220fdb28e54983abe8964fd17e95ae83c"}, + {file = "multidict-6.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d1a20707492db9719a05fc62ee215fd2c29b22b47c1b1ba347f9abc831e26683"}, + {file = "multidict-6.4.4-cp310-cp310-win32.whl", hash = "sha256:d83f18315b9fca5db2452d1881ef20f79593c4aa824095b62cb280019ef7aa3d"}, + {file = "multidict-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:9c17341ee04545fd962ae07330cb5a39977294c883485c8d74634669b1f7fe04"}, + {file = "multidict-6.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4f5f29794ac0e73d2a06ac03fd18870adc0135a9d384f4a306a951188ed02f95"}, + {file = "multidict-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c04157266344158ebd57b7120d9b0b35812285d26d0e78193e17ef57bfe2979a"}, + {file = "multidict-6.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb61ffd3ab8310d93427e460f565322c44ef12769f51f77277b4abad7b6f7223"}, + {file = "multidict-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e0ba18a9afd495f17c351d08ebbc4284e9c9f7971d715f196b79636a4d0de44"}, + {file = "multidict-6.4.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9faf1b1dcaadf9f900d23a0e6d6c8eadd6a95795a0e57fcca73acce0eb912065"}, + {file = "multidict-6.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a4d1cb1327c6082c4fce4e2a438483390964c02213bc6b8d782cf782c9b1471f"}, + {file = "multidict-6.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:941f1bec2f5dbd51feeb40aea654c2747f811ab01bdd3422a48a4e4576b7d76a"}, + {file = "multidict-6.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5f8a146184da7ea12910a4cec51ef85e44f6268467fb489c3caf0cd512f29c2"}, + {file = "multidict-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:232b7237e57ec3c09be97206bfb83a0aa1c5d7d377faa019c68a210fa35831f1"}, + {file = "multidict-6.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:55ae0721c1513e5e3210bca4fc98456b980b0c2c016679d3d723119b6b202c42"}, + {file = "multidict-6.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:51d662c072579f63137919d7bb8fc250655ce79f00c82ecf11cab678f335062e"}, + {file = "multidict-6.4.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0e05c39962baa0bb19a6b210e9b1422c35c093b651d64246b6c2e1a7e242d9fd"}, + {file = "multidict-6.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5b1cc3ab8c31d9ebf0faa6e3540fb91257590da330ffe6d2393d4208e638925"}, + {file = "multidict-6.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:93ec84488a384cd7b8a29c2c7f467137d8a73f6fe38bb810ecf29d1ade011a7c"}, + {file = "multidict-6.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b308402608493638763abc95f9dc0030bbd6ac6aff784512e8ac3da73a88af08"}, + {file = 
"multidict-6.4.4-cp311-cp311-win32.whl", hash = "sha256:343892a27d1a04d6ae455ecece12904d242d299ada01633d94c4f431d68a8c49"}, + {file = "multidict-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:73484a94f55359780c0f458bbd3c39cb9cf9c182552177d2136e828269dee529"}, + {file = "multidict-6.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:dc388f75a1c00000824bf28b7633e40854f4127ede80512b44c3cfeeea1839a2"}, + {file = "multidict-6.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:98af87593a666f739d9dba5d0ae86e01b0e1a9cfcd2e30d2d361fbbbd1a9162d"}, + {file = "multidict-6.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aff4cafea2d120327d55eadd6b7f1136a8e5a0ecf6fb3b6863e8aca32cd8e50a"}, + {file = "multidict-6.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:169c4ba7858176b797fe551d6e99040c531c775d2d57b31bcf4de6d7a669847f"}, + {file = "multidict-6.4.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9eb4c59c54421a32b3273d4239865cb14ead53a606db066d7130ac80cc8ec93"}, + {file = "multidict-6.4.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7cf3bd54c56aa16fdb40028d545eaa8d051402b61533c21e84046e05513d5780"}, + {file = "multidict-6.4.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f682c42003c7264134bfe886376299db4cc0c6cd06a3295b41b347044bcb5482"}, + {file = "multidict-6.4.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920f9cf2abdf6e493c519492d892c362007f113c94da4c239ae88429835bad1"}, + {file = "multidict-6.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:530d86827a2df6504526106b4c104ba19044594f8722d3e87714e847c74a0275"}, + {file = "multidict-6.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ecde56ea2439b96ed8a8d826b50c57364612ddac0438c39e473fafad7ae1c23b"}, + {file = "multidict-6.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:dc8c9736d8574b560634775ac0def6bdc1661fc63fa27ffdfc7264c565bcb4f2"}, + {file = "multidict-6.4.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7f3d3b3c34867579ea47cbd6c1f2ce23fbfd20a273b6f9e3177e256584f1eacc"}, + {file = "multidict-6.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:87a728af265e08f96b6318ebe3c0f68b9335131f461efab2fc64cc84a44aa6ed"}, + {file = "multidict-6.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9f193eeda1857f8e8d3079a4abd258f42ef4a4bc87388452ed1e1c4d2b0c8740"}, + {file = "multidict-6.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be06e73c06415199200e9a2324a11252a3d62030319919cde5e6950ffeccf72e"}, + {file = "multidict-6.4.4-cp312-cp312-win32.whl", hash = "sha256:622f26ea6a7e19b7c48dd9228071f571b2fbbd57a8cd71c061e848f281550e6b"}, + {file = "multidict-6.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:5e2bcda30d5009996ff439e02a9f2b5c3d64a20151d34898c000a6281faa3781"}, + {file = "multidict-6.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:82ffabefc8d84c2742ad19c37f02cde5ec2a1ee172d19944d380f920a340e4b9"}, + {file = "multidict-6.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6a2f58a66fe2c22615ad26156354005391e26a2f3721c3621504cd87c1ea87bf"}, + {file = "multidict-6.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5883d6ee0fd9d8a48e9174df47540b7545909841ac82354c7ae4cbe9952603bd"}, + {file = "multidict-6.4.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9abcf56a9511653fa1d052bfc55fbe53dbee8f34e68bd6a5a038731b0ca42d15"}, + {file = "multidict-6.4.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6ed5ae5605d4ad5a049fad2a28bb7193400700ce2f4ae484ab702d1e3749c3f9"}, + {file = "multidict-6.4.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbfcb60396f9bcfa63e017a180c3105b8c123a63e9d1428a36544e7d37ca9e20"}, + {file = "multidict-6.4.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0f1987787f5f1e2076b59692352ab29a955b09ccc433c1f6b8e8e18666f608b"}, + {file = "multidict-6.4.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0121ccce8c812047d8d43d691a1ad7641f72c4f730474878a5aeae1b8ead8c"}, + {file = "multidict-6.4.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83ec4967114295b8afd120a8eec579920c882831a3e4c3331d591a8e5bfbbc0f"}, + {file = "multidict-6.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:995f985e2e268deaf17867801b859a282e0448633f1310e3704b30616d269d69"}, + {file = "multidict-6.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:d832c608f94b9f92a0ec8b7e949be7792a642b6e535fcf32f3e28fab69eeb046"}, + {file = "multidict-6.4.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d21c1212171cf7da703c5b0b7a0e85be23b720818aef502ad187d627316d5645"}, + {file = "multidict-6.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:cbebaa076aaecad3d4bb4c008ecc73b09274c952cf6a1b78ccfd689e51f5a5b0"}, + {file = "multidict-6.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:c93a6fb06cc8e5d3628b2b5fda215a5db01e8f08fc15fadd65662d9b857acbe4"}, + {file = "multidict-6.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8cd8f81f1310182362fb0c7898145ea9c9b08a71081c5963b40ee3e3cac589b1"}, + {file = "multidict-6.4.4-cp313-cp313-win32.whl", hash = "sha256:3e9f1cd61a0ab857154205fb0b1f3d3ace88d27ebd1409ab7af5096e409614cd"}, + {file = "multidict-6.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:8ffb40b74400e4455785c2fa37eba434269149ec525fc8329858c862e4b35373"}, + {file = "multidict-6.4.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:6a602151dbf177be2450ef38966f4be3467d41a86c6a845070d12e17c858a156"}, + {file = "multidict-6.4.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d2b9712211b860d123815a80b859075d86a4d54787e247d7fbee9db6832cf1c"}, + {file = "multidict-6.4.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d2fa86af59f8fc1972e121ade052145f6da22758f6996a197d69bb52f8204e7e"}, + {file = "multidict-6.4.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50855d03e9e4d66eab6947ba688ffb714616f985838077bc4b490e769e48da51"}, + {file = "multidict-6.4.4-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5bce06b83be23225be1905dcdb6b789064fae92499fbc458f59a8c0e68718601"}, + {file = "multidict-6.4.4-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66ed0731f8e5dfd8369a883b6e564aca085fb9289aacabd9decd70568b9a30de"}, + {file = "multidict-6.4.4-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:329ae97fc2f56f44d91bc47fe0972b1f52d21c4b7a2ac97040da02577e2daca2"}, + {file = "multidict-6.4.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c27e5dcf520923d6474d98b96749e6805f7677e93aaaf62656005b8643f907ab"}, + {file = 
"multidict-6.4.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:058cc59b9e9b143cc56715e59e22941a5d868c322242278d28123a5d09cdf6b0"}, + {file = "multidict-6.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:69133376bc9a03f8c47343d33f91f74a99c339e8b58cea90433d8e24bb298031"}, + {file = "multidict-6.4.4-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d6b15c55721b1b115c5ba178c77104123745b1417527ad9641a4c5e2047450f0"}, + {file = "multidict-6.4.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a887b77f51d3d41e6e1a63cf3bc7ddf24de5939d9ff69441387dfefa58ac2e26"}, + {file = "multidict-6.4.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:632a3bf8f1787f7ef7d3c2f68a7bde5be2f702906f8b5842ad6da9d974d0aab3"}, + {file = "multidict-6.4.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a145c550900deb7540973c5cdb183b0d24bed6b80bf7bddf33ed8f569082535e"}, + {file = "multidict-6.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc5d83c6619ca5c9672cb78b39ed8542f1975a803dee2cda114ff73cbb076edd"}, + {file = "multidict-6.4.4-cp313-cp313t-win32.whl", hash = "sha256:3312f63261b9df49be9d57aaa6abf53a6ad96d93b24f9cc16cf979956355ce6e"}, + {file = "multidict-6.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:ba852168d814b2c73333073e1c7116d9395bea69575a01b0b3c89d2d5a87c8fb"}, + {file = "multidict-6.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:603f39bd1cf85705c6c1ba59644b480dfe495e6ee2b877908de93322705ad7cf"}, + {file = "multidict-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc60f91c02e11dfbe3ff4e1219c085695c339af72d1641800fe6075b91850c8f"}, + {file = "multidict-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:496bcf01c76a70a31c3d746fd39383aad8d685ce6331e4c709e9af4ced5fa221"}, + {file = "multidict-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4219390fb5bf8e548e77b428bb36a21d9382960db5321b74d9d9987148074d6b"}, + {file = "multidict-6.4.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef4e9096ff86dfdcbd4a78253090ba13b1d183daa11b973e842465d94ae1772"}, + {file = "multidict-6.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49a29d7133b1fc214e818bbe025a77cc6025ed9a4f407d2850373ddde07fd04a"}, + {file = "multidict-6.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e32053d6d3a8b0dfe49fde05b496731a0e6099a4df92154641c00aa76786aef5"}, + {file = "multidict-6.4.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc403092a49509e8ef2d2fd636a8ecefc4698cc57bbe894606b14579bc2a955"}, + {file = "multidict-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5363f9b2a7f3910e5c87d8b1855c478c05a2dc559ac57308117424dfaad6805c"}, + {file = "multidict-6.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e543a40e4946cf70a88a3be87837a3ae0aebd9058ba49e91cacb0b2cd631e2b"}, + {file = "multidict-6.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:60d849912350da557fe7de20aa8cf394aada6980d0052cc829eeda4a0db1c1db"}, + {file = "multidict-6.4.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:19d08b4f22eae45bb018b9f06e2838c1e4b853c67628ef8ae126d99de0da6395"}, + {file = "multidict-6.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d693307856d1ef08041e8b6ff01d5b4618715007d288490ce2c7e29013c12b9a"}, + {file = "multidict-6.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:fad6daaed41021934917f4fb03ca2db8d8a4d79bf89b17ebe77228eb6710c003"}, + {file = "multidict-6.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c10d17371bff801af0daf8b073c30b6cf14215784dc08cd5c43ab5b7b8029bbc"}, + {file = "multidict-6.4.4-cp39-cp39-win32.whl", hash = "sha256:7e23f2f841fcb3ebd4724a40032d32e0892fbba4143e43d2a9e7695c5e50e6bd"}, + {file = "multidict-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:4d7b50b673ffb4ff4366e7ab43cf1f0aef4bd3608735c5fbdf0bdb6f690da411"}, + {file = "multidict-6.4.4-py3-none-any.whl", hash = "sha256:bd4557071b561a8b3b6075c3ce93cf9bfb6182cb241805c3d66ced3b75eff4ac"}, + {file = "multidict-6.4.4.tar.gz", hash = "sha256:69ee9e6ba214b5245031b76233dd95408a0fd57fdb019ddcc1ead4790932a8e8"}, ] [[package]] @@ -2298,67 +2297,63 @@ test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync" [[package]] name = "numpy" -version = "2.2.5" +version = "2.3.0" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.10" +python-versions = ">=3.11" groups = ["main"] files = [ - {file = "numpy-2.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f4a922da1729f4c40932b2af4fe84909c7a6e167e6e99f71838ce3a29f3fe26"}, - {file = "numpy-2.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6f91524d31b34f4a5fee24f5bc16dcd1491b668798b6d85585d836c1e633a6a"}, - {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:19f4718c9012e3baea91a7dba661dcab2451cda2550678dc30d53acb91a7290f"}, - {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:eb7fd5b184e5d277afa9ec0ad5e4eb562ecff541e7f60e69ee69c8d59e9aeaba"}, - {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6413d48a9be53e183eb06495d8e3b006ef8f87c324af68241bbe7a39e8ff54c3"}, - {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7451f92eddf8503c9b8aa4fe6aa7e87fd51a29c2cfc5f7dbd72efde6c65acf57"}, - {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0bcb1d057b7571334139129b7f941588f69ce7c4ed15a9d6162b2ea54ded700c"}, - {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:36ab5b23915887543441efd0417e6a3baa08634308894316f446027611b53bf1"}, - {file = "numpy-2.2.5-cp310-cp310-win32.whl", hash = "sha256:422cc684f17bc963da5f59a31530b3936f57c95a29743056ef7a7903a5dbdf88"}, - {file = "numpy-2.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:e4f0b035d9d0ed519c813ee23e0a733db81ec37d2e9503afbb6e54ccfdee0fa7"}, - {file = "numpy-2.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c42365005c7a6c42436a54d28c43fe0e01ca11eb2ac3cefe796c25a5f98e5e9b"}, - {file = "numpy-2.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:498815b96f67dc347e03b719ef49c772589fb74b8ee9ea2c37feae915ad6ebda"}, - {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:6411f744f7f20081b1b4e7112e0f4c9c5b08f94b9f086e6f0adf3645f85d3a4d"}, - {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9de6832228f617c9ef45d948ec1cd8949c482238d68b2477e6f642c33a7b0a54"}, - {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:369e0d4647c17c9363244f3468f2227d557a74b6781cb62ce57cf3ef5cc7c610"}, - {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:262d23f383170f99cd9191a7c85b9a50970fe9069b2f8ab5d786eca8a675d60b"}, - {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:aa70fdbdc3b169d69e8c59e65c07a1c9351ceb438e627f0fdcd471015cd956be"}, - {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37e32e985f03c06206582a7323ef926b4e78bdaa6915095ef08070471865b906"}, - {file = "numpy-2.2.5-cp311-cp311-win32.whl", hash = "sha256:f5045039100ed58fa817a6227a356240ea1b9a1bc141018864c306c1a16d4175"}, - {file = "numpy-2.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:b13f04968b46ad705f7c8a80122a42ae8f620536ea38cf4bdd374302926424dd"}, - {file = "numpy-2.2.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ee461a4eaab4f165b68780a6a1af95fb23a29932be7569b9fab666c407969051"}, - {file = "numpy-2.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec31367fd6a255dc8de4772bd1658c3e926d8e860a0b6e922b615e532d320ddc"}, - {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:47834cde750d3c9f4e52c6ca28a7361859fcaf52695c7dc3cc1a720b8922683e"}, - {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:2c1a1c6ccce4022383583a6ded7bbcda22fc635eb4eb1e0a053336425ed36dfa"}, - {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d75f338f5f79ee23548b03d801d28a505198297534f62416391857ea0479571"}, - {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a801fef99668f309b88640e28d261991bfad9617c27beda4a3aec4f217ea073"}, - {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:abe38cd8381245a7f49967a6010e77dbf3680bd3627c0fe4362dd693b404c7f8"}, - {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a0ac90e46fdb5649ab6369d1ab6104bfe5854ab19b645bf5cda0127a13034ae"}, - {file = "numpy-2.2.5-cp312-cp312-win32.whl", hash = "sha256:0cd48122a6b7eab8f06404805b1bd5856200e3ed6f8a1b9a194f9d9054631beb"}, - {file = "numpy-2.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:ced69262a8278547e63409b2653b372bf4baff0870c57efa76c5703fd6543282"}, - {file = "numpy-2.2.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:059b51b658f4414fff78c6d7b1b4e18283ab5fa56d270ff212d5ba0c561846f4"}, - {file = "numpy-2.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47f9ed103af0bc63182609044b0490747e03bd20a67e391192dde119bf43d52f"}, - {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:261a1ef047751bb02f29dfe337230b5882b54521ca121fc7f62668133cb119c9"}, - {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4520caa3807c1ceb005d125a75e715567806fed67e315cea619d5ec6e75a4191"}, - {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d14b17b9be5f9c9301f43d2e2a4886a33b53f4e6fdf9ca2f4cc60aeeee76372"}, - {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba321813a00e508d5421104464510cc962a6f791aa2fca1c97b1e65027da80d"}, - {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4cbdef3ddf777423060c6f81b5694bad2dc9675f110c4b2a60dc0181543fac7"}, - {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:54088a5a147ab71a8e7fdfd8c3601972751ded0739c6b696ad9cb0343e21ab73"}, - {file = "numpy-2.2.5-cp313-cp313-win32.whl", hash = "sha256:c8b82a55ef86a2d8e81b63da85e55f5537d2157165be1cb2ce7cfa57b6aef38b"}, - {file = "numpy-2.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:d8882a829fd779f0f43998e931c466802a77ca1ee0fe25a3abe50278616b1471"}, - {file = "numpy-2.2.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e8b025c351b9f0e8b5436cf28a07fa4ac0204d67b38f01433ac7f9b870fa38c6"}, 
- {file = "numpy-2.2.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dfa94b6a4374e7851bbb6f35e6ded2120b752b063e6acdd3157e4d2bb922eba"}, - {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:97c8425d4e26437e65e1d189d22dff4a079b747ff9c2788057bfb8114ce1e133"}, - {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:352d330048c055ea6db701130abc48a21bec690a8d38f8284e00fab256dc1376"}, - {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b4c0773b6ada798f51f0f8e30c054d32304ccc6e9c5d93d46cb26f3d385ab19"}, - {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55f09e00d4dccd76b179c0f18a44f041e5332fd0e022886ba1c0bbf3ea4a18d0"}, - {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02f226baeefa68f7d579e213d0f3493496397d8f1cff5e2b222af274c86a552a"}, - {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c26843fd58f65da9491165072da2cccc372530681de481ef670dcc8e27cfb066"}, - {file = "numpy-2.2.5-cp313-cp313t-win32.whl", hash = "sha256:1a161c2c79ab30fe4501d5a2bbfe8b162490757cf90b7f05be8b80bc02f7bb8e"}, - {file = "numpy-2.2.5-cp313-cp313t-win_amd64.whl", hash = "sha256:d403c84991b5ad291d3809bace5e85f4bbf44a04bdc9a88ed2bb1807b3360bb8"}, - {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b4ea7e1cff6784e58fe281ce7e7f05036b3e1c89c6f922a6bfbc0a7e8768adbe"}, - {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d7543263084a85fbc09c704b515395398d31d6395518446237eac219eab9e55e"}, - {file = "numpy-2.2.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0255732338c4fdd00996c0421884ea8a3651eea555c3a56b84892b66f696eb70"}, - {file = "numpy-2.2.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d2e3bdadaba0e040d1e7ab39db73e0afe2c74ae277f5614dad53eadbecbbb169"}, - {file = "numpy-2.2.5.tar.gz", hash = "sha256:a9c0d994680cd991b1cb772e8b297340085466a6fe964bc9d4e80f5e2f43c291"}, + {file = "numpy-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3c9fdde0fa18afa1099d6257eb82890ea4f3102847e692193b54e00312a9ae9"}, + {file = "numpy-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46d16f72c2192da7b83984aa5455baee640e33a9f1e61e656f29adf55e406c2b"}, + {file = "numpy-2.3.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a0be278be9307c4ab06b788f2a077f05e180aea817b3e41cebbd5aaf7bd85ed3"}, + {file = "numpy-2.3.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:99224862d1412d2562248d4710126355d3a8db7672170a39d6909ac47687a8a4"}, + {file = "numpy-2.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2393a914db64b0ead0ab80c962e42d09d5f385802006a6c87835acb1f58adb96"}, + {file = "numpy-2.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:7729c8008d55e80784bd113787ce876ca117185c579c0d626f59b87d433ea779"}, + {file = "numpy-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:06d4fb37a8d383b769281714897420c5cc3545c79dc427df57fc9b852ee0bf58"}, + {file = "numpy-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c39ec392b5db5088259c68250e342612db82dc80ce044cf16496cf14cf6bc6f8"}, + {file = "numpy-2.3.0-cp311-cp311-win32.whl", hash = "sha256:ee9d3ee70d62827bc91f3ea5eee33153212c41f639918550ac0475e3588da59f"}, + {file = "numpy-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:43c55b6a860b0eb44d42341438b03513cf3879cb3617afb749ad49307e164edd"}, + {file = "numpy-2.3.0-cp311-cp311-win_arm64.whl", hash = 
"sha256:2e6a1409eee0cb0316cb64640a49a49ca44deb1a537e6b1121dc7c458a1299a8"}, + {file = "numpy-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:389b85335838155a9076e9ad7f8fdba0827496ec2d2dc32ce69ce7898bde03ba"}, + {file = "numpy-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9498f60cd6bb8238d8eaf468a3d5bb031d34cd12556af53510f05fcf581c1b7e"}, + {file = "numpy-2.3.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:622a65d40d8eb427d8e722fd410ac3ad4958002f109230bc714fa551044ebae2"}, + {file = "numpy-2.3.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:b9446d9d8505aadadb686d51d838f2b6688c9e85636a0c3abaeb55ed54756459"}, + {file = "numpy-2.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:50080245365d75137a2bf46151e975de63146ae6d79f7e6bd5c0e85c9931d06a"}, + {file = "numpy-2.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c24bb4113c66936eeaa0dc1e47c74770453d34f46ee07ae4efd853a2ed1ad10a"}, + {file = "numpy-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4d8d294287fdf685281e671886c6dcdf0291a7c19db3e5cb4178d07ccf6ecc67"}, + {file = "numpy-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6295f81f093b7f5769d1728a6bd8bf7466de2adfa771ede944ce6711382b89dc"}, + {file = "numpy-2.3.0-cp312-cp312-win32.whl", hash = "sha256:e6648078bdd974ef5d15cecc31b0c410e2e24178a6e10bf511e0557eed0f2570"}, + {file = "numpy-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:0898c67a58cdaaf29994bc0e2c65230fd4de0ac40afaf1584ed0b02cd74c6fdd"}, + {file = "numpy-2.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:bd8df082b6c4695753ad6193018c05aac465d634834dca47a3ae06d4bb22d9ea"}, + {file = "numpy-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5754ab5595bfa2c2387d241296e0381c21f44a4b90a776c3c1d39eede13a746a"}, + {file = "numpy-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d11fa02f77752d8099573d64e5fe33de3229b6632036ec08f7080f46b6649959"}, + {file = "numpy-2.3.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:aba48d17e87688a765ab1cd557882052f238e2f36545dfa8e29e6a91aef77afe"}, + {file = "numpy-2.3.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4dc58865623023b63b10d52f18abaac3729346a7a46a778381e0e3af4b7f3beb"}, + {file = "numpy-2.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:df470d376f54e052c76517393fa443758fefcdd634645bc9c1f84eafc67087f0"}, + {file = "numpy-2.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:87717eb24d4a8a64683b7a4e91ace04e2f5c7c77872f823f02a94feee186168f"}, + {file = "numpy-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fa264d56882b59dcb5ea4d6ab6f31d0c58a57b41aec605848b6eb2ef4a43e8"}, + {file = "numpy-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e651756066a0eaf900916497e20e02fe1ae544187cb0fe88de981671ee7f6270"}, + {file = "numpy-2.3.0-cp313-cp313-win32.whl", hash = "sha256:e43c3cce3b6ae5f94696669ff2a6eafd9a6b9332008bafa4117af70f4b88be6f"}, + {file = "numpy-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:81ae0bf2564cf475f94be4a27ef7bcf8af0c3e28da46770fc904da9abd5279b5"}, + {file = "numpy-2.3.0-cp313-cp313-win_arm64.whl", hash = "sha256:c8738baa52505fa6e82778580b23f945e3578412554d937093eac9205e845e6e"}, + {file = "numpy-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:39b27d8b38942a647f048b675f134dd5a567f95bfff481f9109ec308515c51d8"}, + {file = "numpy-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0eba4a1ea88f9a6f30f56fdafdeb8da3774349eacddab9581a21234b8535d3d3"}, + {file = "numpy-2.3.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = 
"sha256:b0f1f11d0a1da54927436505a5a7670b154eac27f5672afc389661013dfe3d4f"}, + {file = "numpy-2.3.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:690d0a5b60a47e1f9dcec7b77750a4854c0d690e9058b7bef3106e3ae9117808"}, + {file = "numpy-2.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:8b51ead2b258284458e570942137155978583e407babc22e3d0ed7af33ce06f8"}, + {file = "numpy-2.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:aaf81c7b82c73bd9b45e79cfb9476cb9c29e937494bfe9092c26aece812818ad"}, + {file = "numpy-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f420033a20b4f6a2a11f585f93c843ac40686a7c3fa514060a97d9de93e5e72b"}, + {file = "numpy-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d344ca32ab482bcf8735d8f95091ad081f97120546f3d250240868430ce52555"}, + {file = "numpy-2.3.0-cp313-cp313t-win32.whl", hash = "sha256:48a2e8eaf76364c32a1feaa60d6925eaf32ed7a040183b807e02674305beef61"}, + {file = "numpy-2.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ba17f93a94e503551f154de210e4d50c5e3ee20f7e7a1b5f6ce3f22d419b93bb"}, + {file = "numpy-2.3.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f14e016d9409680959691c109be98c436c6249eaf7f118b424679793607b5944"}, + {file = "numpy-2.3.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:80b46117c7359de8167cc00a2c7d823bdd505e8c7727ae0871025a86d668283b"}, + {file = "numpy-2.3.0-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:5814a0f43e70c061f47abd5857d120179609ddc32a613138cbb6c4e9e2dbdda5"}, + {file = "numpy-2.3.0-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:ef6c1e88fd6b81ac6d215ed71dc8cd027e54d4bf1d2682d362449097156267a2"}, + {file = "numpy-2.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33a5a12a45bb82d9997e2c0b12adae97507ad7c347546190a18ff14c28bbca12"}, + {file = "numpy-2.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:54dfc8681c1906d239e95ab1508d0a533c4a9505e52ee2d71a5472b04437ef97"}, + {file = "numpy-2.3.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e017a8a251ff4d18d71f139e28bdc7c31edba7a507f72b1414ed902cbe48c74d"}, + {file = "numpy-2.3.0.tar.gz", hash = "sha256:581f87f9e9e9db2cba2141400e160e9dd644ee248788d6f90636eeb8fd9260a6"}, ] [[package]] @@ -2450,54 +2445,54 @@ files = [ [[package]] name = "pandas" -version = "2.2.3" +version = "2.3.0" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, - {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, - {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, - 
{file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, - {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, - {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, - {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, - {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, - {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, + {file = "pandas-2.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:625466edd01d43b75b1883a64d859168e4556261a5035b32f9d743b67ef44634"}, + {file = "pandas-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6872d695c896f00df46b71648eea332279ef4077a409e2fe94220208b6bb675"}, + {file = "pandas-2.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4dd97c19bd06bc557ad787a15b6489d2614ddaab5d104a0310eb314c724b2d2"}, + {file = "pandas-2.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:034abd6f3db8b9880aaee98f4f5d4dbec7c4829938463ec046517220b2f8574e"}, + {file = "pandas-2.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23c2b2dc5213810208ca0b80b8666670eb4660bbfd9d45f58592cc4ddcfd62e1"}, + {file = "pandas-2.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:39ff73ec07be5e90330cc6ff5705c651ace83374189dcdcb46e6ff54b4a72cd6"}, + {file = "pandas-2.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:40cecc4ea5abd2921682b57532baea5588cc5f80f0231c624056b146887274d2"}, + {file = "pandas-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8adff9f138fc614347ff33812046787f7d43b3cef7c0f0171b3340cae333f6ca"}, + {file = "pandas-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e5f08eb9a445d07720776df6e641975665c9ea12c9d8a331e0f6890f2dcd76ef"}, + {file = "pandas-2.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa35c266c8cd1a67d75971a1912b185b492d257092bdd2709bbdebe574ed228d"}, + {file = "pandas-2.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a0cc77b0f089d2d2ffe3007db58f170dae9b9f54e569b299db871a3ab5bf46"}, + {file = "pandas-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c06f6f144ad0a1bf84699aeea7eff6068ca5c63ceb404798198af7eb86082e33"}, + {file = 
"pandas-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ed16339bc354a73e0a609df36d256672c7d296f3f767ac07257801aa064ff73c"}, + {file = "pandas-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:fa07e138b3f6c04addfeaf56cc7fdb96c3b68a3fe5e5401251f231fce40a0d7a"}, + {file = "pandas-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2eb4728a18dcd2908c7fccf74a982e241b467d178724545a48d0caf534b38ebf"}, + {file = "pandas-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9d8c3187be7479ea5c3d30c32a5d73d62a621166675063b2edd21bc47614027"}, + {file = "pandas-2.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ff730713d4c4f2f1c860e36c005c7cefc1c7c80c21c0688fd605aa43c9fcf09"}, + {file = "pandas-2.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba24af48643b12ffe49b27065d3babd52702d95ab70f50e1b34f71ca703e2c0d"}, + {file = "pandas-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:404d681c698e3c8a40a61d0cd9412cc7364ab9a9cc6e144ae2992e11a2e77a20"}, + {file = "pandas-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6021910b086b3ca756755e86ddc64e0ddafd5e58e076c72cb1585162e5ad259b"}, + {file = "pandas-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:094e271a15b579650ebf4c5155c05dcd2a14fd4fdd72cf4854b2f7ad31ea30be"}, + {file = "pandas-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2c7e2fc25f89a49a11599ec1e76821322439d90820108309bf42130d2f36c983"}, + {file = "pandas-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c6da97aeb6a6d233fb6b17986234cc723b396b50a3c6804776351994f2a658fd"}, + {file = "pandas-2.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb32dc743b52467d488e7a7c8039b821da2826a9ba4f85b89ea95274f863280f"}, + {file = "pandas-2.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213cd63c43263dbb522c1f8a7c9d072e25900f6975596f883f4bebd77295d4f3"}, + {file = "pandas-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1d2b33e68d0ce64e26a4acc2e72d747292084f4e8db4c847c6f5f6cbe56ed6d8"}, + {file = "pandas-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:430a63bae10b5086995db1b02694996336e5a8ac9a96b4200572b413dfdfccb9"}, + {file = "pandas-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4930255e28ff5545e2ca404637bcc56f031893142773b3468dc021c6c32a1390"}, + {file = "pandas-2.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f925f1ef673b4bd0271b1809b72b3270384f2b7d9d14a189b12b7fc02574d575"}, + {file = "pandas-2.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78ad363ddb873a631e92a3c063ade1ecfb34cae71e9a2be6ad100f875ac1042"}, + {file = "pandas-2.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951805d146922aed8357e4cc5671b8b0b9be1027f0619cea132a9f3f65f2f09c"}, + {file = "pandas-2.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a881bc1309f3fce34696d07b00f13335c41f5f5a8770a33b09ebe23261cfc67"}, + {file = "pandas-2.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e1991bbb96f4050b09b5f811253c4f3cf05ee89a589379aa36cd623f21a31d6f"}, + {file = "pandas-2.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bb3be958022198531eb7ec2008cfc78c5b1eed51af8600c6c5d9160d89d8d249"}, + {file = "pandas-2.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9efc0acbbffb5236fbdf0409c04edce96bec4bdaa649d49985427bd1ec73e085"}, + {file = "pandas-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:75651c14fde635e680496148a8526b328e09fe0572d9ae9b638648c46a544ba3"}, + {file = "pandas-2.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5be867a0541a9fb47a4be0c5790a4bccd5b77b92f0a59eeec9375fafc2aa14"}, + {file = "pandas-2.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84141f722d45d0c2a89544dd29d35b3abfc13d2250ed7e68394eda7564bd6324"}, + {file = "pandas-2.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f95a2aef32614ed86216d3c450ab12a4e82084e8102e355707a1d96e33d51c34"}, + {file = "pandas-2.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e0f51973ba93a9f97185049326d75b942b9aeb472bec616a129806facb129ebb"}, + {file = "pandas-2.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b198687ca9c8529662213538a9bb1e60fa0bf0f6af89292eb68fea28743fcd5a"}, + {file = "pandas-2.3.0.tar.gz", hash = "sha256:34600ab34ebf1131a7613a260a61dbe8b62c188ec0ea4c296da7c9a06b004133"}, ] [package.dependencies] @@ -2619,30 +2614,30 @@ type = ["mypy (>=1.14.1)"] [[package]] name = "pluggy" -version = "1.5.0" +version = "1.6.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["test"] files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, ] [package.extras] dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "prometheus-client" -version = "0.21.1" +version = "0.22.1" description = "Python client for the Prometheus monitoring system." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "test"] files = [ - {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"}, - {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"}, + {file = "prometheus_client-0.22.1-py3-none-any.whl", hash = "sha256:cca895342e308174341b2cbf99a56bef291fbc0ef7b9e5412a0f26d653ba7094"}, + {file = "prometheus_client-0.22.1.tar.gz", hash = "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28"}, ] [package.extras] @@ -2665,110 +2660,110 @@ wcwidth = "*" [[package]] name = "propcache" -version = "0.3.1" +version = "0.3.2" description = "Accelerated property cache" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98"}, - {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180"}, - {file = "propcache-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71"}, - {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649"}, - {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f"}, - {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229"}, - {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46"}, - {file = "propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e"}, - {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136"}, - {file = "propcache-0.3.1-cp310-cp310-win32.whl", hash = "sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42"}, - {file = "propcache-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833"}, - {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5"}, - {file = 
"propcache-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371"}, - {file = "propcache-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da"}, - {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744"}, - {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0"}, - {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5"}, - {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256"}, - {file = "propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a"}, - {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9"}, - {file = "propcache-0.3.1-cp311-cp311-win32.whl", hash = "sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005"}, - {file = "propcache-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7"}, - {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723"}, - {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976"}, - {file = "propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b"}, - {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f"}, - {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70"}, - {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7"}, - {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25"}, - {file = 
"propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5"}, - {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7"}, - {file = "propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b"}, - {file = "propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3"}, - {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8"}, - {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f"}, - {file = "propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111"}, - {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5"}, - {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb"}, - {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7"}, - {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120"}, - {file = "propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7"}, - {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef"}, - {file = "propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24"}, - {file = "propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037"}, - {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f"}, - {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c"}, - {file = "propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc"}, - {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de"}, - {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6"}, - {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7"}, - {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458"}, - {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18"}, - {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a"}, - {file = "propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d"}, - {file = "propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e"}, - {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6"}, - {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf"}, - {file = "propcache-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c"}, - {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894"}, - {file = 
"propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035"}, - {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908"}, - {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5"}, - {file = "propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c"}, - {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe"}, - {file = "propcache-0.3.1-cp39-cp39-win32.whl", hash = "sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64"}, - {file = "propcache-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566"}, - {file = "propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40"}, - {file = "propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf"}, + {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770"}, + {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3"}, + {file = "propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c"}, + {file = "propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70"}, + {file = "propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9"}, + {file = "propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be"}, + {file = "propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f"}, + {file = "propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e"}, + {file = "propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897"}, + {file = "propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39"}, + {file = "propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10"}, + {file = "propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154"}, + {file = "propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1"}, + {file = "propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1"}, + {file = "propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c"}, + {file = "propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945"}, + {file = "propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252"}, + {file = "propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1"}, + {file = 
"propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43"}, + {file = "propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02"}, + {file = "propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05"}, + {file = "propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b"}, + {file = "propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0"}, + {file = "propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330"}, + {file = "propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394"}, + {file = "propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198"}, + {file = "propcache-0.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5"}, + {file = "propcache-0.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4"}, + {file = "propcache-0.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe"}, + {file = "propcache-0.3.2-cp39-cp39-win32.whl", hash = "sha256:4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1"}, + {file = "propcache-0.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9"}, + {file = "propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f"}, + {file = "propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168"}, ] [[package]] @@ -2791,21 +2786,21 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "6.30.2" +version = "6.31.1" description = "" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "protobuf-6.30.2-cp310-abi3-win32.whl", hash = 
"sha256:b12ef7df7b9329886e66404bef5e9ce6a26b54069d7f7436a0853ccdeb91c103"}, - {file = "protobuf-6.30.2-cp310-abi3-win_amd64.whl", hash = "sha256:7653c99774f73fe6b9301b87da52af0e69783a2e371e8b599b3e9cb4da4b12b9"}, - {file = "protobuf-6.30.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:0eb523c550a66a09a0c20f86dd554afbf4d32b02af34ae53d93268c1f73bc65b"}, - {file = "protobuf-6.30.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:50f32cc9fd9cb09c783ebc275611b4f19dfdfb68d1ee55d2f0c7fa040df96815"}, - {file = "protobuf-6.30.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4f6c687ae8efae6cf6093389a596548214467778146b7245e886f35e1485315d"}, - {file = "protobuf-6.30.2-cp39-cp39-win32.whl", hash = "sha256:524afedc03b31b15586ca7f64d877a98b184f007180ce25183d1a5cb230ee72b"}, - {file = "protobuf-6.30.2-cp39-cp39-win_amd64.whl", hash = "sha256:acec579c39c88bd8fbbacab1b8052c793efe83a0a5bd99db4a31423a25c0a0e2"}, - {file = "protobuf-6.30.2-py3-none-any.whl", hash = "sha256:ae86b030e69a98e08c77beab574cbcb9fff6d031d57209f574a5aea1445f4b51"}, - {file = "protobuf-6.30.2.tar.gz", hash = "sha256:35c859ae076d8c56054c25b59e5e59638d86545ed6e2b6efac6be0b6ea3ba048"}, + {file = "protobuf-6.31.1-cp310-abi3-win32.whl", hash = "sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9"}, + {file = "protobuf-6.31.1-cp310-abi3-win_amd64.whl", hash = "sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447"}, + {file = "protobuf-6.31.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402"}, + {file = "protobuf-6.31.1-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39"}, + {file = "protobuf-6.31.1-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6"}, + {file = "protobuf-6.31.1-cp39-cp39-win32.whl", hash = "sha256:0414e3aa5a5f3ff423828e1e6a6e907d6c65c1d5b7e6e975793d5590bdeecc16"}, + {file = "protobuf-6.31.1-cp39-cp39-win_amd64.whl", hash = "sha256:8764cf4587791e7564051b35524b72844f845ad0bb011704c3736cce762d8fe9"}, + {file = "protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e"}, + {file = "protobuf-6.31.1.tar.gz", hash = "sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a"}, ] [[package]] @@ -3559,19 +3554,19 @@ typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.13\""} [[package]] name = "requests" -version = "2.32.3" +version = "2.32.4" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" groups = ["main", "docs", "test"] files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" @@ -3646,126 +3641,129 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.24.0" +version = "0.25.1" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" groups = ["main", "test"] files = [ - {file = "rpds_py-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:006f4342fe729a368c6df36578d7a348c7c716be1da0a1a0f86e3021f8e98724"}, - {file = "rpds_py-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2d53747da70a4e4b17f559569d5f9506420966083a31c5fbd84e764461c4444b"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8acd55bd5b071156bae57b555f5d33697998752673b9de554dd82f5b5352727"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7e80d375134ddb04231a53800503752093dbb65dad8dabacce2c84cccc78e964"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60748789e028d2a46fc1c70750454f83c6bdd0d05db50f5ae83e2db500b34da5"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e1daf5bf6c2be39654beae83ee6b9a12347cb5aced9a29eecf12a2d25fff664"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b221c2457d92a1fb3c97bee9095c874144d196f47c038462ae6e4a14436f7bc"}, - {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:66420986c9afff67ef0c5d1e4cdc2d0e5262f53ad11e4f90e5e22448df485bf0"}, - {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:43dba99f00f1d37b2a0265a259592d05fcc8e7c19d140fe51c6e6f16faabeb1f"}, - {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a88c0d17d039333a41d9bf4616bd062f0bd7aa0edeb6cafe00a2fc2a804e944f"}, - {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc31e13ce212e14a539d430428cd365e74f8b2d534f8bc22dd4c9c55b277b875"}, - {file = "rpds_py-0.24.0-cp310-cp310-win32.whl", hash = "sha256:fc2c1e1b00f88317d9de6b2c2b39b012ebbfe35fe5e7bef980fd2a91f6100a07"}, - {file = "rpds_py-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0145295ca415668420ad142ee42189f78d27af806fcf1f32a18e51d47dd2052"}, - {file = "rpds_py-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2d3ee4615df36ab8eb16c2507b11e764dcc11fd350bbf4da16d09cda11fcedef"}, - {file = "rpds_py-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e13ae74a8a3a0c2f22f450f773e35f893484fcfacb00bb4344a7e0f4f48e1f97"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf86f72d705fc2ef776bb7dd9e5fbba79d7e1f3e258bf9377f8204ad0fc1c51e"}, - {file = 
"rpds_py-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c43583ea8517ed2e780a345dd9960896afc1327e8cf3ac8239c167530397440d"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cd031e63bc5f05bdcda120646a0d32f6d729486d0067f09d79c8db5368f4586"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34d90ad8c045df9a4259c47d2e16a3f21fdb396665c94520dbfe8766e62187a4"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e838bf2bb0b91ee67bf2b889a1a841e5ecac06dd7a2b1ef4e6151e2ce155c7ae"}, - {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04ecf5c1ff4d589987b4d9882872f80ba13da7d42427234fce8f22efb43133bc"}, - {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:630d3d8ea77eabd6cbcd2ea712e1c5cecb5b558d39547ac988351195db433f6c"}, - {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ebcb786b9ff30b994d5969213a8430cbb984cdd7ea9fd6df06663194bd3c450c"}, - {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:174e46569968ddbbeb8a806d9922f17cd2b524aa753b468f35b97ff9c19cb718"}, - {file = "rpds_py-0.24.0-cp311-cp311-win32.whl", hash = "sha256:5ef877fa3bbfb40b388a5ae1cb00636a624690dcb9a29a65267054c9ea86d88a"}, - {file = "rpds_py-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:e274f62cbd274359eff63e5c7e7274c913e8e09620f6a57aae66744b3df046d6"}, - {file = "rpds_py-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d8551e733626afec514b5d15befabea0dd70a343a9f23322860c4f16a9430205"}, - {file = "rpds_py-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e374c0ce0ca82e5b67cd61fb964077d40ec177dd2c4eda67dba130de09085c7"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69d003296df4840bd445a5d15fa5b6ff6ac40496f956a221c4d1f6f7b4bc4d9"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8212ff58ac6dfde49946bea57474a386cca3f7706fc72c25b772b9ca4af6b79e"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:528927e63a70b4d5f3f5ccc1fa988a35456eb5d15f804d276709c33fc2f19bda"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a824d2c7a703ba6daaca848f9c3d5cb93af0505be505de70e7e66829affd676e"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d51febb7a114293ffd56c6cf4736cb31cd68c0fddd6aa303ed09ea5a48e029"}, - {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fab5f4a2c64a8fb64fc13b3d139848817a64d467dd6ed60dcdd6b479e7febc9"}, - {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9be4f99bee42ac107870c61dfdb294d912bf81c3c6d45538aad7aecab468b6b7"}, - {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:564c96b6076a98215af52f55efa90d8419cc2ef45d99e314fddefe816bc24f91"}, - {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:75a810b7664c17f24bf2ffd7f92416c00ec84b49bb68e6a0d93e542406336b56"}, - {file = "rpds_py-0.24.0-cp312-cp312-win32.whl", hash = "sha256:f6016bd950be4dcd047b7475fdf55fb1e1f59fc7403f387be0e8123e4a576d30"}, - {file = "rpds_py-0.24.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:998c01b8e71cf051c28f5d6f1187abbdf5cf45fc0efce5da6c06447cba997034"}, - {file = "rpds_py-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2d8e4508e15fc05b31285c4b00ddf2e0eb94259c2dc896771966a163122a0c"}, - {file = "rpds_py-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f00c16e089282ad68a3820fd0c831c35d3194b7cdc31d6e469511d9bffc535c"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951cc481c0c395c4a08639a469d53b7d4afa252529a085418b82a6b43c45c240"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9ca89938dff18828a328af41ffdf3902405a19f4131c88e22e776a8e228c5a8"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed0ef550042a8dbcd657dfb284a8ee00f0ba269d3f2286b0493b15a5694f9fe8"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2356688e5d958c4d5cb964af865bea84db29971d3e563fb78e46e20fe1848b"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78884d155fd15d9f64f5d6124b486f3d3f7fd7cd71a78e9670a0f6f6ca06fb2d"}, - {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a4a535013aeeef13c5532f802708cecae8d66c282babb5cd916379b72110cf7"}, - {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:84e0566f15cf4d769dade9b366b7b87c959be472c92dffb70462dd0844d7cbad"}, - {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:823e74ab6fbaa028ec89615ff6acb409e90ff45580c45920d4dfdddb069f2120"}, - {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c61a2cb0085c8783906b2f8b1f16a7e65777823c7f4d0a6aaffe26dc0d358dd9"}, - {file = "rpds_py-0.24.0-cp313-cp313-win32.whl", hash = "sha256:60d9b630c8025b9458a9d114e3af579a2c54bd32df601c4581bd054e85258143"}, - {file = "rpds_py-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:6eea559077d29486c68218178ea946263b87f1c41ae7f996b1f30a983c476a5a"}, - {file = "rpds_py-0.24.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:d09dc82af2d3c17e7dd17120b202a79b578d79f2b5424bda209d9966efeed114"}, - {file = "rpds_py-0.24.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5fc13b44de6419d1e7a7e592a4885b323fbc2f46e1f22151e3a8ed3b8b920405"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c347a20d79cedc0a7bd51c4d4b7dbc613ca4e65a756b5c3e57ec84bd43505b47"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20f2712bd1cc26a3cc16c5a1bfee9ed1abc33d4cdf1aabd297fe0eb724df4272"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad911555286884be1e427ef0dc0ba3929e6821cbeca2194b13dc415a462c7fd"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aeb3329c1721c43c58cae274d7d2ca85c1690d89485d9c63a006cb79a85771a"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a0f156e9509cee987283abd2296ec816225145a13ed0391df8f71bf1d789e2d"}, - {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa6800adc8204ce898c8a424303969b7aa6a5e4ad2789c13f8648739830323b7"}, - {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:a18fc371e900a21d7392517c6f60fe859e802547309e94313cd8181ad9db004d"}, - {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9168764133fd919f8dcca2ead66de0105f4ef5659cbb4fa044f7014bed9a1797"}, - {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f6e3cec44ba05ee5cbdebe92d052f69b63ae792e7d05f1020ac5e964394080c"}, - {file = "rpds_py-0.24.0-cp313-cp313t-win32.whl", hash = "sha256:8ebc7e65ca4b111d928b669713865f021b7773350eeac4a31d3e70144297baba"}, - {file = "rpds_py-0.24.0-cp313-cp313t-win_amd64.whl", hash = "sha256:675269d407a257b8c00a6b58205b72eec8231656506c56fd429d924ca00bb350"}, - {file = "rpds_py-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a36b452abbf29f68527cf52e181fced56685731c86b52e852053e38d8b60bc8d"}, - {file = "rpds_py-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b3b397eefecec8e8e39fa65c630ef70a24b09141a6f9fc17b3c3a50bed6b50e"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdabcd3beb2a6dca7027007473d8ef1c3b053347c76f685f5f060a00327b8b65"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5db385bacd0c43f24be92b60c857cf760b7f10d8234f4bd4be67b5b20a7c0b6b"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8097b3422d020ff1c44effc40ae58e67d93e60d540a65649d2cdaf9466030791"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493fe54318bed7d124ce272fc36adbf59d46729659b2c792e87c3b95649cdee9"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8aa362811ccdc1f8dadcc916c6d47e554169ab79559319ae9fae7d7752d0d60c"}, - {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d8f9a6e7fd5434817526815f09ea27f2746c4a51ee11bb3439065f5fc754db58"}, - {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8205ee14463248d3349131bb8099efe15cd3ce83b8ef3ace63c7e976998e7124"}, - {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:921ae54f9ecba3b6325df425cf72c074cd469dea843fb5743a26ca7fb2ccb149"}, - {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32bab0a56eac685828e00cc2f5d1200c548f8bc11f2e44abf311d6b548ce2e45"}, - {file = "rpds_py-0.24.0-cp39-cp39-win32.whl", hash = "sha256:f5c0ed12926dec1dfe7d645333ea59cf93f4d07750986a586f511c0bc61fe103"}, - {file = "rpds_py-0.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:afc6e35f344490faa8276b5f2f7cbf71f88bc2cda4328e00553bd451728c571f"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:619ca56a5468f933d940e1bf431c6f4e13bef8e688698b067ae68eb4f9b30e3a"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b28e5122829181de1898c2c97f81c0b3246d49f585f22743a1246420bb8d399"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e5ab32cf9eb3647450bc74eb201b27c185d3857276162c101c0f8c6374e098"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:208b3a70a98cf3710e97cabdc308a51cd4f28aa6e7bb11de3d56cd8b74bab98d"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbc4362e06f950c62cad3d4abf1191021b2ffaf0b31ac230fbf0526453eee75e"}, - {file = 
"rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebea2821cdb5f9fef44933617be76185b80150632736f3d76e54829ab4a3b4d1"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4df06c35465ef4d81799999bba810c68d29972bf1c31db61bfdb81dd9d5bb"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3aa13bdf38630da298f2e0d77aca967b200b8cc1473ea05248f6c5e9c9bdb44"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:041f00419e1da7a03c46042453598479f45be3d787eb837af382bfc169c0db33"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8754d872a5dfc3c5bf9c0e059e8107451364a30d9fd50f1f1a85c4fb9481164"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:896c41007931217a343eff197c34513c154267636c8056fb409eafd494c3dcdc"}, - {file = "rpds_py-0.24.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:92558d37d872e808944c3c96d0423b8604879a3d1c86fdad508d7ed91ea547d5"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f9e0057a509e096e47c87f753136c9b10d7a91842d8042c2ee6866899a717c0d"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d6e109a454412ab82979c5b1b3aee0604eca4bbf9a02693bb9df027af2bfa91a"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1c892b1ec1f8cbd5da8de287577b455e388d9c328ad592eabbdcb6fc93bee5"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c39438c55983d48f4bb3487734d040e22dad200dab22c41e331cee145e7a50d"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d7e8ce990ae17dda686f7e82fd41a055c668e13ddcf058e7fb5e9da20b57793"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ea7f4174d2e4194289cb0c4e172d83e79a6404297ff95f2875cf9ac9bced8ba"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb2954155bb8f63bb19d56d80e5e5320b61d71084617ed89efedb861a684baea"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04f2b712a2206e13800a8136b07aaedc23af3facab84918e7aa89e4be0260032"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:eda5c1e2a715a4cbbca2d6d304988460942551e4e5e3b7457b50943cd741626d"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:9abc80fe8c1f87218db116016de575a7998ab1629078c90840e8d11ab423ee25"}, - {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a727fd083009bc83eb83d6950f0c32b3c94c8b80a9b667c87f4bd1274ca30ba"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e0f3ef95795efcd3b2ec3fe0a5bcfb5dadf5e3996ea2117427e524d4fbf309c6"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2c13777ecdbbba2077670285dd1fe50828c8742f6a4119dbef6f83ea13ad10fb"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e8d804c2ccd618417e96720ad5cd076a86fa3f8cb310ea386a3e6229bae7d1"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:fd822f019ccccd75c832deb7aa040bb02d70a92eb15a2f16c7987b7ad4ee8d83"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0047638c3aa0dbcd0ab99ed1e549bbf0e142c9ecc173b6492868432d8989a046"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5b66d1b201cc71bc3081bc2f1fc36b0c1f268b773e03bbc39066651b9e18391"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbcbb6db5582ea33ce46a5d20a5793134b5365110d84df4e30b9d37c6fd40ad3"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63981feca3f110ed132fd217bf7768ee8ed738a55549883628ee3da75bb9cb78"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3a55fc10fdcbf1a4bd3c018eea422c52cf08700cf99c28b5cb10fe97ab77a0d3"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:c30ff468163a48535ee7e9bf21bd14c7a81147c0e58a36c1078289a8ca7af0bd"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:369d9c6d4c714e36d4a03957b4783217a3ccd1e222cdd67d464a3a479fc17796"}, - {file = "rpds_py-0.24.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:24795c099453e3721fda5d8ddd45f5dfcc8e5a547ce7b8e9da06fecc3832e26f"}, - {file = "rpds_py-0.24.0.tar.gz", hash = "sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e"}, + {file = "rpds_py-0.25.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9"}, + {file = "rpds_py-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:605ffe7769e24b1800b4d024d24034405d9404f0bc2f55b6db3362cd34145a6f"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc6f3ddef93243538be76f8e47045b4aad7a66a212cd3a0f23e34469473d36b"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f70316f760174ca04492b5ab01be631a8ae30cadab1d1081035136ba12738cfa"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1dafef8df605fdb46edcc0bf1573dea0d6d7b01ba87f85cd04dc855b2b4479e"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0701942049095741a8aeb298a31b203e735d1c61f4423511d2b1a41dcd8a16da"}, + {file = "rpds_py-0.25.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e87798852ae0b37c88babb7f7bbbb3e3fecc562a1c340195b44c7e24d403e380"}, + {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3bcce0edc1488906c2d4c75c94c70a0417e83920dd4c88fec1078c94843a6ce9"}, + {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e2f6a2347d3440ae789505693a02836383426249d5293541cd712e07e7aecf54"}, + {file = "rpds_py-0.25.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4fd52d3455a0aa997734f3835cbc4c9f32571345143960e7d7ebfe7b5fbfa3b2"}, + {file = "rpds_py-0.25.1-cp310-cp310-win32.whl", hash = "sha256:3f0b1798cae2bbbc9b9db44ee068c556d4737911ad53a4e5093d09d04b3bbc24"}, + {file = "rpds_py-0.25.1-cp310-cp310-win_amd64.whl", hash = "sha256:3ebd879ab996537fc510a2be58c59915b5dd63bccb06d1ef514fee787e05984a"}, + {file = 
"rpds_py-0.25.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5f048bbf18b1f9120685c6d6bb70cc1a52c8cc11bdd04e643d28d3be0baf666d"}, + {file = "rpds_py-0.25.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4fbb0dbba559959fcb5d0735a0f87cdbca9e95dac87982e9b95c0f8f7ad10255"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4ca54b9cf9d80b4016a67a0193ebe0bcf29f6b0a96f09db942087e294d3d4c2"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ee3e26eb83d39b886d2cb6e06ea701bba82ef30a0de044d34626ede51ec98b0"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89706d0683c73a26f76a5315d893c051324d771196ae8b13e6ffa1ffaf5e574f"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2013ee878c76269c7b557a9a9c042335d732e89d482606990b70a839635feb7"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45e484db65e5380804afbec784522de84fa95e6bb92ef1bd3325d33d13efaebd"}, + {file = "rpds_py-0.25.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:48d64155d02127c249695abb87d39f0faf410733428d499867606be138161d65"}, + {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:048893e902132fd6548a2e661fb38bf4896a89eea95ac5816cf443524a85556f"}, + {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0317177b1e8691ab5879f4f33f4b6dc55ad3b344399e23df2e499de7b10a548d"}, + {file = "rpds_py-0.25.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bffcf57826d77a4151962bf1701374e0fc87f536e56ec46f1abdd6a903354042"}, + {file = "rpds_py-0.25.1-cp311-cp311-win32.whl", hash = "sha256:cda776f1967cb304816173b30994faaf2fd5bcb37e73118a47964a02c348e1bc"}, + {file = "rpds_py-0.25.1-cp311-cp311-win_amd64.whl", hash = "sha256:dc3c1ff0abc91444cd20ec643d0f805df9a3661fcacf9c95000329f3ddf268a4"}, + {file = "rpds_py-0.25.1-cp311-cp311-win_arm64.whl", hash = "sha256:5a3ddb74b0985c4387719fc536faced33cadf2172769540c62e2a94b7b9be1c4"}, + {file = "rpds_py-0.25.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b5ffe453cde61f73fea9430223c81d29e2fbf412a6073951102146c84e19e34c"}, + {file = "rpds_py-0.25.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:115874ae5e2fdcfc16b2aedc95b5eef4aebe91b28e7e21951eda8a5dc0d3461b"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a714bf6e5e81b0e570d01f56e0c89c6375101b8463999ead3a93a5d2a4af91fa"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35634369325906bcd01577da4c19e3b9541a15e99f31e91a02d010816b49bfda"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4cb2b3ddc16710548801c6fcc0cfcdeeff9dafbc983f77265877793f2660309"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ceca1cf097ed77e1a51f1dbc8d174d10cb5931c188a4505ff9f3e119dfe519b"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2cd1a4b0c2b8c5e31ffff50d09f39906fe351389ba143c195566056c13a7ea"}, + {file = "rpds_py-0.25.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1de336a4b164c9188cb23f3703adb74a7623ab32d20090d0e9bf499a2203ad65"}, + {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:9fca84a15333e925dd59ce01da0ffe2ffe0d6e5d29a9eeba2148916d1824948c"}, + {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88ec04afe0c59fa64e2f6ea0dd9657e04fc83e38de90f6de201954b4d4eb59bd"}, + {file = "rpds_py-0.25.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8bd2f19e312ce3e1d2c635618e8a8d8132892bb746a7cf74780a489f0f6cdcb"}, + {file = "rpds_py-0.25.1-cp312-cp312-win32.whl", hash = "sha256:e5e2f7280d8d0d3ef06f3ec1b4fd598d386cc6f0721e54f09109a8132182fbfe"}, + {file = "rpds_py-0.25.1-cp312-cp312-win_amd64.whl", hash = "sha256:db58483f71c5db67d643857404da360dce3573031586034b7d59f245144cc192"}, + {file = "rpds_py-0.25.1-cp312-cp312-win_arm64.whl", hash = "sha256:6d50841c425d16faf3206ddbba44c21aa3310a0cebc3c1cdfc3e3f4f9f6f5728"}, + {file = "rpds_py-0.25.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:659d87430a8c8c704d52d094f5ba6fa72ef13b4d385b7e542a08fc240cb4a559"}, + {file = "rpds_py-0.25.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:68f6f060f0bbdfb0245267da014d3a6da9be127fe3e8cc4a68c6f833f8a23bb1"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:083a9513a33e0b92cf6e7a6366036c6bb43ea595332c1ab5c8ae329e4bcc0a9c"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:816568614ecb22b18a010c7a12559c19f6fe993526af88e95a76d5a60b8b75fb"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c6564c0947a7f52e4792983f8e6cf9bac140438ebf81f527a21d944f2fd0a40"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c4a128527fe415d73cf1f70a9a688d06130d5810be69f3b553bf7b45e8acf79"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a49e1d7a4978ed554f095430b89ecc23f42014a50ac385eb0c4d163ce213c325"}, + {file = "rpds_py-0.25.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d74ec9bc0e2feb81d3f16946b005748119c0f52a153f6db6a29e8cd68636f295"}, + {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3af5b4cc10fa41e5bc64e5c198a1b2d2864337f8fcbb9a67e747e34002ce812b"}, + {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:79dc317a5f1c51fd9c6a0c4f48209c6b8526d0524a6904fc1076476e79b00f98"}, + {file = "rpds_py-0.25.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1521031351865e0181bc585147624d66b3b00a84109b57fcb7a779c3ec3772cd"}, + {file = "rpds_py-0.25.1-cp313-cp313-win32.whl", hash = "sha256:5d473be2b13600b93a5675d78f59e63b51b1ba2d0476893415dfbb5477e65b31"}, + {file = "rpds_py-0.25.1-cp313-cp313-win_amd64.whl", hash = "sha256:a7b74e92a3b212390bdce1d93da9f6488c3878c1d434c5e751cbc202c5e09500"}, + {file = "rpds_py-0.25.1-cp313-cp313-win_arm64.whl", hash = "sha256:dd326a81afe332ede08eb39ab75b301d5676802cdffd3a8f287a5f0b694dc3f5"}, + {file = "rpds_py-0.25.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:a58d1ed49a94d4183483a3ce0af22f20318d4a1434acee255d683ad90bf78129"}, + {file = "rpds_py-0.25.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f251bf23deb8332823aef1da169d5d89fa84c89f67bdfb566c49dea1fccfd50d"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dbd586bfa270c1103ece2109314dd423df1fa3d9719928b5d09e4840cec0d72"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:6d273f136e912aa101a9274c3145dcbddbe4bac560e77e6d5b3c9f6e0ed06d34"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:666fa7b1bd0a3810a7f18f6d3a25ccd8866291fbbc3c9b912b917a6715874bb9"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:921954d7fbf3fccc7de8f717799304b14b6d9a45bbeec5a8d7408ccbf531faf5"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3d86373ff19ca0441ebeb696ef64cb58b8b5cbacffcda5a0ec2f3911732a194"}, + {file = "rpds_py-0.25.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c8980cde3bb8575e7c956a530f2c217c1d6aac453474bf3ea0f9c89868b531b6"}, + {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8eb8c84ecea987a2523e057c0d950bcb3f789696c0499290b8d7b3107a719d78"}, + {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e43a005671a9ed5a650f3bc39e4dbccd6d4326b24fb5ea8be5f3a43a6f576c72"}, + {file = "rpds_py-0.25.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58f77c60956501a4a627749a6dcb78dac522f249dd96b5c9f1c6af29bfacfb66"}, + {file = "rpds_py-0.25.1-cp313-cp313t-win32.whl", hash = "sha256:2cb9e5b5e26fc02c8a4345048cd9998c2aca7c2712bd1b36da0c72ee969a3523"}, + {file = "rpds_py-0.25.1-cp313-cp313t-win_amd64.whl", hash = "sha256:401ca1c4a20cc0510d3435d89c069fe0a9ae2ee6495135ac46bdd49ec0495763"}, + {file = "rpds_py-0.25.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ce4c8e485a3c59593f1a6f683cf0ea5ab1c1dc94d11eea5619e4fb5228b40fbd"}, + {file = "rpds_py-0.25.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8222acdb51a22929c3b2ddb236b69c59c72af4019d2cba961e2f9add9b6e634"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4593c4eae9b27d22df41cde518b4b9e4464d139e4322e2127daa9b5b981b76be"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd035756830c712b64725a76327ce80e82ed12ebab361d3a1cdc0f51ea21acb0"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:114a07e85f32b125404f28f2ed0ba431685151c037a26032b213c882f26eb908"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dec21e02e6cc932538b5203d3a8bd6aa1480c98c4914cb88eea064ecdbc6396a"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09eab132f41bf792c7a0ea1578e55df3f3e7f61888e340779b06050a9a3f16e9"}, + {file = "rpds_py-0.25.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c98f126c4fc697b84c423e387337d5b07e4a61e9feac494362a59fd7a2d9ed80"}, + {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0e6a327af8ebf6baba1c10fadd04964c1965d375d318f4435d5f3f9651550f4a"}, + {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bc120d1132cff853ff617754196d0ac0ae63befe7c8498bd67731ba368abe451"}, + {file = "rpds_py-0.25.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:140f61d9bed7839446bdd44852e30195c8e520f81329b4201ceead4d64eb3a9f"}, + {file = "rpds_py-0.25.1-cp39-cp39-win32.whl", hash = "sha256:9c006f3aadeda131b438c3092124bd196b66312f0caa5823ef09585a669cf449"}, + {file = "rpds_py-0.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:a61d0b2c7c9a0ae45732a77844917b427ff16ad5464b4d4f5e4adb955f582890"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:b24bf3cd93d5b6ecfbedec73b15f143596c88ee249fa98cefa9a9dc9d92c6f28"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:0eb90e94f43e5085623932b68840b6f379f26db7b5c2e6bcef3179bd83c9330f"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d50e4864498a9ab639d6d8854b25e80642bd362ff104312d9770b05d66e5fb13"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c9409b47ba0650544b0bb3c188243b83654dfe55dcc173a86832314e1a6a35d"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:796ad874c89127c91970652a4ee8b00d56368b7e00d3477f4415fe78164c8000"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85608eb70a659bf4c1142b2781083d4b7c0c4e2c90eff11856a9754e965b2540"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4feb9211d15d9160bc85fa72fed46432cdc143eb9cf6d5ca377335a921ac37b"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ccfa689b9246c48947d31dd9d8b16d89a0ecc8e0e26ea5253068efb6c542b76e"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3c5b317ecbd8226887994852e85de562f7177add602514d4ac40f87de3ae45a8"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:454601988aab2c6e8fd49e7634c65476b2b919647626208e376afcd22019eeb8"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1c0c434a53714358532d13539272db75a5ed9df75a4a090a753ac7173ec14e11"}, + {file = "rpds_py-0.25.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f73ce1512e04fbe2bc97836e89830d6b4314c171587a99688082d090f934d20a"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee86d81551ec68a5c25373c5643d343150cc54672b5e9a0cafc93c1870a53954"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89c24300cd4a8e4a51e55c31a8ff3918e6651b241ee8876a42cc2b2a078533ba"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:771c16060ff4e79584dc48902a91ba79fd93eade3aa3a12d6d2a4aadaf7d542b"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:785ffacd0ee61c3e60bdfde93baa6d7c10d86f15655bd706c89da08068dc5038"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a40046a529cc15cef88ac5ab589f83f739e2d332cb4d7399072242400ed68c9"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85fc223d9c76cabe5d0bff82214459189720dc135db45f9f66aa7cffbf9ff6c1"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0be9965f93c222fb9b4cc254235b3b2b215796c03ef5ee64f995b1b69af0762"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8378fa4a940f3fb509c081e06cb7f7f2adae8cf46ef258b0e0ed7519facd573e"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:33358883a4490287e67a2c391dfaea4d9359860281db3292b6886bf0be3d8692"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = 
"sha256:1d1fadd539298e70cac2f2cb36f5b8a65f742b9b9f1014dd4ea1f7785e2470bf"}, + {file = "rpds_py-0.25.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9a46c2fb2545e21181445515960006e85d22025bd2fe6db23e76daec6eb689fe"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:50f2c501a89c9a5f4e454b126193c5495b9fb441a75b298c60591d8a2eb92e1b"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d779b325cc8238227c47fbc53964c8cc9a941d5dbae87aa007a1f08f2f77b23"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:036ded36bedb727beeabc16dc1dad7cb154b3fa444e936a03b67a86dc6a5066e"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:245550f5a1ac98504147cba96ffec8fabc22b610742e9150138e5d60774686d7"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff7c23ba0a88cb7b104281a99476cccadf29de2a0ef5ce864959a52675b1ca83"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e37caa8cdb3b7cf24786451a0bdb853f6347b8b92005eeb64225ae1db54d1c2b"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2f48ab00181600ee266a095fe815134eb456163f7d6699f525dee471f312cf"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e5fc7484fa7dce57e25063b0ec9638ff02a908304f861d81ea49273e43838c1"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d3c10228d6cf6fe2b63d2e7985e94f6916fa46940df46b70449e9ff9297bd3d1"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:5d9e40f32745db28c1ef7aad23f6fc458dc1e29945bd6781060f0d15628b8ddf"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:35a8d1a24b5936b35c5003313bc177403d8bdef0f8b24f28b1c4a255f94ea992"}, + {file = "rpds_py-0.25.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:6099263f526efff9cf3883dfef505518730f7a7a93049b1d90d42e50a22b4793"}, + {file = "rpds_py-0.25.1.tar.gz", hash = "sha256:8960b6dac09b62dac26e75d7e2c4a22efb835d827a7278c34f72b2b84fa160e3"}, ] [[package]] @@ -3802,14 +3800,14 @@ win32 = ["pywin32 ; sys_platform == \"win32\""] [[package]] name = "setuptools" -version = "80.4.0" +version = "80.9.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" groups = ["test"] files = [ - {file = "setuptools-80.4.0-py3-none-any.whl", hash = "sha256:6cdc8cb9a7d590b237dbe4493614a9b75d0559b888047c1f67d49ba50fc3edb2"}, - {file = "setuptools-80.4.0.tar.gz", hash = "sha256:5a78f61820bc088c8e4add52932ae6b8cf423da2aff268c23f813cfbb13b4006"}, + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, ] [package.extras] @@ -4128,23 +4126,24 @@ test = ["pytest", "ruff"] [[package]] name = "tornado" -version = "6.4.2" +version = "6.5.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["test"] files = [ - {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, - {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"}, - {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"}, - {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"}, - {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"}, + {file = "tornado-6.5.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d50065ba7fd11d3bd41bcad0825227cc9a95154bad83239357094c36708001f7"}, + {file = "tornado-6.5.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9e9ca370f717997cb85606d074b0e5b247282cf5e2e1611568b8821afe0342d6"}, + {file = "tornado-6.5.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b77e9dfa7ed69754a54c89d82ef746398be82f749df69c4d3abe75c4d1ff4888"}, + {file = "tornado-6.5.1-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:253b76040ee3bab8bcf7ba9feb136436a3787208717a1fb9f2c16b744fba7331"}, + {file = "tornado-6.5.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:308473f4cc5a76227157cdf904de33ac268af770b2c5f05ca6c1161d82fdd95e"}, + {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:caec6314ce8a81cf69bd89909f4b633b9f523834dc1a352021775d45e51d9401"}, + {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:13ce6e3396c24e2808774741331638ee6c2f50b114b97a55c5b442df65fd9692"}, + {file = "tornado-6.5.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5cae6145f4cdf5ab24744526cc0f55a17d76f02c98f4cff9daa08ae9a217448a"}, + {file = "tornado-6.5.1-cp39-abi3-win32.whl", hash = "sha256:e0a36e1bc684dca10b1aa75a31df8bdfed656831489bc1e6a6ebed05dc1ec365"}, + {file = "tornado-6.5.1-cp39-abi3-win_amd64.whl", hash = "sha256:908e7d64567cecd4c2b458075589a775063453aeb1d2a1853eedb806922f568b"}, + {file = "tornado-6.5.1-cp39-abi3-win_arm64.whl", hash = "sha256:02420a0eb7bf617257b9935e2b754d1b63897525d8a289c9d65690d580b4dcf7"}, + {file = "tornado-6.5.1.tar.gz", hash = 
"sha256:84ceece391e8eb9b2b95578db65e920d2a61070260594819589609ba9bc6308c"}, ] [[package]] @@ -4165,26 +4164,26 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "types-python-dateutil" -version = "2.9.0.20241206" +version = "2.9.0.20250516" description = "Typing stubs for python-dateutil" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["test"] files = [ - {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, - {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, + {file = "types_python_dateutil-2.9.0.20250516-py3-none-any.whl", hash = "sha256:2b2b3f57f9c6a61fba26a9c0ffb9ea5681c9b83e69cd897c6b5f668d9c0cab93"}, + {file = "types_python_dateutil-2.9.0.20250516.tar.gz", hash = "sha256:13e80d6c9c47df23ad773d54b2826bd52dbbb41be87c3f339381c1700ad21ee5"}, ] [[package]] name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" +version = "4.14.0" +description = "Backported and Experimental Type Hints for Python 3.9+" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "test"] files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, + {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, + {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, ] [[package]] @@ -4409,116 +4408,116 @@ files = [ [[package]] name = "yarl" -version = "1.20.0" +version = "1.20.1" description = "Yet another URL library" optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f1f6670b9ae3daedb325fa55fbe31c22c8228f6e0b513772c2e1c623caa6ab22"}, - {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85a231fa250dfa3308f3c7896cc007a47bc76e9e8e8595c20b7426cac4884c62"}, - {file = "yarl-1.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a06701b647c9939d7019acdfa7ebbfbb78ba6aa05985bb195ad716ea759a569"}, - {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe"}, - {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195"}, - {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10"}, - {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634"}, - {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2"}, - {file = "yarl-1.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a"}, - {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867"}, - {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995"}, - {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487"}, - {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2"}, - {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61"}, - {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19"}, - {file = "yarl-1.20.0-cp310-cp310-win32.whl", hash = "sha256:f4d3fa9b9f013f7050326e165c3279e22850d02ae544ace285674cb6174b5d6d"}, - {file = "yarl-1.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:bc906b636239631d42eb8a07df8359905da02704a868983265603887ed68c076"}, - {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fdb5204d17cb32b2de2d1e21c7461cabfacf17f3645e4b9039f210c5d3378bf3"}, - {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eaddd7804d8e77d67c28d154ae5fab203163bd0998769569861258e525039d2a"}, - {file = "yarl-1.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:634b7ba6b4a85cf67e9df7c13a7fb2e44fa37b5d34501038d174a63eaac25ee2"}, - {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e"}, - {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9"}, - {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a"}, - {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2"}, - {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2"}, - {file = "yarl-1.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8"}, - {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902"}, - {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791"}, - {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f"}, - {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da"}, - {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4"}, - {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5"}, - {file = "yarl-1.20.0-cp311-cp311-win32.whl", hash = "sha256:db243357c6c2bf3cd7e17080034ade668d54ce304d820c2a58514a4e51d0cfd6"}, - {file = "yarl-1.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c12cd754d9dbd14204c328915e23b0c361b88f3cffd124129955e60a4fbfcfb"}, - {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f"}, - {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e"}, - {file = "yarl-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e"}, - {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33"}, - {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58"}, - {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f"}, - {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae"}, - {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018"}, - {file = "yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672"}, - {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8"}, - {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7"}, - {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594"}, - {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6"}, - {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1"}, - {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b"}, - {file = "yarl-1.20.0-cp312-cp312-win32.whl", hash = "sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64"}, - {file = "yarl-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c"}, - {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f"}, - {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3"}, - {file = "yarl-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d"}, - {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0"}, - {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501"}, - {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc"}, - {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d"}, - {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0"}, - {file = "yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a"}, - {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2"}, - {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9"}, - {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5"}, - {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877"}, - {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e"}, - {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384"}, - {file = "yarl-1.20.0-cp313-cp313-win32.whl", hash = "sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62"}, - {file = "yarl-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c"}, - {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051"}, - {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d"}, - {file = "yarl-1.20.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229"}, - {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1"}, - {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb"}, - {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00"}, - {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de"}, - {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5"}, - {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a"}, - {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9"}, - {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145"}, - {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda"}, - {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f"}, - {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd"}, - {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f"}, - {file = "yarl-1.20.0-cp313-cp313t-win32.whl", hash = "sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac"}, - {file = "yarl-1.20.0-cp313-cp313t-win_amd64.whl", hash = "sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe"}, - {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:119bca25e63a7725b0c9d20ac67ca6d98fa40e5a894bd5d4686010ff73397914"}, - {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35d20fb919546995f1d8c9e41f485febd266f60e55383090010f272aca93edcc"}, - {file = "yarl-1.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:484e7a08f72683c0f160270566b4395ea5412b4359772b98659921411d32ad26"}, - {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94"}, - {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d"}, - {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c"}, - {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c"}, - {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a"}, - {file = "yarl-1.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656"}, - {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c"}, - {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64"}, - {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20"}, - {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa"}, - {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5"}, - {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0"}, - {file = 
"yarl-1.20.0-cp39-cp39-win32.whl", hash = "sha256:b7fa0cb9fd27ffb1211cde944b41f5c67ab1c13a13ebafe470b1e206b8459da8"}, - {file = "yarl-1.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4fad6e5189c847820288286732075f213eabf81be4d08d6cc309912e62be5b7"}, - {file = "yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124"}, - {file = "yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307"}, + {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4"}, + {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a"}, + {file = "yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23"}, + {file = "yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24"}, + {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13"}, + {file = "yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8"}, + {file = "yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16"}, + {file = "yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e"}, + {file = "yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b"}, + {file = "yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b"}, + {file = 
"yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e"}, + {file = "yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773"}, + {file = "yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e"}, + {file = "yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9"}, + {file = "yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a"}, + {file = "yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd"}, + {file = 
"yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004"}, + {file = "yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5"}, + {file = "yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698"}, + {file = "yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a"}, + {file = "yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3"}, + {file = "yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b"}, + {file = 
"yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1"}, + {file = "yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7"}, + {file = "yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c"}, + {file = "yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d"}, + {file = "yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf"}, + {file = "yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e"}, + {file = "yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d"}, + {file = "yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f"}, + {file = "yarl-1.20.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3"}, + {file = "yarl-1.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b"}, + {file = "yarl-1.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d"}, + {file = "yarl-1.20.1-cp39-cp39-win32.whl", hash = "sha256:b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06"}, + {file = "yarl-1.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00"}, + {file = "yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77"}, + {file = "yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac"}, ] [package.dependencies] From 1dc33436780db259186ae3806951ec4e679a2f9f Mon Sep 17 00:00:00 2001 From: Laura Fitzgerald Date: Thu, 29 May 2025 15:12:29 +0100 Subject: [PATCH 477/496] remote ray client example Co-authored-by: bkeane Co-authored-by: chipspeak Co-authored-by: Bobbins --- .../remote_ray_job_client.ipynb | 103 ++++++++++++++++++ 1 file changed, 103 insertions(+) create mode 100644 demo-notebooks/additional-demos/remote_ray_job_client.ipynb diff --git a/demo-notebooks/additional-demos/remote_ray_job_client.ipynb b/demo-notebooks/additional-demos/remote_ray_job_client.ipynb new file mode 100644 index 00000000..b2be6826 --- /dev/null +++ b/demo-notebooks/additional-demos/remote_ray_job_client.ipynb @@ -0,0 +1,103 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Submit a training job remotely to Ray Dashboard protected by oAuth.\n", + "This notebook will demonstrate how to submit Ray jobs to an existing Raycluster, using the CodeFlare SDK.\n", + "\n", + "### Requirements\n", + "* Ray Cluster running in OpenShift protected by oAuth.\n", + "* The Ray Dashboard URL for the Ray Cluster.\n", + "* An OpenShift authorization token with permissions to access the Route.\n", + "* A 
training job, defined in python, within the working directory.\n", + "* A requirements.txt or equivalent file containing any additional packages to install onto the Ray images." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Import dependencies from codeflare-sdk\n", + "from codeflare_sdk import RayJobClient" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Setup Authentication Configuration \n", + "auth_token = \"XXXX\" # Replace with the actual token\n", + "header = {\n", + " 'Authorization': f'Bearer {auth_token}'\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Gather the dashboard URL (provided by the creator of the RayCluster)\n", + "ray_dashboard = \"XXXX\" # Replace with the Ray dashboard URL" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#Initialize the RayJobClient\n", + "client = RayJobClient(address=ray_dashboard, headers=header, verify=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Submit a job using the RayJobClient\n", + "entrypoint_command = \"python XXXX\" # Replace with the training script name\n", + "submission_id = client.submit_job(\n", + " entrypoint=entrypoint_command,\n", + " runtime_env={\"working_dir\": \"./\",\"pip\": \"requirements.txt\"},\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get the job's status\n", + "client.get_job_status(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get the job's logs\n", + "client.get_job_logs(submission_id)" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 973d3200caebb4efc844be23d244e37f29055200 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Wed, 11 Jun 2025 14:03:56 +0100 Subject: [PATCH 478/496] fix: add actions permissions to release action --- .github/workflows/release.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 50e3f9e1..c3e47dab 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -37,6 +37,7 @@ jobs: contents: write id-token: write # This permission is required for trusted publishing pull-requests: write # This permission is required for creating PRs + actions: write # This permission is required for running actions steps: - name: Checkout the repository uses: actions/checkout@v4 From 6e43e0905be46d5707b7518ecbb7ab99d645dd25 Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Tue, 1 Jul 2025 09:41:13 +0100 Subject: [PATCH 479/496] task(RHOAIENG-28599): Removed dep fields min_cpus max_cpus min_memory max_memory Signed-off-by: Pat O'Connor --- .../user-docs/cluster-configuration.rst | 8 ------- src/codeflare_sdk/ray/cluster/config.py | 24 ------------------- src/codeflare_sdk/ray/cluster/test_config.py | 10 +++----- 3 files changed, 3 insertions(+), 39 deletions(-) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index ad4e248e..18fceeac 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -172,11 
+172,3 @@ deprecated. - ``head_cpu_requests``, ``head_cpu_limits`` * - ``head_memory`` - ``head_memory_requests``, ``head_memory_limits`` - * - ``min_cpus`` - - ``worker_cpu_requests`` - * - ``max_cpus`` - - ``worker_cpu_limits`` - * - ``min_memory`` - - ``worker_memory_requests`` - * - ``max_memory`` - - ``worker_memory_limits`` diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index bce54c32..ec89924a 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -58,16 +58,8 @@ class ClusterConfiguration: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} head_tolerations: List of tolerations for head nodes. - min_cpus: - The minimum number of CPUs to allocate to each worker. - max_cpus: - The maximum number of CPUs to allocate to each worker. num_workers: The number of workers to create. - min_memory: - The minimum amount of memory to allocate to each worker. - max_memory: - The maximum amount of memory to allocate to each worker. worker_tolerations: List of tolerations for worker nodes. appwrapper: @@ -122,13 +114,9 @@ class ClusterConfiguration: head_tolerations: Optional[List[V1Toleration]] = None worker_cpu_requests: Union[int, str] = 1 worker_cpu_limits: Union[int, str] = 1 - min_cpus: Optional[Union[int, str]] = None # Deprecating - max_cpus: Optional[Union[int, str]] = None # Deprecating num_workers: int = 1 worker_memory_requests: Union[int, str] = 2 worker_memory_limits: Union[int, str] = 2 - min_memory: Optional[Union[int, str]] = None # Deprecating - max_memory: Optional[Union[int, str]] = None # Deprecating worker_tolerations: Optional[List[V1Toleration]] = None appwrapper: bool = False envs: Dict[str, str] = field(default_factory=dict) @@ -250,12 +238,6 @@ def _cpu_to_resource(self): "head_cpus is being deprecated, use head_cpu_requests and head_cpu_limits" ) self.head_cpu_requests = self.head_cpu_limits = self.head_cpus - if self.min_cpus: - warnings.warn("min_cpus is being deprecated, use worker_cpu_requests") - self.worker_cpu_requests = self.min_cpus - if self.max_cpus: - warnings.warn("max_cpus is being deprecated, use worker_cpu_limits") - self.worker_cpu_limits = self.max_cpus def _memory_to_resource(self): if self.head_memory: @@ -263,12 +245,6 @@ def _memory_to_resource(self): "head_memory is being deprecated, use head_memory_requests and head_memory_limits" ) self.head_memory_requests = self.head_memory_limits = self.head_memory - if self.min_memory: - warnings.warn("min_memory is being deprecated, use worker_memory_requests") - self.worker_memory_requests = f"{self.min_memory}G" - if self.max_memory: - warnings.warn("max_memory is being deprecated, use worker_memory_limits") - self.worker_memory_limits = f"{self.max_memory}G" def _validate_types(self): """Validate the types of all fields in the ClusterConfiguration dataclass.""" diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index 23da97c3..6f002df1 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -141,19 +141,15 @@ def test_cluster_config_deprecation_conversion(mocker): name="test", head_cpus=3, head_memory=16, - min_memory=3, - max_memory=4, - min_cpus=1, - max_cpus=2, ) assert config.head_cpu_requests == 3 assert config.head_cpu_limits == 3 assert config.head_memory_requests == "16G" assert config.head_memory_limits == "16G" - assert config.worker_memory_requests == "3G" 
- assert config.worker_memory_limits == "4G" + assert config.worker_memory_requests == "2G" + assert config.worker_memory_limits == "2G" assert config.worker_cpu_requests == 1 - assert config.worker_cpu_limits == 2 + assert config.worker_cpu_limits == 1 def test_gcs_fault_tolerance_config_validation(): From 6e47ec94bbc8552aee099b82c4d9df9b789cc761 Mon Sep 17 00:00:00 2001 From: Pawel Paszki Date: Mon, 7 Jul 2025 12:48:49 +0100 Subject: [PATCH 480/496] test: increase limits to fix ray cluster issues in upgrade tests (#854) --- tests/upgrade/raycluster_sdk_upgrade_test.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/upgrade/raycluster_sdk_upgrade_test.py b/tests/upgrade/raycluster_sdk_upgrade_test.py index 7c8b2922..7a6d583e 100644 --- a/tests/upgrade/raycluster_sdk_upgrade_test.py +++ b/tests/upgrade/raycluster_sdk_upgrade_test.py @@ -50,12 +50,12 @@ def run_mnist_raycluster_sdk_oauth(self): num_workers=1, head_cpu_requests=1, head_cpu_limits=1, - head_memory_requests=4, - head_memory_limits=4, + head_memory_requests=6, + head_memory_limits=8, worker_cpu_requests=1, worker_cpu_limits=1, - worker_memory_requests=4, - worker_memory_limits=4, + worker_memory_requests=6, + worker_memory_limits=8, image=ray_image, write_to_file=True, verify_tls=False, From fc6baa2482ff129264b9a312d146a5552f91e576 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Mon, 7 Jul 2025 11:49:44 +0000 Subject: [PATCH 481/496] Updated coverage.svg --- coverage.svg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/coverage.svg b/coverage.svg index 59d64b37..a8c7e72a 100644 --- a/coverage.svg +++ b/coverage.svg @@ -15,7 +15,7 @@ coverage coverage - 93% - 93% + 92% + 92% From 529e32d4766671ab81c2c4f036212afce9c355ae Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Fri, 4 Jul 2025 10:42:18 +0100 Subject: [PATCH 482/496] RHOAIENG-27828: Update Ray Version references --- .../additional-demos/local_interactive.ipynb | 2 +- .../additional-demos/ray_job_client.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- .../guided-demos/1_cluster_job_client.ipynb | 2 +- .../guided-demos/2_basic_interactive.ipynb | 2 +- .../guided-demos/3_widget_example.ipynb | 2 +- .../1_cluster_job_client.ipynb | 2 +- .../2_basic_interactive.ipynb | 2 +- .../notebook-ex-outputs/interactivetest.yaml | 6 +- .../notebook-ex-outputs/jobtest.yaml | 6 +- .../notebook-ex-outputs/raytest.yaml | 6 +- .../preview_nbs/0_basic_ray.ipynb | 2 +- .../preview_nbs/1_cluster_job_client.ipynb | 2 +- .../preview_nbs/2_basic_interactive.ipynb | 2 +- .../user-docs/cluster-configuration.rst | 4 +- poetry.lock | 196 +++++++++++++++--- pyproject.toml | 2 +- src/codeflare_sdk/common/utils/constants.py | 2 +- .../appwrapper/test-case-bad.yaml | 2 +- .../appwrapper/unit-test-all-params.yaml | 2 +- tests/test_cluster_yamls/kueue/aw_kueue.yaml | 2 +- .../kueue/ray_cluster_kueue.yaml | 2 +- .../ray/default-appwrapper.yaml | 2 +- .../ray/default-ray-cluster.yaml | 2 +- .../ray/unit-test-all-params.yaml | 2 +- .../support_clusters/test-aw-a.yaml | 2 +- .../support_clusters/test-aw-b.yaml | 2 +- .../support_clusters/test-rc-a.yaml | 2 +- .../support_clusters/test-rc-b.yaml | 2 +- 29 files changed, 197 insertions(+), 69 deletions(-) diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index be30ba40..a46e5d9f 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ 
-37,7 +37,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 599b2425..99187b4a 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -43,7 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 7e3bbd08..9a2ed8ca 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -49,7 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 2aef3184..725866d7 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -43,7 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index df7728b3..890bdb47 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -46,7 +46,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/3_widget_example.ipynb b/demo-notebooks/guided-demos/3_widget_example.ipynb index 56bd0a5e..d09271c9 100644 --- a/demo-notebooks/guided-demos/3_widget_example.ipynb +++ b/demo-notebooks/guided-demos/3_widget_example.ipynb @@ -49,7 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index b1f0d5f6..6f5850ac 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -43,7 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index bf0b49de..58a6c9e2 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -46,7 +46,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml index 5182b92a..443da33c 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/interactivetest.yaml @@ -81,7 +81,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.46.0-py311-cu121 + image: quay.io/modh/ray:2.47.1-py311-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -108,7 +108,7 @@ spec: memory: 8G nvidia.com/gpu: 0 imagePullSecrets: [] - rayVersion: 2.1.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-interactivetest maxReplicas: 2 @@ -147,7 +147,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.46.0-py311-cu121 + image: quay.io/modh/ray:2.47.1-py311-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml index 5df084fd..5d5b0b0e 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/jobtest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.46.0-py311-cu121 + image: quay.io/modh/ray:2.47.1-py311-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -97,7 +97,7 @@ spec: memory: 8G nvidia.com/gpu: 0 imagePullSecrets: [] - rayVersion: 2.1.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-jobtest maxReplicas: 2 @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.46.0-py311-cu121 + image: quay.io/modh/ray:2.47.1-py311-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml index ae81b795..81796687 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/raytest.yaml @@ -70,7 +70,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.46.0-py311-cu121 + image: quay.io/modh/ray:2.47.1-py311-cu121 imagePullPolicy: Always lifecycle: preStop: @@ -97,7 +97,7 @@ spec: memory: 8G nvidia.com/gpu: 0 imagePullSecrets: [] - rayVersion: 2.1.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-raytest maxReplicas: 2 @@ -127,7 +127,7 @@ spec: value: /home/ray/workspace/tls/server.key - name: RAY_TLS_CA_CERT value: /home/ray/workspace/tls/ca.crt - image: quay.io/modh/ray:2.46.0-py311-cu121 + image: quay.io/modh/ray:2.47.1-py311-cu121 lifecycle: preStop: exec: diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 8d97751a..119d9ce6 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -49,7 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 
3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index 58cb4958..db574ceb 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -43,7 +43,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 4538c8d5..8f70f6c2 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -46,7 +46,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index 18fceeac..c810148b 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -35,12 +35,12 @@ requirements for creating the Ray Cluster. The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version: - - For Python 3.11: `quay.io/modh/ray:2.46.0-py311-cu121` + - For Python 3.11: `quay.io/modh/ray:2.47.1-py311-cu121` If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default. If you are using ROCm compatible GPUs you - can use `quay.io/modh/ray:2.35.0-py311-rocm62`. You can also find + can use `quay.io/modh/ray:2.47.1-py311-rocm62`. You can also find documentation on building a custom image `here `__. 
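For reference, a minimal sketch of the image override described in the cluster-configuration docs above — assuming only the documented ClusterConfiguration fields (name, namespace, num_workers, image) and the cluster.apply() / cluster.wait_ready() calls used in the demo notebooks; the cluster name and namespace below are placeholders, not part of the patches:

    from codeflare_sdk import Cluster, ClusterConfiguration

    cluster = Cluster(ClusterConfiguration(
        name="rocm-example",                           # placeholder cluster name
        namespace="example-namespace",                 # placeholder namespace
        num_workers=1,
        image="quay.io/modh/ray:2.47.1-py311-rocm62",  # overrides the Python-version default image
    ))
    cluster.apply()       # create the RayCluster (replaces the older cluster.up())
    cluster.wait_ready()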
diff --git a/poetry.lock b/poetry.lock index 08d51b6f..89bca284 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1364,6 +1364,30 @@ files = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] +[[package]] +name = "importlib-metadata" +version = "8.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name = "iniconfig" version = "2.1.0" @@ -2419,6 +2443,87 @@ paramiko = "*" pyyaml = "*" six = "*" +[[package]] +name = "opentelemetry-api" +version = "1.34.1" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "opentelemetry_api-1.34.1-py3-none-any.whl", hash = "sha256:b7df4cb0830d5a6c29ad0c0691dbae874d8daefa934b8b1d642de48323d32a8c"}, + {file = "opentelemetry_api-1.34.1.tar.gz", hash = "sha256:64f0bd06d42824843731d05beea88d4d4b6ae59f9fe347ff7dfa2cc14233bbb3"}, +] + +[package.dependencies] +importlib-metadata = ">=6.0,<8.8.0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "opentelemetry-exporter-prometheus" +version = "0.55b1" +description = "Prometheus Metric Exporter for OpenTelemetry" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "opentelemetry_exporter_prometheus-0.55b1-py3-none-any.whl", hash = "sha256:f364fbbff9e5de37a112ff104d1185fb1d7e2046c5ab5911e5afebc7ab3ddf0e"}, + {file = "opentelemetry_exporter_prometheus-0.55b1.tar.gz", hash = "sha256:d13ec0b22bf394113ff1ada5da98133a4b051779b803dae183188e26c4bd9ee0"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-sdk = ">=1.34.1,<1.35.0" +prometheus-client = ">=0.5.0,<1.0.0" + +[[package]] +name = "opentelemetry-proto" +version = "1.11.1" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "opentelemetry-proto-1.11.1.tar.gz", hash = "sha256:5df0ec69510a9e2414c0410d91a698ded5a04d3dd37f7d2a3e119e3c42a30647"}, + {file = "opentelemetry_proto-1.11.1-py3-none-any.whl", hash = "sha256:4d4663123b4777823aa533f478c6cef3ecbcf696d8dc6ac7fd6a90f37a01eafd"}, +] + +[package.dependencies] +protobuf = ">=3.13.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.34.1" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "opentelemetry_sdk-1.34.1-py3-none-any.whl", hash = "sha256:308effad4059562f1d92163c61c8141df649da24ce361827812c40abb2a1e96e"}, + {file = "opentelemetry_sdk-1.34.1.tar.gz", hash = "sha256:8091db0d763fcd6098d4781bbc80ff0971f94e260739aa6afe6fd379cdf3aa4d"}, +] + 
+[package.dependencies] +opentelemetry-api = "1.34.1" +opentelemetry-semantic-conventions = "0.55b1" +typing-extensions = ">=4.5.0" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.55b1" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "opentelemetry_semantic_conventions-0.55b1-py3-none-any.whl", hash = "sha256:5da81dfdf7d52e3d37f8fe88d5e771e191de924cfff5f550ab0b8f7b2409baed"}, + {file = "opentelemetry_semantic_conventions-0.55b1.tar.gz", hash = "sha256:ef95b1f009159c28d7a7849f5cbc71c4c34c845bb514d66adfdf1b3fff3598b3"}, +] + +[package.dependencies] +opentelemetry-api = "1.34.1" +typing-extensions = ">=4.5.0" + [[package]] name = "overrides" version = "7.7.0" @@ -3455,36 +3560,36 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "ray" -version = "2.46.0" +version = "2.47.1" description = "Ray provides a simple, universal API for building distributed applications." optional = false python-versions = ">=3.9" groups = ["main"] files = [ - {file = "ray-2.46.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:719244b84df79502e5f09497f256618d94d78d66fbaf229422008a0568d3a0ff"}, - {file = "ray-2.46.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4378a86919e6643238a1094f711b87fa8dc1a18b998d4190f69ab33c64a22a8c"}, - {file = "ray-2.46.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:396b912a4dbf64966e2fdfca9facbcafe57b792ca4842ac5ae17507fdbdfe89f"}, - {file = "ray-2.46.0-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:c12850608c57c8afd9613a9f757d77663c50d4bd4e77ba2f181425052520c01a"}, - {file = "ray-2.46.0-cp310-cp310-win_amd64.whl", hash = "sha256:bc953aa4879c7a77893f921905df5cf65227cafd94fbc8273bec65ea393eacdd"}, - {file = "ray-2.46.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:942ba51de6f9cd7fb2ed17618181af48ce6b9517743d3235d846ec32295eca76"}, - {file = "ray-2.46.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af84f3ed0854bb6de28192ca9e0a3bfa1eb34d69f118ae6348522198896480c8"}, - {file = "ray-2.46.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:81c8ce8b7ba33cb607ec78f5eb2555470e3046bb317732d8282e8189bb58ccbd"}, - {file = "ray-2.46.0-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:d4ddedc3f4d48df564bcee7b131c98c9f898fef0a57483f4ba335f47f951a62f"}, - {file = "ray-2.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:130415c4d231830156f37ce70acbdb5fdee10f6886adc4e85bdc4533d51c24c6"}, - {file = "ray-2.46.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:d1f37ead29299637144726f809c2e0ff958dd9c0e75930ef614156d6a0a3a57f"}, - {file = "ray-2.46.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7a064acfeee7f0677d9e3f25daef9c59593559faea764b44a3e2c5331d5d832"}, - {file = "ray-2.46.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:006cbe1a8fdc37664114aa218773100ee891399785e256c202e48958d2dac167"}, - {file = "ray-2.46.0-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:5cec1edda93f618ffd2301f81d5398037f03fa9b16825e7e4d8a00ae7a9a4381"}, - {file = "ray-2.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:7d3160f8d187baaea91a86d16a9fd81136cf8607419c94b7a74d66fce774b5c2"}, - {file = "ray-2.46.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:b2fc2c43ea0a37521193c61ef9a27b6fca8dbab116a58a52fd44344cd73e1ece"}, - {file = "ray-2.46.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4296dd8c0174256a04ee4b54abe013b6802a45fb85fb7cfdb1375231965d6d4d"}, - {file = 
"ray-2.46.0-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:808daece1f12bd8924b9c6382a0f98da6f5c6886cfb271ed8d89407a89413cd5"}, - {file = "ray-2.46.0-cp313-cp313-manylinux2014_x86_64.whl", hash = "sha256:a5a28c0a311d2c3221dcf729c40898a6df82466bb5af21e81be0453e09856adf"}, - {file = "ray-2.46.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:e0ec198c16d0e9af7f03242ef7ad7d548eee37a918193917278a124ddd57410a"}, - {file = "ray-2.46.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e31568818973efa4f8ce18b82bce03089395a62ac9fe639e94d755959f607fe9"}, - {file = "ray-2.46.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:7c44a98cb24f4905e898d05b787cbe9f267a9f66c1e1f8cda50814f8b3673be2"}, - {file = "ray-2.46.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:91ea998a49578b1450cbef60705f6ece8622a262a3d764d5c99ba89b741de5d0"}, - {file = "ray-2.46.0-cp39-cp39-win_amd64.whl", hash = "sha256:018e98c9745eae53b53ad14fef1ca1c43bb64c39c3cceb9e6d4517729396003b"}, + {file = "ray-2.47.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:36a30930e8d265e708df96f37f6f1f5484f4b97090d505912f992e045a69d310"}, + {file = "ray-2.47.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7c03a1e366d3a868a55f8c2f728f5ce35ac85ddf093ac81d0c1a35bf1c25c377"}, + {file = "ray-2.47.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:6fc7df8657b8df684b77c2d1b643137ad745aa1c12ade34743f06cca79003df0"}, + {file = "ray-2.47.1-cp310-cp310-manylinux2014_x86_64.whl", hash = "sha256:84a96b4720175a0000521a48eb7aa915f3b419bb5cd6172d8dee005c3f23b813"}, + {file = "ray-2.47.1-cp310-cp310-win_amd64.whl", hash = "sha256:44900a1a72cb3bfb331db160a8975737c25945a97f376c70e72ccf35adf3b744"}, + {file = "ray-2.47.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a640d447e0e6cf63f85b9220c883ec02bb2b8e40a9c1d84efa012795c769ba68"}, + {file = "ray-2.47.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:feeba1e715cfd8737d3adcd2018d0cdabb7c6084fa4b093e638e6c7d42f3c956"}, + {file = "ray-2.47.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:db5ff652e9035f03c65e1742a706b76519f6e8a6744cc005396053ac8766fc46"}, + {file = "ray-2.47.1-cp311-cp311-manylinux2014_x86_64.whl", hash = "sha256:48961229614b2b56a535be510c8abc76e99a9aa7fa195b5c949bd0c6c69af40a"}, + {file = "ray-2.47.1-cp311-cp311-win_amd64.whl", hash = "sha256:bd1cba64070db06bbf79c0e075cdc4529193e2d0b19564f4f057b4193b29e912"}, + {file = "ray-2.47.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:322049c4546cf67e5efdad90c371c5508acbb193e5aaaf4038103c6c5ce1f578"}, + {file = "ray-2.47.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:e6d9c78e53ac89cabbc4056aecfec53c506c692e3132af9dae941d6180ef462f"}, + {file = "ray-2.47.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:cd4e7eb475487364b5209963b17cefedcb7fbd3a816fdb6def7ea533ebd72424"}, + {file = "ray-2.47.1-cp312-cp312-manylinux2014_x86_64.whl", hash = "sha256:3eaeaeec3bbe2ca6493e530c30473d84b8580a7ac3256bb9183d8c63def5a92f"}, + {file = "ray-2.47.1-cp312-cp312-win_amd64.whl", hash = "sha256:601f23ba89918b7b3ffebf967328f7bdb605deaf8c103aad7820dc2722fe450c"}, + {file = "ray-2.47.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8cd625d469ce15391e5f1f44ddf8dd30b2380f917603fa0172661229acb0011f"}, + {file = "ray-2.47.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:e578929f58b3f0c59c7544a96d864e26278238b755d13cd19ae798070c848e57"}, + {file = "ray-2.47.1-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:d6ed6d182e25d6f77179dc77bc97a749c81765b13cb671a46db3203029389663"}, + {file = 
"ray-2.47.1-cp313-cp313-manylinux2014_x86_64.whl", hash = "sha256:252a471e8afb918b105cdbffb4cbebb0143baad75a06c8ffcde27ac317579ccb"}, + {file = "ray-2.47.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c21720f283a3df360ddec002a592ddfbaf520faf4cb1b86562a7b7c196ad96a0"}, + {file = "ray-2.47.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:6c7b4abe112c4d698243e30023bcbffe2c2c9a68416b95a6a0d50f9ca5725545"}, + {file = "ray-2.47.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:21f2689c1bbc688f9cd31a18bae2c9582027e91b508073849441167bb5077816"}, + {file = "ray-2.47.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:106817f80087d21d24e63f6e56ea5ab7c387a25105eb65e6b783551f569534ea"}, + {file = "ray-2.47.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee02ba9b8cd45c4eadc457183f6d80f1701b85f966d02cdacd5b11867cb7375"}, ] [package.dependencies] @@ -3499,6 +3604,9 @@ jsonschema = "*" msgpack = ">=1.0.0,<2.0.0" numpy = {version = ">=1.20", optional = true, markers = "extra == \"data\""} opencensus = {version = "*", optional = true, markers = "extra == \"default\""} +opentelemetry-exporter-prometheus = {version = "*", optional = true, markers = "extra == \"default\""} +opentelemetry-proto = {version = "*", optional = true, markers = "extra == \"default\""} +opentelemetry-sdk = {version = "*", optional = true, markers = "extra == \"default\""} packaging = "*" pandas = {version = ">=1.3", optional = true, markers = "extra == \"data\""} prometheus-client = {version = ">=0.7.1", optional = true, markers = "extra == \"default\""} @@ -3519,19 +3627,19 @@ virtualenv = {version = ">=20.0.24,<20.21.1 || >20.21.1", optional = true, marke [package.extras] adag = ["cupy-cuda12x ; sys_platform != \"darwin\""] -air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "numpy (>=1.20)", "opencensus", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] -all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < 
\"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.46.0)", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +air = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "numpy (>=1.20)", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +all-cpp = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "cupy-cuda12x ; sys_platform != \"darwin\"", "dm-tree", "fastapi", "fsspec", "grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\"", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "gymnasium (==1.0.0)", "lz4", "memray ; sys_platform != \"win32\"", "numpy (>=1.20)", "opencensus", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk", "ormsgpack (==1.7.0)", "pandas", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "pyyaml", "ray-cpp (==2.47.1)", "requests", "scipy", "smart-open", "starlette", "tensorboardX (>=1.9)", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] cgraph = ["cupy-cuda12x ; sys_platform 
!= \"darwin\""] client = ["grpcio", "grpcio (!=1.56.0) ; sys_platform == \"darwin\""] -cpp = ["ray-cpp (==2.46.0)"] +cpp = ["ray-cpp (==2.47.1)"] data = ["fsspec", "numpy (>=1.20)", "pandas (>=1.3)", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)"] -default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] -llm = ["aiohttp (>=3.7)", "aiohttp-cors", "async-timeout ; python_version < \"3.11\"", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "jsonref (>=1.1.0)", "jsonschema", "ninja", "numpy (>=1.20)", "opencensus", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "vllm (>=0.8.5)", "watchfiles"] +default = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "virtualenv (>=20.0.24,!=20.21.1)"] +llm = ["aiohttp (>=3.7)", "aiohttp-cors", "async-timeout ; python_version < \"3.11\"", "colorful", "fastapi", "fsspec", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "jsonref (>=1.1.0)", "jsonschema", "ninja", "numpy (>=1.20)", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk", "pandas (>=1.3)", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "typer", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "vllm (>=0.8.5)", "watchfiles"] observability = ["memray ; sys_platform != \"win32\"", "opentelemetry-api", "opentelemetry-exporter-otlp", "opentelemetry-sdk"] rllib = ["dm-tree", "fsspec", "gymnasium (==1.0.0)", "lz4", "ormsgpack (==1.7.0)", "pandas", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pyyaml", "requests", "scipy", "tensorboardX (>=1.9)"] -serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] 
-serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +serve = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] +serve-grpc = ["aiohttp (>=3.7)", "aiohttp-cors", "colorful", "fastapi", "grpcio (>=1.32.0) ; python_version < \"3.10\"", "grpcio (>=1.42.0) ; python_version >= \"3.10\"", "opencensus", "opentelemetry-exporter-prometheus", "opentelemetry-proto", "opentelemetry-sdk", "prometheus-client (>=0.7.1)", "py-spy (>=0.2.0) ; python_version < \"3.12\"", "py-spy (>=0.4.0) ; python_version >= \"3.12\"", "pyOpenSSL", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "smart-open", "starlette", "uvicorn[standard]", "virtualenv (>=20.0.24,!=20.21.1)", "watchfiles"] train = ["fsspec", "pandas", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "pydantic (<2.0.dev0 || >=2.5.dev0,<3)", "requests", "tensorboardX (>=1.9)"] tune = ["fsspec", "pandas", "pyarrow (<18) ; sys_platform == \"darwin\" and platform_machine == \"x86_64\"", "pyarrow (>=9.0.0)", "requests", "tensorboardX (>=1.9)"] @@ -4525,7 +4633,27 @@ idna = ">=2.0" multidict = ">=4.0" propcache = ">=0.2.1" +[[package]] +name = "zipp" +version = "3.23.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, + {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [metadata] lock-version = "2.1" python-versions = "^3.11" -content-hash = "7cdd47cb60807e3032d9746b1c3ace156a10f9b6b430c740f0201a8aed7ac860" +content-hash = "29175d71269b0c8a1a53eb7027e8d824ef150bf8b4aa4cea8a9ed75684aa6e43" diff --git a/pyproject.toml b/pyproject.toml index 03c390c3..8615f894 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ keywords = ['codeflare', 'python', 'sdk', 'client', 'batch', 'scale'] python = "^3.11" openshift-client = "1.0.18" rich = ">=12.5,<14.0" -ray = {version = "2.46.0", extras = ["data", "default"]} +ray = {version = "2.47.1", extras = ["data", "default"]} 
kubernetes = ">= 27.2.0" cryptography = "43.0.3" executing = "1.2.0" diff --git a/src/codeflare_sdk/common/utils/constants.py b/src/codeflare_sdk/common/utils/constants.py index a10e3e0e..270d8d46 100644 --- a/src/codeflare_sdk/common/utils/constants.py +++ b/src/codeflare_sdk/common/utils/constants.py @@ -1 +1 @@ -RAY_VERSION = "2.46.0" +RAY_VERSION = "2.47.1" diff --git a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml index 4aa4d3dd..c6972565 100644 --- a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml +++ b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml @@ -66,7 +66,7 @@ spec: requests: cpu: 2 memory: 8G - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-unit-test-cluster maxReplicas: 2 diff --git a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml index bfe23893..aa097dd9 100644 --- a/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/appwrapper/unit-test-all-params.yaml @@ -138,7 +138,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-aw-all-params maxReplicas: 10 diff --git a/tests/test_cluster_yamls/kueue/aw_kueue.yaml b/tests/test_cluster_yamls/kueue/aw_kueue.yaml index 08b6d5cf..f5f16406 100644 --- a/tests/test_cluster_yamls/kueue/aw_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/aw_kueue.yaml @@ -93,7 +93,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-unit-test-aw-kueue maxReplicas: 2 diff --git a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml index bb8e8494..d6db4f4a 100644 --- a/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml +++ b/tests/test_cluster_yamls/kueue/ray_cluster_kueue.yaml @@ -93,7 +93,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-unit-test-cluster-kueue maxReplicas: 2 diff --git a/tests/test_cluster_yamls/ray/default-appwrapper.yaml b/tests/test_cluster_yamls/ray/default-appwrapper.yaml index 22fba282..27828163 100644 --- a/tests/test_cluster_yamls/ray/default-appwrapper.yaml +++ b/tests/test_cluster_yamls/ray/default-appwrapper.yaml @@ -91,7 +91,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-default-appwrapper maxReplicas: 1 diff --git a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml index 75713321..d2e6b05a 100644 --- a/tests/test_cluster_yamls/ray/default-ray-cluster.yaml +++ b/tests/test_cluster_yamls/ray/default-ray-cluster.yaml @@ -83,7 +83,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-default-cluster maxReplicas: 1 diff --git a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml index 98da45b1..ee0878c1 100644 --- a/tests/test_cluster_yamls/ray/unit-test-all-params.yaml +++ b/tests/test_cluster_yamls/ray/unit-test-all-params.yaml @@ -129,7 +129,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: 
odh-ca-cert - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-test-all-params maxReplicas: 10 diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml index 4ffbef98..00e8755d 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml @@ -91,7 +91,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-test-cluster-a maxReplicas: 1 diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml index 56d2a8e2..be01e68b 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml @@ -91,7 +91,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-test-cluster-b maxReplicas: 1 diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml index e504566a..553d285a 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml @@ -82,7 +82,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-test-cluster-a maxReplicas: 1 diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml index 22162295..1b84ee02 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml @@ -82,7 +82,7 @@ spec: name: odh-trusted-ca-bundle optional: true name: odh-ca-cert - rayVersion: 2.46.0 + rayVersion: 2.47.1 workerGroupSpecs: - groupName: small-group-test-rc-b maxReplicas: 1 From f9f86719e09e56995efa5ad03facb21846041574 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Mon, 7 Jul 2025 13:14:04 +0100 Subject: [PATCH 483/496] RHOAIENG-27828: Template runtime image SHA --- .../notebook-ex-outputs/0_basic_ray.ipynb | 2 +- src/codeflare_sdk/common/utils/constants.py | 2 + .../common/utils/unit_test_support.py | 52 +++++++++++++------ .../ray/appwrapper/test_awload.py | 9 +++- .../ray/cluster/build_ray_cluster.py | 4 +- tests/e2e/support.py | 4 +- .../appwrapper/test-case-bad.yaml | 4 +- .../support_clusters/test-aw-a.yaml | 4 +- .../support_clusters/test-aw-b.yaml | 4 +- .../support_clusters/test-rc-a.yaml | 4 +- .../support_clusters/test-rc-b.yaml | 4 +- 11 files changed, 61 insertions(+), 32 deletions(-) diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 8d97751a..119d9ce6 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -49,7 +49,7 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.46.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the 
default." ] diff --git a/src/codeflare_sdk/common/utils/constants.py b/src/codeflare_sdk/common/utils/constants.py index 270d8d46..9721ac85 100644 --- a/src/codeflare_sdk/common/utils/constants.py +++ b/src/codeflare_sdk/common/utils/constants.py @@ -1 +1,3 @@ RAY_VERSION = "2.47.1" +# Below references ray:2.47.1-py311-cu121 +CUDA_RUNTIME_IMAGE = "quay.io/modh/ray@sha256:6d076aeb38ab3c34a6a2ef0f58dc667089aa15826fa08a73273c629333e12f1e" diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index eee8d933..b382ec01 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -14,6 +14,7 @@ import string import sys +from codeflare_sdk.common.utils import constants from codeflare_sdk.ray.cluster.cluster import ( Cluster, ClusterConfiguration, @@ -68,7 +69,7 @@ def create_cluster_wrong_type(): worker_extended_resource_requests={"nvidia.com/gpu": 7}, appwrapper=True, image_pull_secrets=["unit-test-pull-secret"], - image="quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707", + image=constants.CUDA_RUNTIME_IMAGE, write_to_file=True, labels={1: 1}, ) @@ -148,9 +149,14 @@ def get_cluster_object(file_a, file_b): def get_ray_obj(group, version, namespace, plural): # To be used for mocking list_namespaced_custom_object for Ray Clusters - rc_a_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml" - rc_b_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml" - rc_a, rc_b = get_cluster_object(rc_a_path, rc_b_path) + rc_a = apply_template( + f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml", + get_template_variables(), + ) + rc_b = apply_template( + f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml", + get_template_variables(), + ) rc_list = {"items": [rc_a, rc_b]} return rc_list @@ -158,9 +164,14 @@ def get_ray_obj(group, version, namespace, plural): def get_ray_obj_with_status(group, version, namespace, plural): # To be used for mocking list_namespaced_custom_object for Ray Clusters with statuses - rc_a_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml" - rc_b_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml" - rc_a, rc_b = get_cluster_object(rc_a_path, rc_b_path) + rc_a = apply_template( + f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml", + get_template_variables(), + ) + rc_b = apply_template( + f"{parent}/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml", + get_template_variables(), + ) rc_a.update( { @@ -205,9 +216,14 @@ def get_ray_obj_with_status(group, version, namespace, plural): def get_aw_obj(group, version, namespace, plural): # To be used for mocking list_namespaced_custom_object for AppWrappers - aw_a_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml" - aw_b_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml" - aw_a, aw_b = get_cluster_object(aw_a_path, aw_b_path) + aw_a = apply_template( + f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml", + get_template_variables(), + ) + aw_b = apply_template( + f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml", + get_template_variables(), + ) aw_list = {"items": [aw_a, aw_b]} return aw_list @@ -215,9 +231,14 @@ def get_aw_obj(group, version, namespace, plural): def get_aw_obj_with_status(group, version, namespace, plural): # To be used for mocking 
list_namespaced_custom_object for AppWrappers with statuses - aw_a_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml" - aw_b_path = f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml" - aw_a, aw_b = get_cluster_object(aw_a_path, aw_b_path) + aw_a = apply_template( + f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml", + get_template_variables(), + ) + aw_b = apply_template( + f"{parent}/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml", + get_template_variables(), + ) aw_a.update( { @@ -273,9 +294,8 @@ def apply_template(yaml_file_path, variables): def get_expected_image(): - # TODO: Add Python 3.12 support - python_version = sys.version_info - return "quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707" + # TODO: Select image based on Python version + return constants.CUDA_RUNTIME_IMAGE def get_template_variables(): diff --git a/src/codeflare_sdk/ray/appwrapper/test_awload.py b/src/codeflare_sdk/ray/appwrapper/test_awload.py index 6909394b..3f45e1a5 100644 --- a/src/codeflare_sdk/ray/appwrapper/test_awload.py +++ b/src/codeflare_sdk/ray/appwrapper/test_awload.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. from codeflare_sdk.common.utils.unit_test_support import ( + apply_template, arg_check_aw_apply_effect, arg_check_aw_del_effect, + get_template_variables, ) from codeflare_sdk.ray.appwrapper import AWManager from codeflare_sdk.ray.cluster import Cluster, ClusterConfiguration @@ -47,8 +49,11 @@ def test_AWManager_creation(mocker): assert type(e) == FileNotFoundError assert str(e) == "[Errno 2] No such file or directory: 'fake'" try: - testaw = AWManager( - f"{parent}/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml" + testaw = apply_template( + AWManager( + f"{parent}/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml" + ), + get_template_variables(), ) except Exception as e: assert type(e) == ValueError diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index 11b10fc3..92cefef4 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -50,6 +50,8 @@ import warnings import json +from codeflare_sdk.common.utils import constants + FORBIDDEN_CUSTOM_RESOURCE_TYPES = ["GPU", "CPU", "memory"] VOLUME_MOUNTS = [ V1VolumeMount( @@ -94,7 +96,7 @@ ] SUPPORTED_PYTHON_VERSIONS = { - "3.11": "quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707", + "3.11": constants.CUDA_RUNTIME_IMAGE, } diff --git a/tests/e2e/support.py b/tests/e2e/support.py index c81f5b73..165a680b 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -7,6 +7,7 @@ from codeflare_sdk.common.kubernetes_cluster.kube_api_helpers import ( _kube_api_error_handling, ) +from codeflare_sdk.common.utils import constants def get_ray_cluster(cluster_name, namespace): @@ -26,8 +27,7 @@ def get_ray_cluster(cluster_name, namespace): def get_ray_image(): - default_ray_image = "quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707" - return os.getenv("RAY_IMAGE", default_ray_image) + return os.getenv("RAY_IMAGE", constants.CUDA_RUNTIME_IMAGE) def get_setup_env_variables(**kwargs): diff --git a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml index c6972565..a5915820 100644 --- 
a/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml +++ b/tests/test_cluster_yamls/appwrapper/test-case-bad.yaml @@ -42,7 +42,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 + image: "${image}" imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -89,7 +89,7 @@ spec: valueFrom: fieldRef: fieldPath: status.podIP - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 + image: "${image}" lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml index 00e8755d..49f2c38c 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-a.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 + - image: "${image}" imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -109,7 +109,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 + - image: "${image}" lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml index be01e68b..aa6fad9c 100644 --- a/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-aw-b.yaml @@ -38,7 +38,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 + - image: "${image}" imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -109,7 +109,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 + - image: "${image}" lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml index 553d285a..2bb13995 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-a.yaml @@ -29,7 +29,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 + - image: "${image}" imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -100,7 +100,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 + - image: "${image}" lifecycle: preStop: exec: diff --git a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml index 1b84ee02..70f1d5bf 100644 --- a/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml +++ b/tests/test_cluster_yamls/support_clusters/test-rc-b.yaml @@ -29,7 +29,7 @@ spec: template: spec: containers: - - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 + - image: "${image}" imagePullPolicy: IfNotPresent lifecycle: preStop: @@ -100,7 +100,7 @@ spec: key: value spec: containers: - - image: quay.io/modh/ray@sha256:a5b7c04a14f180d7ca6d06a5697f6bb684e40a26b95a0c872cac23b552741707 + - image: "${image}" lifecycle: preStop: exec: From 89c47c3d85c779bfbd11a2fbd2b4dac7fc5bc928 Mon Sep 17 00:00:00 2001 From: lilylinh Date: Mon, 14 Jul 2025 13:57:09 +0100 
Subject: [PATCH 484/496] replace 'cluster.up' by 'cluster.apply' in demo-notebooks --- demo-notebooks/additional-demos/hf_interactive.ipynb | 2 +- demo-notebooks/additional-demos/local_interactive.ipynb | 2 +- demo-notebooks/additional-demos/ray_job_client.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- demo-notebooks/guided-demos/1_cluster_job_client.ipynb | 2 +- demo-notebooks/guided-demos/2_basic_interactive.ipynb | 2 +- .../guided-demos/notebook-ex-outputs/0_basic_ray.ipynb | 2 +- .../guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb | 2 +- .../guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb | 2 +- demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb | 2 +- .../guided-demos/preview_nbs/1_cluster_job_client.ipynb | 2 +- .../guided-demos/preview_nbs/2_basic_interactive.ipynb | 2 +- 12 files changed, 12 insertions(+), 12 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index fcb45bf6..2e8e36fb 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -114,7 +114,7 @@ "metadata": {}, "outputs": [], "source": [ - "cluster.up()" + "cluster.apply()" ] }, { diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index a46e5d9f..fb1cb3ea 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -79,7 +79,7 @@ }, "outputs": [], "source": [ - "cluster.up()" + "cluster.apply()" ] }, { diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 99187b4a..39cf2a16 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -78,7 +78,7 @@ "outputs": [], "source": [ "# Bring up the cluster\n", - "cluster.up()\n", + "cluster.apply()\n", "cluster.wait_ready()" ] }, diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 9a2ed8ca..413d31b4 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -98,7 +98,7 @@ "outputs": [], "source": [ "# Bring up the cluster\n", - "cluster.up()" + "cluster.apply()" ] }, { diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 725866d7..5a825b55 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -89,7 +89,7 @@ "outputs": [], "source": [ "# Bring up the cluster\n", - "cluster.up()\n", + "cluster.apply()\n", "cluster.wait_ready()" ] }, diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 890bdb47..80f45f13 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -96,7 +96,7 @@ "outputs": [], "source": [ "# Bring up the cluster\n", - "cluster.up()\n", + "cluster.apply()\n", "cluster.wait_ready()" ] }, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 119d9ce6..e5972711 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ 
b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -94,7 +94,7 @@ "outputs": [], "source": [ "# Bring up the cluster\n", - "cluster.up()" + "cluster.apply()" ] }, { diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index 6f5850ac..9b34da9b 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -78,7 +78,7 @@ "outputs": [], "source": [ "# Bring up the cluster\n", - "cluster.up()\n", + "cluster.apply()\n", "cluster.wait_ready()" ] }, diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 58a6c9e2..db1560ff 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -84,7 +84,7 @@ "outputs": [], "source": [ "# Bring up the cluster\n", - "cluster.up()\n", + "cluster.apply()\n", "cluster.wait_ready()" ] }, diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 119d9ce6..e5972711 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -94,7 +94,7 @@ "outputs": [], "source": [ "# Bring up the cluster\n", - "cluster.up()" + "cluster.apply()" ] }, { diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index db574ceb..6b913142 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -78,7 +78,7 @@ "outputs": [], "source": [ "# Bring up the cluster\n", - "cluster.up()\n", + "cluster.apply()\n", "cluster.wait_ready()" ] }, diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 8f70f6c2..a7ebae08 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -84,7 +84,7 @@ "outputs": [], "source": [ "# Bring up the cluster\n", - "cluster.up()\n", + "cluster.apply()\n", "cluster.wait_ready()" ] }, From 0ac44ed2e8988eadb63d74a7613f6fddfb7c99d0 Mon Sep 17 00:00:00 2001 From: lilylinh Date: Fri, 25 Jul 2025 11:55:46 +0100 Subject: [PATCH 485/496] Change widget fromt cluster.up() to cluster.apply() --- .../common/widgets/test_widgets.py | 20 ++++++++++--------- src/codeflare_sdk/common/widgets/widgets.py | 16 +++++++-------- src/codeflare_sdk/ray/cluster/cluster.py | 4 ++-- .../tests/widget_notebook_example.test.ts | 18 ++++++++--------- 4 files changed, 30 insertions(+), 28 deletions(-) diff --git a/src/codeflare_sdk/common/widgets/test_widgets.py b/src/codeflare_sdk/common/widgets/test_widgets.py index a7d3de92..f88d8eb2 100644 --- a/src/codeflare_sdk/common/widgets/test_widgets.py +++ b/src/codeflare_sdk/common/widgets/test_widgets.py @@ -28,7 +28,7 @@ @patch.dict( "os.environ", {"JPY_SESSION_NAME": "example-test"} ) # Mock Jupyter environment variable -def test_cluster_up_down_buttons(mocker): +def test_cluster_apply_down_buttons(mocker): mocker.patch("kubernetes.client.ApisApi.get_api_versions") 
mocker.patch( "kubernetes.client.CustomObjectsApi.get_cluster_custom_object", @@ -45,36 +45,38 @@ def test_cluster_up_down_buttons(mocker): ) as MockCheckbox, patch("ipywidgets.Output"), patch("ipywidgets.HBox"), patch( "ipywidgets.VBox" ), patch.object( - cluster, "up" - ) as mock_up, patch.object( + cluster, "apply" + ) as mock_apply, patch.object( cluster, "down" ) as mock_down, patch.object( cluster, "wait_ready" ) as mock_wait_ready: # Create mock button & CheckBox instances - mock_up_button = MagicMock() + mock_apply_button = MagicMock() mock_down_button = MagicMock() mock_wait_ready_check_box = MagicMock() # Ensure the mock Button class returns the mock button instances in sequence MockCheckbox.side_effect = [mock_wait_ready_check_box] - MockButton.side_effect = [mock_up_button, mock_down_button] + MockButton.side_effect = [mock_apply_button, mock_down_button] # Call the method under test - cf_widgets.cluster_up_down_buttons(cluster) + cf_widgets.cluster_apply_down_buttons(cluster) # Simulate checkbox being checked or unchecked mock_wait_ready_check_box.value = True # Simulate checkbox being checked # Simulate the button clicks by calling the mock on_click handlers - mock_up_button.on_click.call_args[0][0](None) # Simulate clicking "Cluster Up" + mock_apply_button.on_click.call_args[0][0]( + None + ) # Simulate clicking "Cluster Apply" mock_down_button.on_click.call_args[0][0]( None ) # Simulate clicking "Cluster Down" - # Check if the `up` and `down` methods were called + # Check if the `apply` and `down` methods were called mock_wait_ready.assert_called_once() - mock_up.assert_called_once() + mock_apply.assert_called_once() mock_down.assert_called_once() diff --git a/src/codeflare_sdk/common/widgets/widgets.py b/src/codeflare_sdk/common/widgets/widgets.py index 6f3283ce..36d896e8 100644 --- a/src/codeflare_sdk/common/widgets/widgets.py +++ b/src/codeflare_sdk/common/widgets/widgets.py @@ -271,19 +271,19 @@ def display_widgets(self): ) -def cluster_up_down_buttons( +def cluster_apply_down_buttons( cluster: "codeflare_sdk.ray.cluster.cluster.Cluster", ) -> widgets.Button: """ - The cluster_up_down_buttons function returns two button widgets for a create and delete button. + The cluster_apply_down_buttons function returns two button widgets for a create and delete button. The function uses the appwrapper bool to distinguish between resource type for the tool tip. 
""" resource = "Ray Cluster" if cluster.config.appwrapper: resource = "AppWrapper" - up_button = widgets.Button( - description="Cluster Up", + apply_button = widgets.Button( + description="Cluster Apply", tooltip=f"Create the {resource}", icon="play", ) @@ -298,13 +298,13 @@ def cluster_up_down_buttons( output = widgets.Output() # Display the buttons in an HBox wrapped in a VBox which includes the wait_ready Checkbox - button_display = widgets.HBox([up_button, delete_button]) + button_display = widgets.HBox([apply_button, delete_button]) display(widgets.VBox([button_display, wait_ready_check]), output) - def on_up_button_clicked(b): # Handle the up button click event + def on_apply_button_clicked(b): # Handle the apply button click event with output: output.clear_output() - cluster.up() + cluster.apply() # If the wait_ready Checkbox is clicked(value == True) trigger the wait_ready function if wait_ready_check.value: @@ -315,7 +315,7 @@ def on_down_button_clicked(b): # Handle the down button click event output.clear_output() cluster.down() - up_button.on_click(on_up_button_clicked) + apply_button.on_click(on_apply_button_clicked) delete_button.on_click(on_down_button_clicked) diff --git a/src/codeflare_sdk/ray/cluster/cluster.py b/src/codeflare_sdk/ray/cluster/cluster.py index 86ed7c4d..4eaa2000 100644 --- a/src/codeflare_sdk/ray/cluster/cluster.py +++ b/src/codeflare_sdk/ray/cluster/cluster.py @@ -43,7 +43,7 @@ AppWrapperStatus, ) from ...common.widgets.widgets import ( - cluster_up_down_buttons, + cluster_apply_down_buttons, is_notebook, ) from kubernetes import client @@ -88,7 +88,7 @@ def __init__(self, config: ClusterConfiguration): self.resource_yaml = self.create_resource() if is_notebook(): - cluster_up_down_buttons(self) + cluster_apply_down_buttons(self) def get_dynamic_client(self): # pragma: no cover return DynamicClient(get_api_client()) diff --git a/ui-tests/tests/widget_notebook_example.test.ts b/ui-tests/tests/widget_notebook_example.test.ts index 3360ba83..7707f70b 100644 --- a/ui-tests/tests/widget_notebook_example.test.ts +++ b/ui-tests/tests/widget_notebook_example.test.ts @@ -67,25 +67,25 @@ test.describe("Visual Regression", () => { // At this point, all cells have been ran, and their screenshots have been captured. // We now interact with the widgets in the notebook. 
- const upDownWidgetCellIndex = 3; // 4 on OpenShift + const applyDownWidgetCellIndex = 3; // 4 on OpenShift - await waitForWidget(page, upDownWidgetCellIndex, 'input[type="checkbox"]'); - await waitForWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Down")'); - await waitForWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Up")'); + await waitForWidget(page, applyDownWidgetCellIndex, 'input[type="checkbox"]'); + await waitForWidget(page, applyDownWidgetCellIndex, 'button:has-text("Cluster Down")'); + await waitForWidget(page, applyDownWidgetCellIndex, 'button:has-text("Cluster Apply")'); - await interactWithWidget(page, upDownWidgetCellIndex, 'input[type="checkbox"]', async (checkbox) => { + await interactWithWidget(page, applyDownWidgetCellIndex, 'input[type="checkbox"]', async (checkbox) => { await checkbox.click(); const isChecked = await checkbox.isChecked(); expect(isChecked).toBe(true); }); - await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { + await interactWithWidget(page, applyDownWidgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { await button.click(); const clusterDownMessage = await page.waitForSelector('text=The requested resource could not be located.', { timeout: 5000 }); expect(await clusterDownMessage.innerText()).toContain('The requested resource could not be located.'); }); - await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Up")', async (button) => { + await interactWithWidget(page, applyDownWidgetCellIndex, 'button:has-text("Cluster Apply")', async (button) => { await button.click(); const successMessage = await page.waitForSelector('text=Ray Cluster: \'widgettest\' has successfully been created', { timeout: 10000 }); @@ -103,7 +103,7 @@ test.describe("Visual Regression", () => { await runPreviousCell(page, cellCount, '(, True)'); - await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { + await interactWithWidget(page, applyDownWidgetCellIndex, 'button:has-text("Cluster Down")', async (button) => { await button.click(); const clusterDownMessage = await page.waitForSelector('text=Ray Cluster: \'widgettest\' has successfully been deleted', { timeout: 5000 }); expect(clusterDownMessage).not.toBeNull(); @@ -116,7 +116,7 @@ test.describe("Visual Regression", () => { await cell.fill('"widgettest-1"'); await page.notebook.runCell(cellCount - 3, true); // Run ClusterConfiguration cell - await interactWithWidget(page, upDownWidgetCellIndex, 'button:has-text("Cluster Up")', async (button) => { + await interactWithWidget(page, applyDownWidgetCellIndex, 'button:has-text("Cluster Apply")', async (button) => { await button.click(); const successMessage = await page.waitForSelector('text=Ray Cluster: \'widgettest-1\' has successfully been created', { timeout: 10000 }); expect(successMessage).not.toBeNull(); From 326434ddf1e484117384fb51561cdfbb724dabc4 Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Wed, 30 Jul 2025 07:55:30 +0100 Subject: [PATCH 486/496] enable CI on ray-jobs-feature branch Signed-off-by: Pat O'Connor --- .github/workflows/coverage-badge.yaml | 2 +- .github/workflows/e2e_tests.yaml | 1 + .github/workflows/unit-tests.yml | 4 ++-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/coverage-badge.yaml b/.github/workflows/coverage-badge.yaml index 0d2d41f6..2c3b40fa 100644 --- a/.github/workflows/coverage-badge.yaml +++ 
b/.github/workflows/coverage-badge.yaml @@ -4,7 +4,7 @@ name: Coverage Badge on: push: - branches: [ main ] + branches: [ main, ray-jobs-feature ] jobs: report: diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index fc80af56..d66e4b34 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -6,6 +6,7 @@ on: branches: - main - 'release-*' + - ray-jobs-feature paths-ignore: - 'docs/**' - '**.adoc' diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 6697fc80..e38e6973 100755 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -2,9 +2,9 @@ name: Python Tests on: pull_request: - branches: [ main ] + branches: [ main, ray-jobs-feature ] push: - branches: [ main ] + branches: [ main, ray-jobs-feature ] jobs: unit-tests: From d4678105456cb42ae9153213f54afd8d5dee4945 Mon Sep 17 00:00:00 2001 From: Bryan Keane Date: Wed, 6 Aug 2025 15:38:55 +0100 Subject: [PATCH 487/496] feat: add remote offline batch inference with vllm example (#848) * no-jira: add remote offline batch inference with vllm example * no-jira: remove notebook output * feat: add temp workdir for gcs workaround * feat: add config details to batch inference demo * chore: ignore jupyter notebooks from codecov --- codecov.yml | 3 + .../batch-inference/remote_offline_bi.ipynb | 214 ++++++++++++++++++ .../batch-inference/requirements.txt | 4 + .../batch-inference/simple_batch_inf.py | 62 +++++ 4 files changed, 283 insertions(+) create mode 100644 codecov.yml create mode 100644 demo-notebooks/additional-demos/batch-inference/remote_offline_bi.ipynb create mode 100644 demo-notebooks/additional-demos/batch-inference/requirements.txt create mode 100644 demo-notebooks/additional-demos/batch-inference/simple_batch_inf.py diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 00000000..550965e6 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,3 @@ +ignore: + - "**/*.ipynb" + - "demo-notebooks/**" diff --git a/demo-notebooks/additional-demos/batch-inference/remote_offline_bi.ipynb b/demo-notebooks/additional-demos/batch-inference/remote_offline_bi.ipynb new file mode 100644 index 00000000..68b514c4 --- /dev/null +++ b/demo-notebooks/additional-demos/batch-inference/remote_offline_bi.ipynb @@ -0,0 +1,214 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Remote Offline Batch Inference with Ray Data & vLLM Example\n", + "\n", + "This notebook presumes:\n", + "- You have a Ray Cluster URL given to you to run workloads on\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "from codeflare_sdk import RayJobClient\n", + "\n", + "# Setup Authentication Configuration\n", + "auth_token = \"XXXX\"\n", + "header = {\"Authorization\": f\"Bearer {auth_token}\"}" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "# Gather the dashboard URL (provided by the creator of the RayCluster)\n", + "ray_dashboard = \"XXXX\" # Replace with the Ray dashboard URL\n", + "\n", + "# Initialize the RayJobClient\n", + "client = RayJobClient(address=ray_dashboard, headers=header, verify=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Simple Example Explanation\n", + "\n", + "With the RayJobClient instantiated, lets run some batch inference. 
The following code is stored in `simple_batch_inf.py`, and is used as the entrypoint for the RayJob.\n", + "\n", + "What this processor configuration does:\n", + "- Set up a vLLM engine with your model\n", + "- Configure some settings for GPU processing\n", + "- Defines batch processing parameters (8 requests per batch, 2 GPU workers)\n", + "\n", + "#### Model Source Configuration\n", + "\n", + "The `model_source` parameter supports several loading methods:\n", + "\n", + "* **Hugging Face Hub** (default): Use repository ID `model_source=\"meta-llama/Llama-2-7b-chat-hf\"`\n", + "* **Local Directory**: Use file path `model_source=\"/path/to/my/local/model\"`\n", + "* **Other Sources**: ModelScope via environment variables `VLLM_MODELSCOPE_DOWNLOADS_DIR`\n", + "\n", + "For complete model support and options, see the [official vLLM documentation](https://docs.vllm.ai/en/latest/models/supported_models.html).\n", + "\n", + "```python\n", + "import ray\n", + "from ray.data.llm import build_llm_processor, vLLMEngineProcessorConfig\n", + "\n", + "processor_config = vLLMEngineProcessorConfig(\n", + " model_source=\"replace-me\",\n", + " engine_kwargs=dict(\n", + " enable_lora=False,\n", + " dtype=\"half\",\n", + " max_model_len=1024,\n", + " ),\n", + " # Batch size: Larger batches increase throughput but reduce fault tolerance\n", + " # - Small batches (4-8): Better for fault tolerance and memory constraints\n", + " # - Large batches (16-32): Higher throughput, better GPU utilization\n", + " # - Choose based on your Ray Cluster size and memory availability\n", + " batch_size=8,\n", + " # Concurrency: Number of vLLM engine workers to spawn \n", + " # - Set to match your total GPU count for maximum utilization\n", + " # - Each worker gets assigned to a GPU automatically by Ray scheduler\n", + " # - Can use all GPUs across head and worker nodes\n", + " concurrency=2,\n", + ")\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "With the config defined, we can instantiate the processor. This enables batch inference by processing multiple requests through the vLLM engine, with two key steps:\n", + "- **Preprocess**: Converts each row into a structured chat format with system instructions and user queries, preparing the input for the LLM\n", + "- **Postprocess**: Extracts only the generated text from the model response, cleaning up the output\n", + "\n", + "The processor defines the pipeline that will be applied to each row in the dataset, enabling efficient batch processing through Ray Data's distributed execution framework.\n", + "\n", + "```python\n", + "processor = build_llm_processor(\n", + " processor_config,\n", + " preprocess=lambda row: dict(\n", + " messages=[\n", + " {\n", + " \"role\": \"system\",\n", + " \"content\": \"You are a calculator. 
Please only output the answer \"\n", + " \"of the given equation.\",\n", + " },\n", + " {\"role\": \"user\", \"content\": f\"{row['id']} ** 3 = ?\"},\n", + " ],\n", + " sampling_params=dict(\n", + " temperature=0.3,\n", + " max_tokens=20,\n", + " detokenize=False,\n", + " ),\n", + " ),\n", + " postprocess=lambda row: {\n", + " \"resp\": row[\"generated_text\"],\n", + " },\n", + ")\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Running the Pipeline\n", + "Now we can run the batch inference pipeline on our data, it will:\n", + "- In the background, the processor will download the model into memory where vLLM serves it locally (on Ray Cluster) for use in inference\n", + "- Generate a sample Ray Dataset with 32 rows (0-31) to process\n", + "- Run the LLM processor on the dataset, triggering the preprocessing, inference, and postprocessing steps\n", + "- Execute the lazy pipeline and loads results into memory\n", + "- Iterate through all outputs and print each response \n", + "\n", + "```python\n", + "ds = ray.data.range(30)\n", + "ds = processor(ds)\n", + "ds = ds.materialize()\n", + "\n", + "for out in ds.take_all():\n", + " print(out)\n", + " print(\"==========\")\n", + "```\n", + "\n", + "### Job Submission\n", + "\n", + "Now we can submit this job against the Ray Cluster using the `RayJobClient` from earlier " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import tempfile\n", + "import shutil\n", + "\n", + "# Create a clean directory with ONLY your script\n", + "temp_dir = tempfile.mkdtemp()\n", + "shutil.copy(\"simple_batch_inf.py\", temp_dir)\n", + "\n", + "entrypoint_command = \"python simple_batch_inf.py\"\n", + "\n", + "submission_id = client.submit_job(\n", + " entrypoint=entrypoint_command,\n", + " runtime_env={\"working_dir\": temp_dir, \"pip\": \"requirements.txt\"},\n", + ")\n", + "\n", + "print(submission_id + \" successfully submitted\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get the job's status\n", + "client.get_job_status(submission_id)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Get the job's logs\n", + "client.get_job_logs(submission_id)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.12" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/demo-notebooks/additional-demos/batch-inference/requirements.txt b/demo-notebooks/additional-demos/batch-inference/requirements.txt new file mode 100644 index 00000000..d9e8b73b --- /dev/null +++ b/demo-notebooks/additional-demos/batch-inference/requirements.txt @@ -0,0 +1,4 @@ +vllm +transformers +triton>=2.0.0 +torch>=2.0.0 diff --git a/demo-notebooks/additional-demos/batch-inference/simple_batch_inf.py b/demo-notebooks/additional-demos/batch-inference/simple_batch_inf.py new file mode 100644 index 00000000..c86ed15b --- /dev/null +++ b/demo-notebooks/additional-demos/batch-inference/simple_batch_inf.py @@ -0,0 +1,62 @@ +import ray +from ray.data.llm import build_llm_processor, vLLMEngineProcessorConfig + + +# 1. 
Construct a vLLM processor config. +processor_config = vLLMEngineProcessorConfig( + # The base model. + model_source="unsloth/Llama-3.2-1B-Instruct", + # vLLM engine config. + engine_kwargs=dict( + enable_lora=False, + # # Older GPUs (e.g. T4) don't support bfloat16. You should remove + # # this line if you're using later GPUs. + dtype="half", + # Reduce the model length to fit small GPUs. You should remove + # this line if you're using large GPUs. + max_model_len=1024, + ), + # The batch size used in Ray Data. + batch_size=8, + # Use one GPU in this example. + concurrency=1, + # If you save the LoRA adapter in S3, you can set the following path. + # dynamic_lora_loading_path="s3://your-lora-bucket/", +) + +# 2. Construct a processor using the processor config. +processor = build_llm_processor( + processor_config, + preprocess=lambda row: dict( + # Remove the LoRA model specification + messages=[ + { + "role": "system", + "content": "You are a calculator. Please only output the answer " + "of the given equation.", + }, + {"role": "user", "content": f"{row['id']} ** 3 = ?"}, + ], + sampling_params=dict( + temperature=0.3, + max_tokens=20, + detokenize=False, + ), + ), + postprocess=lambda row: { + "resp": row["generated_text"], + }, +) + +# 3. Synthesize a dataset with 32 rows. +ds = ray.data.range(32) +# 4. Apply the processor to the dataset. Note that this line won't kick off +# anything because processor is execution lazily. +ds = processor(ds) +# Materialization kicks off the pipeline execution. +ds = ds.materialize() + +# 5. Print all outputs. +for out in ds.take_all(): + print(out) + print("==========") From 4531e55bec3a4325895180c3886c11ff271484d3 Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Thu, 14 Aug 2025 11:56:28 +0100 Subject: [PATCH 488/496] fix(RHOAIENG-31076): Added project and updated version in .toml Signed-off-by: Pat O'Connor --- pyproject.toml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8615f894..5caafcb6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,10 @@ +[project] +name = "codeflare-sdk" +version = "0.30.0" + [tool.poetry] name = "codeflare-sdk" -version = "0.0.0-dev" +version = "0.30.0" description = "Python SDK for codeflare client" license = "Apache-2.0" @@ -60,3 +64,7 @@ markers = [ addopts = "--timeout=900" testpaths = ["src/codeflare_sdk"] collect_ignore = ["src/codeflare_sdk/common/utils/unit_test_support.py"] + +[build-system] +requires = ["poetry-core>=1.6.0"] +build-backend = "poetry.core.masonry.api" From 74f234c52e6cecb0958a61e4d69c2fdc2d45a501 Mon Sep 17 00:00:00 2001 From: lilylinh Date: Mon, 25 Aug 2025 15:18:32 +0100 Subject: [PATCH 489/496] task(RHOAIENG-30478)Remove Deprecated Params from CodeFlare SDK (head_cpus & head_memory) --- .../user-docs/cluster-configuration.rst | 17 ------------- src/codeflare_sdk/ray/cluster/config.py | 24 ------------------- src/codeflare_sdk/ray/cluster/test_config.py | 17 ------------- 3 files changed, 58 deletions(-) diff --git a/docs/sphinx/user-docs/cluster-configuration.rst b/docs/sphinx/user-docs/cluster-configuration.rst index c810148b..f8212823 100644 --- a/docs/sphinx/user-docs/cluster-configuration.rst +++ b/docs/sphinx/user-docs/cluster-configuration.rst @@ -155,20 +155,3 @@ Example configuration: .. note:: You need to have a Redis instance deployed in your Kubernetes cluster before using this feature. 
- -Deprecating Parameters ----------------------- - -The following parameters of the ``ClusterConfiguration`` are being -deprecated. - -.. list-table:: - :header-rows: 1 - :widths: auto - - * - Deprecated Parameter - - Replaced By - * - ``head_cpus`` - - ``head_cpu_requests``, ``head_cpu_limits`` - * - ``head_memory`` - - ``head_memory_requests``, ``head_memory_limits`` diff --git a/src/codeflare_sdk/ray/cluster/config.py b/src/codeflare_sdk/ray/cluster/config.py index ec89924a..dc61de2a 100644 --- a/src/codeflare_sdk/ray/cluster/config.py +++ b/src/codeflare_sdk/ray/cluster/config.py @@ -50,10 +50,6 @@ class ClusterConfiguration: The name of the cluster. namespace: The namespace in which the cluster should be created. - head_cpus: - The number of CPUs to allocate to the head node. - head_memory: - The amount of memory to allocate to the head node. head_extended_resource_requests: A dictionary of extended resource requests for the head node. ex: {"nvidia.com/gpu": 1} head_tolerations: @@ -104,10 +100,8 @@ class ClusterConfiguration: namespace: Optional[str] = None head_cpu_requests: Union[int, str] = 2 head_cpu_limits: Union[int, str] = 2 - head_cpus: Optional[Union[int, str]] = None # Deprecating head_memory_requests: Union[int, str] = 8 head_memory_limits: Union[int, str] = 8 - head_memory: Optional[Union[int, str]] = None # Deprecating head_extended_resource_requests: Dict[str, Union[str, int]] = field( default_factory=dict ) @@ -173,10 +167,8 @@ def __post_init__(self): ) self._validate_types() - self._memory_to_resource() self._memory_to_string() self._str_mem_no_unit_add_GB() - self._cpu_to_resource() self._combine_extended_resource_mapping() self._validate_extended_resource_requests(self.head_extended_resource_requests) self._validate_extended_resource_requests( @@ -209,8 +201,6 @@ def _validate_extended_resource_requests(self, extended_resources: Dict[str, int ) def _str_mem_no_unit_add_GB(self): - if isinstance(self.head_memory, str) and self.head_memory.isdecimal(): - self.head_memory = f"{self.head_memory}G" if ( isinstance(self.worker_memory_requests, str) and self.worker_memory_requests.isdecimal() @@ -232,20 +222,6 @@ def _memory_to_string(self): if isinstance(self.worker_memory_limits, int): self.worker_memory_limits = f"{self.worker_memory_limits}G" - def _cpu_to_resource(self): - if self.head_cpus: - warnings.warn( - "head_cpus is being deprecated, use head_cpu_requests and head_cpu_limits" - ) - self.head_cpu_requests = self.head_cpu_limits = self.head_cpus - - def _memory_to_resource(self): - if self.head_memory: - warnings.warn( - "head_memory is being deprecated, use head_memory_requests and head_memory_limits" - ) - self.head_memory_requests = self.head_memory_limits = self.head_memory - def _validate_types(self): """Validate the types of all fields in the ClusterConfiguration dataclass.""" errors = [] diff --git a/src/codeflare_sdk/ray/cluster/test_config.py b/src/codeflare_sdk/ray/cluster/test_config.py index 6f002df1..e405bc5b 100644 --- a/src/codeflare_sdk/ray/cluster/test_config.py +++ b/src/codeflare_sdk/ray/cluster/test_config.py @@ -135,23 +135,6 @@ def test_config_creation_wrong_type(): assert len(str(error_info.value).splitlines()) == 4 -@pytest.mark.filterwarnings("ignore::UserWarning") -def test_cluster_config_deprecation_conversion(mocker): - config = ClusterConfiguration( - name="test", - head_cpus=3, - head_memory=16, - ) - assert config.head_cpu_requests == 3 - assert config.head_cpu_limits == 3 - assert config.head_memory_requests == "16G" - assert 
config.head_memory_limits == "16G" - assert config.worker_memory_requests == "2G" - assert config.worker_memory_limits == "2G" - assert config.worker_cpu_requests == 1 - assert config.worker_cpu_limits == 1 - - def test_gcs_fault_tolerance_config_validation(): config = ClusterConfiguration( name="test", From 573a431b5426140abe25e2e389150e559ff1422f Mon Sep 17 00:00:00 2001 From: Pawel Paszki Date: Tue, 2 Sep 2025 16:18:17 +0100 Subject: [PATCH 490/496] test: change deprecated cluster up to apply and add more resources to ray clusters --- tests/e2e/heterogeneous_clusters_kind_test.py | 2 +- tests/e2e/heterogeneous_clusters_oauth_test.py | 8 ++++---- tests/e2e/local_interactive_sdk_kind_test.py | 2 +- tests/e2e/local_interactive_sdk_oauth_test.py | 6 +++++- tests/e2e/mnist_raycluster_sdk_aw_kind_test.py | 2 +- tests/e2e/mnist_raycluster_sdk_kind_test.py | 2 +- tests/e2e/mnist_raycluster_sdk_oauth_test.py | 10 +++++----- tests/e2e/start_ray_cluster.py | 2 +- 8 files changed, 19 insertions(+), 15 deletions(-) diff --git a/tests/e2e/heterogeneous_clusters_kind_test.py b/tests/e2e/heterogeneous_clusters_kind_test.py index 052fa7b8..fb650176 100644 --- a/tests/e2e/heterogeneous_clusters_kind_test.py +++ b/tests/e2e/heterogeneous_clusters_kind_test.py @@ -63,7 +63,7 @@ def run_heterogeneous_clusters( local_queue=queue_name, ) ) - cluster.up() + cluster.apply() sleep(5) node_name = get_pod_node(self, self.namespace, cluster_name) print(f"Cluster {cluster_name}-{flavor} is running on node: {node_name}") diff --git a/tests/e2e/heterogeneous_clusters_oauth_test.py b/tests/e2e/heterogeneous_clusters_oauth_test.py index d57cff48..0fbe4df3 100644 --- a/tests/e2e/heterogeneous_clusters_oauth_test.py +++ b/tests/e2e/heterogeneous_clusters_oauth_test.py @@ -55,9 +55,9 @@ def run_heterogeneous_clusters( namespace=self.namespace, name=cluster_name, num_workers=1, - head_cpu_requests="500m", - head_cpu_limits="500m", - worker_cpu_requests="500m", + head_cpu_requests=1, + head_cpu_limits=1, + worker_cpu_requests=1, worker_cpu_limits=1, worker_memory_requests=1, worker_memory_limits=4, @@ -66,7 +66,7 @@ def run_heterogeneous_clusters( local_queue=queue_name, ) ) - cluster.up() + cluster.apply() sleep(5) node_name = get_pod_node(self, self.namespace, cluster_name) print(f"Cluster {cluster_name}-{flavor} is running on node: {node_name}") diff --git a/tests/e2e/local_interactive_sdk_kind_test.py b/tests/e2e/local_interactive_sdk_kind_test.py index 6f025d22..0f5fd93f 100644 --- a/tests/e2e/local_interactive_sdk_kind_test.py +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -65,7 +65,7 @@ def run_local_interactives( ) ) - cluster.up() + cluster.apply() cluster.wait_ready() cluster.status() diff --git a/tests/e2e/local_interactive_sdk_oauth_test.py b/tests/e2e/local_interactive_sdk_oauth_test.py index b5229deb..8be0bf9c 100644 --- a/tests/e2e/local_interactive_sdk_oauth_test.py +++ b/tests/e2e/local_interactive_sdk_oauth_test.py @@ -44,6 +44,10 @@ def run_local_interactives(self): namespace=self.namespace, name=cluster_name, num_workers=1, + head_memory_requests=6, + head_memory_limits=8, + head_cpu_requests=1, + head_cpu_limits=1, worker_cpu_requests=1, worker_cpu_limits=1, worker_memory_requests=1, @@ -52,7 +56,7 @@ def run_local_interactives(self): verify_tls=False, ) ) - cluster.up() + cluster.apply() cluster.wait_ready() generate_cert.generate_tls_cert(cluster_name, self.namespace) diff --git a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py index 
49f0888d..5d06214c 100644 --- a/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_aw_kind_test.py @@ -55,7 +55,7 @@ def run_mnist_raycluster_sdk_kind( ) ) - cluster.up() + cluster.apply() cluster.status() diff --git a/tests/e2e/mnist_raycluster_sdk_kind_test.py b/tests/e2e/mnist_raycluster_sdk_kind_test.py index 42d0c46b..4ba728cf 100644 --- a/tests/e2e/mnist_raycluster_sdk_kind_test.py +++ b/tests/e2e/mnist_raycluster_sdk_kind_test.py @@ -54,7 +54,7 @@ def run_mnist_raycluster_sdk_kind( ) ) - cluster.up() + cluster.apply() cluster.status() diff --git a/tests/e2e/mnist_raycluster_sdk_oauth_test.py b/tests/e2e/mnist_raycluster_sdk_oauth_test.py index 212c9784..18447d74 100644 --- a/tests/e2e/mnist_raycluster_sdk_oauth_test.py +++ b/tests/e2e/mnist_raycluster_sdk_oauth_test.py @@ -46,19 +46,19 @@ def run_mnist_raycluster_sdk_oauth(self): name="mnist", namespace=self.namespace, num_workers=1, - head_cpu_requests="500m", - head_cpu_limits="500m", + head_memory_requests=6, + head_memory_limits=8, worker_cpu_requests=1, worker_cpu_limits=1, - worker_memory_requests=1, - worker_memory_limits=4, + worker_memory_requests=6, + worker_memory_limits=8, image=ray_image, write_to_file=True, verify_tls=False, ) ) - cluster.up() + cluster.apply() cluster.status() diff --git a/tests/e2e/start_ray_cluster.py b/tests/e2e/start_ray_cluster.py index 8aac19f0..bc7f531f 100644 --- a/tests/e2e/start_ray_cluster.py +++ b/tests/e2e/start_ray_cluster.py @@ -26,7 +26,7 @@ ) ) -cluster.up() +cluster.apply() cluster.status() From 2bb23769424e8b0cd8246bf48af3852f35a092e0 Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Mon, 1 Sep 2025 12:29:03 +0100 Subject: [PATCH 491/496] task(RHOAIENG-33045): Update Runtime image SHAs for PY312 Signed-off-by: Pat O'Connor --- .github/workflows/e2e_tests.yaml | 2 +- .../additional-demos/hf_interactive.ipynb | 3 +- .../additional-demos/local_interactive.ipynb | 1 + .../additional-demos/ray_job_client.ipynb | 1 + demo-notebooks/guided-demos/0_basic_ray.ipynb | 1 + .../guided-demos/1_cluster_job_client.ipynb | 1 + .../guided-demos/2_basic_interactive.ipynb | 1 + .../guided-demos/3_widget_example.ipynb | 1 + .../notebook-ex-outputs/0_basic_ray.ipynb | 1 + .../1_cluster_job_client.ipynb | 1 + .../2_basic_interactive.ipynb | 1 + .../preview_nbs/0_basic_ray.ipynb | 1 + .../preview_nbs/1_cluster_job_client.ipynb | 1 + .../preview_nbs/2_basic_interactive.ipynb | 1 + src/codeflare_sdk/common/utils/constants.py | 15 +++++- .../common/utils/unit_test_support.py | 7 +-- src/codeflare_sdk/common/utils/utils.py | 46 +++++++++++++++++++ .../ray/cluster/build_ray_cluster.py | 17 +++---- .../ray/cluster/test_build_ray_cluster.py | 3 +- tests/e2e/local_interactive_sdk_kind_test.py | 4 +- tests/e2e/support.py | 6 ++- 21 files changed, 94 insertions(+), 21 deletions(-) create mode 100644 src/codeflare_sdk/common/utils/utils.py diff --git a/.github/workflows/e2e_tests.yaml b/.github/workflows/e2e_tests.yaml index d66e4b34..ba59a9e1 100644 --- a/.github/workflows/e2e_tests.yaml +++ b/.github/workflows/e2e_tests.yaml @@ -57,7 +57,7 @@ jobs: - name: Set up specific Python version uses: actions/setup-python@v5 with: - python-version: '3.11' + python-version: '3.12' cache: 'pip' # caching pip dependencies - name: Setup NVidia GPU environment for KinD diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 2e8e36fb..2f0334dc 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ 
b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -70,7 +70,8 @@ "\n", "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", - "- For Python 3.11: 'quay.io/modh/ray:2.35.0-py311-cu121'\n", + "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index fb1cb3ea..d1318103 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -38,6 +38,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 39cf2a16..11689a1b 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -44,6 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 413d31b4..7d35107d 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -50,6 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 5a825b55..0c0b3488 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -44,6 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 80f45f13..71d6b78e 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -47,6 +47,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/3_widget_example.ipynb b/demo-notebooks/guided-demos/3_widget_example.ipynb index d09271c9..28ad43f6 100644 --- a/demo-notebooks/guided-demos/3_widget_example.ipynb +++ b/demo-notebooks/guided-demos/3_widget_example.ipynb @@ -50,6 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index e5972711..21631288 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -50,6 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index 9b34da9b..44546a94 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -44,6 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index db1560ff..35daa0ff 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -47,6 +47,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index e5972711..21631288 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -50,6 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index 6b913142..1d752f6a 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -44,6 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index a7ebae08..1c099cfd 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -47,6 +47,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/src/codeflare_sdk/common/utils/constants.py b/src/codeflare_sdk/common/utils/constants.py index 9721ac85..d380dcc0 100644 --- a/src/codeflare_sdk/common/utils/constants.py +++ b/src/codeflare_sdk/common/utils/constants.py @@ -1,3 +1,14 @@ RAY_VERSION = "2.47.1" -# Below references ray:2.47.1-py311-cu121 -CUDA_RUNTIME_IMAGE = "quay.io/modh/ray@sha256:6d076aeb38ab3c34a6a2ef0f58dc667089aa15826fa08a73273c629333e12f1e" +""" +The below are used to define the default runtime image for the Ray Cluster. 
+* For python 3.11:ray:2.47.1-py311-cu121 +* For python 3.12:ray:2.47.1-py312-cu121 +""" +CUDA_PY311_RUNTIME_IMAGE = "quay.io/modh/ray@sha256:6d076aeb38ab3c34a6a2ef0f58dc667089aa15826fa08a73273c629333e12f1e" +CUDA_PY312_RUNTIME_IMAGE = "quay.io/modh/ray@sha256:23860dfe2e47bb69709b3883b08fd1a4d836ce02eaf8d0afeeafe6986d0fc8fb" + +# Centralized image selection +SUPPORTED_PYTHON_VERSIONS = { + "3.11": CUDA_PY311_RUNTIME_IMAGE, + "3.12": CUDA_PY312_RUNTIME_IMAGE, +} diff --git a/src/codeflare_sdk/common/utils/unit_test_support.py b/src/codeflare_sdk/common/utils/unit_test_support.py index b382ec01..653e818c 100644 --- a/src/codeflare_sdk/common/utils/unit_test_support.py +++ b/src/codeflare_sdk/common/utils/unit_test_support.py @@ -15,6 +15,7 @@ import string import sys from codeflare_sdk.common.utils import constants +from codeflare_sdk.common.utils.utils import get_ray_image_for_python_version from codeflare_sdk.ray.cluster.cluster import ( Cluster, ClusterConfiguration, @@ -69,7 +70,7 @@ def create_cluster_wrong_type(): worker_extended_resource_requests={"nvidia.com/gpu": 7}, appwrapper=True, image_pull_secrets=["unit-test-pull-secret"], - image=constants.CUDA_RUNTIME_IMAGE, + image=constants.CUDA_PY312_RUNTIME_IMAGE, write_to_file=True, labels={1: 1}, ) @@ -294,8 +295,8 @@ def apply_template(yaml_file_path, variables): def get_expected_image(): - # TODO: Select image based on Python version - return constants.CUDA_RUNTIME_IMAGE + # Use centralized image selection logic (fallback to 3.12 for test consistency) + return get_ray_image_for_python_version(warn_on_unsupported=True) def get_template_variables(): diff --git a/src/codeflare_sdk/common/utils/utils.py b/src/codeflare_sdk/common/utils/utils.py new file mode 100644 index 00000000..f876e924 --- /dev/null +++ b/src/codeflare_sdk/common/utils/utils.py @@ -0,0 +1,46 @@ +# Copyright 2025 IBM, Red Hat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import sys + +from codeflare_sdk.common.utils.constants import ( + SUPPORTED_PYTHON_VERSIONS, + CUDA_PY312_RUNTIME_IMAGE, +) + + +def get_ray_image_for_python_version(python_version=None, warn_on_unsupported=True): + """ + Get the appropriate Ray image for a given Python version. + If no version is provided, uses the current runtime Python version. + This prevents us needing to hard code image versions for tests. + + Args: + python_version: Python version string (e.g. "3.11"). If None, detects current version. + warn_on_unsupported: If True, warns and returns None for unsupported versions. + If False, silently falls back to Python 3.12 image. + """ + if python_version is None: + python_version = f"{sys.version_info.major}.{sys.version_info.minor}" + + if python_version in SUPPORTED_PYTHON_VERSIONS: + return SUPPORTED_PYTHON_VERSIONS[python_version] + elif warn_on_unsupported: + import warnings + + warnings.warn( + f"No default Ray image defined for {python_version}. Please provide your own image or use one of the following python versions: {', '.join(SUPPORTED_PYTHON_VERSIONS.keys())}." 
+ ) + return None + else: + return CUDA_PY312_RUNTIME_IMAGE diff --git a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py index 92cefef4..e8b68919 100644 --- a/src/codeflare_sdk/ray/cluster/build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/build_ray_cluster.py @@ -21,6 +21,7 @@ from ...common.kubernetes_cluster import get_api_client, config_check from kubernetes.client.exceptions import ApiException from ...common.utils.constants import RAY_VERSION +from ...common.utils.utils import get_ray_image_for_python_version import codeflare_sdk import os @@ -95,9 +96,8 @@ ), ] -SUPPORTED_PYTHON_VERSIONS = { - "3.11": constants.CUDA_RUNTIME_IMAGE, -} +# Use centralized mapping from constants (so that we only have to update constants.py) +SUPPORTED_PYTHON_VERSIONS = constants.SUPPORTED_PYTHON_VERSIONS # RayCluster/AppWrapper builder function @@ -272,16 +272,11 @@ def with_nb_annotations(annotations: dict): def update_image(image) -> str: """ The update_image() function automatically sets the image config parameter to a preset image based on Python version if not specified. - If no Ray image exists for the given Python version a warning is produced. + This now points to the centralized function in utils.py. """ if not image: - python_version = f"{sys.version_info.major}.{sys.version_info.minor}" - if python_version in SUPPORTED_PYTHON_VERSIONS: - image = SUPPORTED_PYTHON_VERSIONS[python_version] - else: - warnings.warn( - f"No default Ray image defined for {python_version}. Please provide your own image or use one of the following python versions: {', '.join(SUPPORTED_PYTHON_VERSIONS.keys())}." - ) + # Pull the image based on the matching Python version (or output a warning if not supported) + image = get_ray_image_for_python_version(warn_on_unsupported=True) return image diff --git a/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py b/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py index 6d322b5f..f970d945 100644 --- a/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py +++ b/src/codeflare_sdk/ray/cluster/test_build_ray_cluster.py @@ -43,6 +43,7 @@ def test_update_image_without_supported_python_version(mocker): "codeflare_sdk.ray.cluster.build_ray_cluster.SUPPORTED_PYTHON_VERSIONS", { "3.11": "ray-py3.11", + "3.12": "ray-py3.12", }, ) @@ -60,7 +61,7 @@ def test_update_image_without_supported_python_version(mocker): # Assert that the warning was called with the expected message warn_mock.assert_called_once_with( - "No default Ray image defined for 3.8. Please provide your own image or use one of the following python versions: 3.11." + "No default Ray image defined for 3.8. Please provide your own image or use one of the following python versions: 3.11, 3.12." 
) # Assert that no image was set since the Python version is not supported diff --git a/tests/e2e/local_interactive_sdk_kind_test.py b/tests/e2e/local_interactive_sdk_kind_test.py index 0f5fd93f..1dd8a2e0 100644 --- a/tests/e2e/local_interactive_sdk_kind_test.py +++ b/tests/e2e/local_interactive_sdk_kind_test.py @@ -115,7 +115,9 @@ def heavy_calculation(num_iterations): ref = heavy_calculation.remote(3000) result = ray.get(ref) - assert result == 1789.4644387076714 + assert ( + result == 1789.4644387076728 + ) # Updated result after moving to Python 3.12 (0.0000000000008% difference to old assertion) ray.cancel(ref) ray.shutdown() diff --git a/tests/e2e/support.py b/tests/e2e/support.py index 165a680b..fe9261a2 100644 --- a/tests/e2e/support.py +++ b/tests/e2e/support.py @@ -8,6 +8,7 @@ _kube_api_error_handling, ) from codeflare_sdk.common.utils import constants +from codeflare_sdk.common.utils.utils import get_ray_image_for_python_version def get_ray_cluster(cluster_name, namespace): @@ -27,7 +28,10 @@ def get_ray_cluster(cluster_name, namespace): def get_ray_image(): - return os.getenv("RAY_IMAGE", constants.CUDA_RUNTIME_IMAGE) + return os.getenv( + "RAY_IMAGE", + get_ray_image_for_python_version(warn_on_unsupported=False), + ) def get_setup_env_variables(**kwargs): From a20be01b02f5ae3b5e2648e78d49753367971d1e Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Wed, 3 Sep 2025 08:58:59 +0100 Subject: [PATCH 492/496] task(RHAIENG-866): Updated pydantic for feast compat Signed-off-by: Pat O'Connor --- poetry.lock | 210 +++++++++++++++++++++++++++++++++++-------------- pyproject.toml | 2 +- 2 files changed, 153 insertions(+), 59 deletions(-) diff --git a/poetry.lock b/poetry.lock index 89bca284..49e45352 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -162,6 +162,18 @@ files = [ {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, ] +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + [[package]] name = "anyio" version = "4.9.0" @@ -3141,70 +3153,137 @@ files = [ [[package]] name = "pydantic" -version = "1.10.22" -description = "Data validation and settings management using python type hints" +version = "2.11.7" +description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "pydantic-1.10.22-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:57889565ccc1e5b7b73343329bbe6198ebc472e3ee874af2fa1865cfe7048228"}, - {file = "pydantic-1.10.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:90729e22426de79bc6a3526b4c45ec4400caf0d4f10d7181ba7f12c01bb3897d"}, - {file = "pydantic-1.10.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8684d347f351554ec94fdcb507983d3116dc4577fb8799fed63c65869a2d10"}, - {file = "pydantic-1.10.22-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8dad498ceff2d9ef1d2e2bc6608f5b59b8e1ba2031759b22dfb8c16608e1802"}, - {file = "pydantic-1.10.22-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fac529cc654d4575cf8de191cce354b12ba705f528a0a5c654de6d01f76cd818"}, - {file = "pydantic-1.10.22-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4148232aded8dd1dd13cf910a01b32a763c34bd79a0ab4d1ee66164fcb0b7b9d"}, - {file = "pydantic-1.10.22-cp310-cp310-win_amd64.whl", hash = "sha256:ece68105d9e436db45d8650dc375c760cc85a6793ae019c08769052902dca7db"}, - {file = "pydantic-1.10.22-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8e530a8da353f791ad89e701c35787418605d35085f4bdda51b416946070e938"}, - {file = "pydantic-1.10.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:654322b85642e9439d7de4c83cb4084ddd513df7ff8706005dada43b34544946"}, - {file = "pydantic-1.10.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8bece75bd1b9fc1c32b57a32831517943b1159ba18b4ba32c0d431d76a120ae"}, - {file = "pydantic-1.10.22-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eccb58767f13c6963dcf96d02cb8723ebb98b16692030803ac075d2439c07b0f"}, - {file = "pydantic-1.10.22-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7778e6200ff8ed5f7052c1516617423d22517ad36cc7a3aedd51428168e3e5e8"}, - {file = "pydantic-1.10.22-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bffe02767d27c39af9ca7dc7cd479c00dda6346bb62ffc89e306f665108317a2"}, - {file = "pydantic-1.10.22-cp311-cp311-win_amd64.whl", hash = "sha256:23bc19c55427091b8e589bc08f635ab90005f2dc99518f1233386f46462c550a"}, - {file = "pydantic-1.10.22-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:92d0f97828a075a71d9efc65cf75db5f149b4d79a38c89648a63d2932894d8c9"}, - {file = "pydantic-1.10.22-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:6af5a2811b6b95b58b829aeac5996d465a5f0c7ed84bd871d603cf8646edf6ff"}, - {file = "pydantic-1.10.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cf06d8d40993e79af0ab2102ef5da77b9ddba51248e4cb27f9f3f591fbb096e"}, - {file = "pydantic-1.10.22-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:184b7865b171a6057ad97f4a17fbac81cec29bd103e996e7add3d16b0d95f609"}, - {file = "pydantic-1.10.22-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:923ad861677ab09d89be35d36111156063a7ebb44322cdb7b49266e1adaba4bb"}, - {file = "pydantic-1.10.22-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:82d9a3da1686443fb854c8d2ab9a473251f8f4cdd11b125522efb4d7c646e7bc"}, - {file = "pydantic-1.10.22-cp312-cp312-win_amd64.whl", hash = "sha256:1612604929af4c602694a7f3338b18039d402eb5ddfbf0db44f1ebfaf07f93e7"}, - {file = "pydantic-1.10.22-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b259dc89c9abcd24bf42f31951fb46c62e904ccf4316393f317abeeecda39978"}, - {file = "pydantic-1.10.22-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9238aa0964d80c0908d2f385e981add58faead4412ca80ef0fa352094c24e46d"}, - {file = "pydantic-1.10.22-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f8029f05b04080e3f1a550575a1bca747c0ea4be48e2d551473d47fd768fc1b"}, - {file = "pydantic-1.10.22-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c06918894f119e0431a36c9393bc7cceeb34d1feeb66670ef9b9ca48c073937"}, - {file = "pydantic-1.10.22-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e205311649622ee8fc1ec9089bd2076823797f5cd2c1e3182dc0e12aab835b35"}, - {file = "pydantic-1.10.22-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:815f0a73d5688d6dd0796a7edb9eca7071bfef961a7b33f91e618822ae7345b7"}, - {file = "pydantic-1.10.22-cp313-cp313-win_amd64.whl", hash = "sha256:9dfce71d42a5cde10e78a469e3d986f656afc245ab1b97c7106036f088dd91f8"}, - {file = "pydantic-1.10.22-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3ecaf8177b06aac5d1f442db1288e3b46d9f05f34fd17fdca3ad34105328b61a"}, - {file = "pydantic-1.10.22-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb36c2de9ea74bd7f66b5481dea8032d399affd1cbfbb9bb7ce539437f1fce62"}, - {file = "pydantic-1.10.22-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6b8d14a256be3b8fff9286d76c532f1a7573fbba5f189305b22471c6679854d"}, - {file = "pydantic-1.10.22-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:1c33269e815db4324e71577174c29c7aa30d1bba51340ce6be976f6f3053a4c6"}, - {file = "pydantic-1.10.22-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:8661b3ab2735b2a9ccca2634738534a795f4a10bae3ab28ec0a10c96baa20182"}, - {file = "pydantic-1.10.22-cp37-cp37m-win_amd64.whl", hash = "sha256:22bdd5fe70d4549995981c55b970f59de5c502d5656b2abdfcd0a25be6f3763e"}, - {file = "pydantic-1.10.22-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e3f33d1358aa4bc2795208cc29ff3118aeaad0ea36f0946788cf7cadeccc166b"}, - {file = "pydantic-1.10.22-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:813f079f9cd136cac621f3f9128a4406eb8abd2ad9fdf916a0731d91c6590017"}, - {file = "pydantic-1.10.22-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab618ab8dca6eac7f0755db25f6aba3c22c40e3463f85a1c08dc93092d917704"}, - {file = "pydantic-1.10.22-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d128e1aaa38db88caca920d5822c98fc06516a09a58b6d3d60fa5ea9099b32cc"}, - {file = "pydantic-1.10.22-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:cc97bbc25def7025e55fc9016080773167cda2aad7294e06a37dda04c7d69ece"}, - {file = "pydantic-1.10.22-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dda5d7157d543b1fa565038cae6e952549d0f90071c839b3740fb77c820fab8"}, - {file = "pydantic-1.10.22-cp38-cp38-win_amd64.whl", hash = "sha256:a093fe44fe518cb445d23119511a71f756f8503139d02fcdd1173f7b76c95ffe"}, - {file = "pydantic-1.10.22-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec54c89b2568b258bb30d7348ac4d82bec1b58b377fb56a00441e2ac66b24587"}, - {file = "pydantic-1.10.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8f1d1a1532e4f3bcab4e34e8d2197a7def4b67072acd26cfa60e92d75803a48"}, - {file = "pydantic-1.10.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ad83ca35508c27eae1005b6b61f369f78aae6d27ead2135ec156a2599910121"}, - {file = "pydantic-1.10.22-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53cdb44b78c420f570ff16b071ea8cd5a477635c6b0efc343c8a91e3029bbf1a"}, - {file = "pydantic-1.10.22-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:16d0a5ae9d98264186ce31acdd7686ec05fd331fab9d68ed777d5cb2d1514e5e"}, - {file = "pydantic-1.10.22-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8aee040e25843f036192b1a1af62117504a209a043aa8db12e190bb86ad7e611"}, - {file = "pydantic-1.10.22-cp39-cp39-win_amd64.whl", hash = "sha256:7f691eec68dbbfca497d3c11b92a3e5987393174cbedf03ec7a4184c35c2def6"}, - {file = "pydantic-1.10.22-py3-none-any.whl", hash = "sha256:343037d608bcbd34df937ac259708bfc83664dadf88afe8516c4f282d7d471a9"}, - {file = "pydantic-1.10.22.tar.gz", hash = "sha256:ee1006cebd43a8e7158fb7190bb8f4e2da9649719bff65d0c287282ec38dec6d"}, + {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, + {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.6.0" +pydantic-core = "2.33.2" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, + {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, + {file = 
"pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, + {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, + {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, + {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, + {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, + {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, + {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, + {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, + {file = 
"pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, + {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, + {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, + {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, + {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, + {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, + {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, + {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, + {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, + {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, + {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, + {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, + {file = 
"pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, + {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, + {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, + {file = 
"pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, + {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, + {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygments" @@ -4294,6 +4373,21 @@ files = [ {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, ] +[[package]] +name = "typing-inspection" +version = "0.4.1" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, + {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + [[package]] name = "tzdata" version = "2025.2" @@ -4656,4 +4750,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.1" python-versions = "^3.11" -content-hash = "29175d71269b0c8a1a53eb7027e8d824ef150bf8b4aa4cea8a9ed75684aa6e43" +content-hash = "6720576cf9ff57c7bb15b97e268bb414218f6a053e7e0a5bdd45d022c0847111" diff --git a/pyproject.toml b/pyproject.toml index 5caafcb6..23d45003 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ ray = {version = "2.47.1", extras = ["data", "default"]} kubernetes = ">= 27.2.0" cryptography = "43.0.3" executing = "1.2.0" -pydantic = "< 2" +pydantic = ">= 2.10.6" ipywidgets = "8.1.2" [tool.poetry.group.docs] From 9d1a08b4c0fc836d27f99d99a73c968a9a32b1c6 Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Tue, 9 Sep 2025 10:53:13 +0100 Subject: [PATCH 493/496] task(RHOAIENG-33492): Bump Ray image to CUDA 12.8 Signed-off-by: Pat O'Connor --- demo-notebooks/additional-demos/hf_interactive.ipynb | 2 +- demo-notebooks/additional-demos/local_interactive.ipynb | 2 +- demo-notebooks/additional-demos/ray_job_client.ipynb | 2 +- demo-notebooks/guided-demos/0_basic_ray.ipynb | 2 +- demo-notebooks/guided-demos/1_cluster_job_client.ipynb | 2 +- demo-notebooks/guided-demos/2_basic_interactive.ipynb | 2 +- demo-notebooks/guided-demos/3_widget_example.ipynb | 2 +- .../guided-demos/notebook-ex-outputs/0_basic_ray.ipynb | 2 +- .../notebook-ex-outputs/1_cluster_job_client.ipynb | 2 +- .../notebook-ex-outputs/2_basic_interactive.ipynb | 2 +- demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb | 2 +- .../guided-demos/preview_nbs/1_cluster_job_client.ipynb | 2 +- .../guided-demos/preview_nbs/2_basic_interactive.ipynb 
| 2 +- src/codeflare_sdk/common/utils/constants.py | 4 ++-- 14 files changed, 15 insertions(+), 15 deletions(-) diff --git a/demo-notebooks/additional-demos/hf_interactive.ipynb b/demo-notebooks/additional-demos/hf_interactive.ipynb index 2f0334dc..9b32ab2e 100644 --- a/demo-notebooks/additional-demos/hf_interactive.ipynb +++ b/demo-notebooks/additional-demos/hf_interactive.ipynb @@ -71,7 +71,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/additional-demos/local_interactive.ipynb b/demo-notebooks/additional-demos/local_interactive.ipynb index d1318103..257c6c1b 100644 --- a/demo-notebooks/additional-demos/local_interactive.ipynb +++ b/demo-notebooks/additional-demos/local_interactive.ipynb @@ -38,7 +38,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/additional-demos/ray_job_client.ipynb b/demo-notebooks/additional-demos/ray_job_client.ipynb index 11689a1b..42d3faa0 100644 --- a/demo-notebooks/additional-demos/ray_job_client.ipynb +++ b/demo-notebooks/additional-demos/ray_job_client.ipynb @@ -44,7 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/0_basic_ray.ipynb b/demo-notebooks/guided-demos/0_basic_ray.ipynb index 7d35107d..7bc69afa 100644 --- a/demo-notebooks/guided-demos/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/0_basic_ray.ipynb @@ -50,7 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb index 0c0b3488..2f042a6d 100644 --- a/demo-notebooks/guided-demos/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/1_cluster_job_client.ipynb @@ -44,7 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/2_basic_interactive.ipynb index 71d6b78e..683ec236 100644 --- a/demo-notebooks/guided-demos/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/2_basic_interactive.ipynb @@ -47,7 +47,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/3_widget_example.ipynb b/demo-notebooks/guided-demos/3_widget_example.ipynb index 28ad43f6..8b70e1da 100644 --- a/demo-notebooks/guided-demos/3_widget_example.ipynb +++ b/demo-notebooks/guided-demos/3_widget_example.ipynb @@ -50,7 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb index 21631288..49f7f687 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/0_basic_ray.ipynb @@ -50,7 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb index 44546a94..913fb919 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/1_cluster_job_client.ipynb @@ -44,7 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb index 35daa0ff..9c816c53 100644 --- a/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/notebook-ex-outputs/2_basic_interactive.ipynb @@ -47,7 +47,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb index 21631288..49f7f687 100644 --- a/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/0_basic_ray.ipynb @@ -50,7 +50,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb index 1d752f6a..3c7b7876 100644 --- a/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/1_cluster_job_client.ipynb @@ -44,7 +44,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." 
] diff --git a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb index 1c099cfd..1de3fc9c 100644 --- a/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb +++ b/demo-notebooks/guided-demos/preview_nbs/2_basic_interactive.ipynb @@ -47,7 +47,7 @@ "NOTE: The default images used by the CodeFlare SDK for creating a RayCluster resource depend on the installed Python version:\n", "\n", "- For Python 3.11: 'quay.io/modh/ray:2.47.1-py311-cu121'\n", - "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu121'\n", + "- For Python 3.12: 'quay.io/modh/ray:2.47.1-py312-cu128'\n", "\n", "If you prefer to use a custom Ray image that better suits your needs, you can specify it in the image field to override the default." ] diff --git a/src/codeflare_sdk/common/utils/constants.py b/src/codeflare_sdk/common/utils/constants.py index d380dcc0..7172da40 100644 --- a/src/codeflare_sdk/common/utils/constants.py +++ b/src/codeflare_sdk/common/utils/constants.py @@ -2,10 +2,10 @@ """ The below are used to define the default runtime image for the Ray Cluster. * For python 3.11:ray:2.47.1-py311-cu121 -* For python 3.12:ray:2.47.1-py312-cu121 +* For python 3.12:ray:2.47.1-py312-cu128 """ CUDA_PY311_RUNTIME_IMAGE = "quay.io/modh/ray@sha256:6d076aeb38ab3c34a6a2ef0f58dc667089aa15826fa08a73273c629333e12f1e" -CUDA_PY312_RUNTIME_IMAGE = "quay.io/modh/ray@sha256:23860dfe2e47bb69709b3883b08fd1a4d836ce02eaf8d0afeeafe6986d0fc8fb" +CUDA_PY312_RUNTIME_IMAGE = "quay.io/modh/ray@sha256:9c72e890f5c66bb2a0f0d940120539ffc875fb6fed83380cbe2eba938e8789b1" # Centralized image selection SUPPORTED_PYTHON_VERSIONS = { From 22dbaaf491fa8d354cba759a284e7d1a3511eec2 Mon Sep 17 00:00:00 2001 From: kryanbeane Date: Fri, 12 Sep 2025 09:22:21 +0100 Subject: [PATCH 494/496] fix: update pyproject.toml --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 23d45003..db864def 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,10 @@ [project] name = "codeflare-sdk" -version = "0.30.0" +version = "0.31.0" [tool.poetry] name = "codeflare-sdk" -version = "0.30.0" +version = "0.31.0" description = "Python SDK for codeflare client" license = "Apache-2.0" From 3668d60e46ec35365e8b5be6e3175562037209ec Mon Sep 17 00:00:00 2001 From: Pat O'Connor Date: Tue, 16 Sep 2025 14:54:15 +0100 Subject: [PATCH 495/496] task(RHOAIENG-34228): CodeFlare SDK Patch release 0.31.1 Signed-off-by: Pat O'Connor --- pyproject.toml | 4 ++-- src/codeflare_sdk/common/utils/constants.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index db864def..71610478 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,10 @@ [project] name = "codeflare-sdk" -version = "0.31.0" +version = "0.31.1" [tool.poetry] name = "codeflare-sdk" -version = "0.31.0" +version = "0.31.1" description = "Python SDK for codeflare client" license = "Apache-2.0" diff --git a/src/codeflare_sdk/common/utils/constants.py b/src/codeflare_sdk/common/utils/constants.py index 7172da40..fcd064d6 100644 --- a/src/codeflare_sdk/common/utils/constants.py +++ b/src/codeflare_sdk/common/utils/constants.py @@ -5,7 +5,7 @@ * For python 3.12:ray:2.47.1-py312-cu128 """ CUDA_PY311_RUNTIME_IMAGE = "quay.io/modh/ray@sha256:6d076aeb38ab3c34a6a2ef0f58dc667089aa15826fa08a73273c629333e12f1e" -CUDA_PY312_RUNTIME_IMAGE = 
"quay.io/modh/ray@sha256:9c72e890f5c66bb2a0f0d940120539ffc875fb6fed83380cbe2eba938e8789b1" +CUDA_PY312_RUNTIME_IMAGE = "quay.io/modh/ray@sha256:fb6f207de63e442c67bb48955cf0584f3704281faf17b90419cfa274fdec63c5" # Centralized image selection SUPPORTED_PYTHON_VERSIONS = { From e8404ec55d615c7a15687ad13adfc1e23e67349b Mon Sep 17 00:00:00 2001 From: Cathal O'Connor Date: Wed, 17 Sep 2025 12:50:09 +0100 Subject: [PATCH 496/496] Update Owners file (#908) --- OWNERS | 2 ++ 1 file changed, 2 insertions(+) diff --git a/OWNERS b/OWNERS index e26dd545..78bda8e8 100644 --- a/OWNERS +++ b/OWNERS @@ -1,6 +1,7 @@ approvers: - astefanutti - Bobbins228 + - CathalOConnorRH - chipspeak - ChristianZaccaria - dimakis @@ -16,6 +17,7 @@ approvers: reviewers: - astefanutti - Bobbins228 + - CathalOConnorRH - chipspeak - ChristianZaccaria - dimakis

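The pydantic constraint change earlier in this series (pyproject.toml moving from "< 2" to ">= 2.10.6", with pydantic-core and typing-inspection entering the lock file) implies the usual v1-to-v2 API migration for any code built on pydantic models alongside the SDK. The sketch below is illustrative only and not taken from the SDK: the RayImageSpec model and its fields are invented for the example, while ConfigDict, field_validator, and model_dump are the standard pydantic v2 replacements for the v1 nested Config class, @validator, and .dict().

    # Minimal pydantic v2 sketch; RayImageSpec is a made-up model for illustration.
    from pydantic import BaseModel, ConfigDict, field_validator

    class RayImageSpec(BaseModel):
        # model_config replaces the v1 nested "class Config"
        model_config = ConfigDict(frozen=True)

        python_version: str
        image: str

        @field_validator("python_version")  # replaces the v1 @validator decorator
        @classmethod
        def check_version(cls, value: str) -> str:
            if value not in ("3.11", "3.12"):
                raise ValueError(f"unsupported Python version: {value}")
            return value

    spec = RayImageSpec(
        python_version="3.12",
        image="quay.io/modh/ray:2.47.1-py312-cu128",
    )
    print(spec.model_dump())  # v2 replacement for the v1 .dict()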
z)Fmp6zk*eowF^8v;!}H5YoEdWac3d}hWlp0(J+9Vj+i{;W6Br(2NQh|Q0B~U6->D7sP zFN21m==Hr`T#QJxQycPF&4F7L;g-~Y*dJ;sZDh6|;sNS*twzrV6RI(&_nxq$)Hw86 z4In}p=a|%O{vlAAT`_NOkfn5@^D%%nzmI%9_kXCpInPVxH#+TP0zyJFwCoEkN@jS7 z_DHZ7sT@GaxYB|F4QPww|4OH$4huj$vp~b>=L3rjm0t&>Xrb7pE!Fd)EFjvYw?#)o z3fmi0So)f7Z$z7Ji1NJl zK|)c{+|)F|UVim^rsq*se%!wuAGeI(MRj%Wx^qLM;FK?FsVoTgmy7w>-jQpcw-Jy; zmcwZWWOyvfk=i|e@2W-n`g0iUa2>?~ZdY(p`E6d>eid1c8glXwz!C@0gcm?p!f|C^oNkSCTeUkuZTnr^iM1yFP5O&4a z!`b9=3|402VuogIEVX$i1s&6fV`PZ&s1a=ct#M)Iz=p^ShsNpjKTb|yE~tf90ms0w zS!{>a*3seT`XP_pLCmA*T=x(x06Nk3}>Gm)4 zd-|I~|AW^*Z6o)dv5i^)HN3}G$b#Gi5d;^nLm2=0TB%p2C*WFIgM*V;dp`#0gK|T3`B`-)sCKM3?Wu> z0V3ktfgjB14(bCJt`-K?q+0xrE6rItE!Gw3qfwe&7EgT-JK?crH-_j)0bPL+^L4C8 z$q5Nxgd2M9EBM(h|DTpY*lbS#URc_y5C-Y$O6RlYY1%08^ojlhLyHNJx$U@nsY8J(P-%*z2?zt8#^ZpM;Aa6c%Q)9*NoIIOP;ou?l54XjTq2$RCV5 z3C64nZ_ipL#a7I^>DwE+5>k4xR(CFkHG#Jl8d@_uz8NqSq0bP9 z#}c9w^mo3}wbeo6{N@hLhvVxrl7B)!1V0TgZv+t~{};MbttE<7T2)Ey)ViIFmtxZ4 zfjVzTcxZO&A%GPHQA8+&wJ)!AQp7Vyd%!ERo^<;@FkRy{1L5&nw%?S83?gpkN6R6; zNvu~!#rLuR+={GGAD!`FFpB>!7#P>OjJSx}q2LN92S;%vNr!1cAz*a&hD)qI#-chX zEGQ%ypl*UQiMVa8ZGQzQMR@89u9{r$H72WNj?3XvTA#Yr?2TwSWtbP3 zUQGt?BU1!;-=YGg})`ZGMlZ$nceVW9FUX!ddkvIkqur%jR&{vI!&ynZ&L0Bf> z@{J<@qpr?53XHepYL~O^a%~Y*TD5Et#)TIndOf*=Rh^X*a?|OWdtodV%1qJxWm?ZN z)8poZ`XTgi>mUwatwLi$KBeMNy{G6$I{Cjjs=I{FoNs4)pJS8{z;#KtoF8d;cvg0E zEUI(~Q#OFaMv>k>rxfZH2lD2>I1ea6`;%ipRhH1?J*cd5Ri3GLbKYm}Ux2GPSiIuIIq}ay~g-zs%+LBV9CgMAg(DgVoo=l_DNh=JmXi5GU z@h9mJup)lHrV<6yrC@gy{WiTZSk*C%nB@=od|&ia0sqg2KWx4SbG*AXg8O8%{tFE< zMLm}EGD~3uDZ^YQGO>Up;MohF<%qAO*D82>m%V;o%m#J{_*%Nuwx}+SKQr=)lnavZ z>C(G@byb(T2|9VfTWGL*aT)o5Z?gIl8PR>+Uf(7yuq1BO=DKLW>vR-F7V2BDaWH{b zL4&J7?6|sHJ-l|$m^E#rm-TfZ`{^Sj%wJw}!10TlJj31jGVZRGj9glEk-06W%3Hg; zmC+^%~*`3hBZucRdSZRp4$a6U;JEu;R6j2 z)nwqKHTJoXn^w{?ApXh+MD{blUdf}qaIkVabCDYN%c!`p`=zg-pSw9FG*ncX@)PiX zj*`T35XuNSuJupWt1tkyp$gi|*-*gWR_dzF)^CC@kN)G5;#aP`4HUX%n(j!qjbyEge3qB_tTUH0jo_lF$k`Z)_5v}w^n#KOqrRbL}J#%m`^ znE;6pD%1^fFc6H=_z}d{C0N??rFLI`Y&MxB&k(dn^>4KTA!^A>8M0ct`)MnG%abcL zS;zsSp6ox}?gP^ggA>baT%Uh;>UA?%Gkhf-llo}3NxqtW&|Vy1(o!>Xw)yI%P(o|G z*l#_bi8O}@r8u0w+qxNt&XI3oyHgozxV2sF(8DfAi~{trj>>K==&j@_%5`t!W)eJ zW*xpiv&{KQV}YTHrtYiVY$(^K9(+Oei|@`JZgFo_7A)XC6Jwkhz+JVxQ7M1-089~O6(wa-t;@s?P zs5@!I^;RN9Aap<_T7i{3RTVIPe*+5_8)M}&Fx*ZGK!Avd0IMkBR?Ak`F9_mnT>f^w zNA<65W3&E+rV+^>-%D4pt@?tbV5kk<1q67Q6Hhnj9uh>(;{zA42ZkXaIAtJqm}%vT zr0Lm4(rs2vi9kAPGFYE|Q;ZEeA5cUst`t3+Eeox;oi@t7r=FT8qDciFQpsE~(y_zW z=R-Z0GL>_pYJY^4FhydY~iVT1BW05bcO$(EPtaT z-_8w}(2km7J=i%?Wkq6F@&lU5xTHv1s-E_$hyfq2(gQm=`(RMYYW zdCKsSSsw|hq^A4vk(Os{0smt>M-F-dex~TUP(ARnFSZl)J*UM8jcC-)>L?LB<}nA7 zjFXd7!(QhdAhGs^1T5y9e1h5@f$Bfv_&Wo=_j<*Wa-5E9Si$r7R?{{I4ZnYXt}}zR zl=_!gFdSe7`Mj7G$Z`@c1HQ`$ba<+0d|EXYNy!X4;Mo@UmD8-uC(`f8>;5j1(LBn66h5(`yLO{-(!#L^oRtzdq84{8xIA-7 zss8%?n{G;7+xHOjcPC#$mD^H(M2{T6u|`CJtwMYgWoMTv2a@f8@PMGP;8B=!^Lw-B z&oKT+g#gDbpIXmFNKf;{aw%ZB=z3u22Opa9;#$y9oe=gCO41gLb6A9quyDhskpxA& zbM*pN;7bTj96bi9X1 zW(;Uq$P#s-*b^?8`~raMPzf2?V-C~!6>mp6>5o?7XYln_V}BbYVyk=DNf{OxV3eLd zV0y$M`ki0}`0Ym_?0~dE68)~Dz}!G!4C7e3<_%8~bXcuJ*fUd+cuWfqbd;ooL33pb zcvdg-&FbI;f0A*VJ4MtcQY|H5pRWBjj!l@2`tm6m)kSSXSPNL8>$aOF;C6B*QD0+LCatZ( z-rzqB2=CCBg=ZsX(sMhJfN&G7Z2`!=EAGT% zJHjsz_lN>{S_kfz{StCCP(V&+VcW&`wfDORA!8X6IY(sTf+&l@*u~C^_k`AN}R2cWn9}%_E_jeeKV6=r$e*<8Z#5 zm#df6gJ~PQKydN|{;n&uI-4$OGNPl>u$yzc!VPSLk5<%nR_O36Qq~S)d@J>+%5|_;BDvz&# zlHl6UJZsze<|j#B4d`s(z^2JYeg2`XJ6`13UJtlYr}*8KEYXTF&;KEMe<2?keSC6? 
zBtgV`i9=_YR5jjpmWbgg%|$I> z-#nmiISBTU)9}E%DTp)q^sw?FOHS>@bmJnvE;iA0pS~C#gj$OTQF48i6KTi8bU~4; zJN5}9x}O#9)0d!w$^{_IR~bHS^5J-04C`!rY7oTI`vt{mN>#YX7gfo{vmn~}E zGNca@tHtue!-$Z8xB*RPi^H3wtfEZeqD6Np1NqWn`kO`qHfDc=7peG^vKP!|X+7$b zU}V?Vr;S8}H|K>khuzh#Uqxmb(=oMQxeE;If)lVYw0puKq(jm55N``5$2wIp}(K}3T|%`=xyUma=e6}bf~FI zW=jmm3pEYBb38~PSCDWQxCS1+Lyxw}1~C%$~$KVe? z-CE67$j>Kjw3y&67gR+E{s1?V%q>~wM)Bmr$>cOge*-GAVnnW)qD-pqHw?HB#II?s z_J?iiBSI}TZf@?`2f1{5@OFIGoY?sEX*T9JuD7RrzHfmpEcN>vey5N=&r>KVj62)? z*>(#3(WvbTaMRh~aSYCrL_eMeSO2!8=cp9eqKUs0j|Hl2x(@2EE&Q-KtMNb?Bbxq0 zLeK!qj&L@ri~oC&xHAyu%*kY~sS( zx$;bdlE6hxKtR&h}GA12wSTC`F6y()#)s&foThc5_Ol z5v|{xTcg?^5LHaJYe_@PbXz`sQ!i&01xhi4Jhs90z+diSn5;QGYKnegsom^T=MPU3 zjCT0tu3bqa$kVFYsqYo=`%S-ZJh5xUc-?`p1%TQxyR$P|PP(u!b(v-}3C?cp=6sCWH;`42r19Ol zuSugp?}!^CrVgK$NrX*%)@e9LAlMOdvQhi6vFs>~QC4HGrj5M997A*6J@L3KFeh|d zL5tyhRF8aRy9MOrYBT*f1_7ZtUT5GpQU{my`}E%=^rTCSRtS0!w1;QhxTsh(s+aRt z7R`mHGCG8pjXIy6$0fltYw8B9k5Nl{)U|Cp(c@c=J>|)0H-IC3 zK&@}%BbQ_1d!YFtq?te#G^L8onz(4c7E3O)f;41Kkt7`{LJ zI3uRA^jR0~&hriL)Z*&(WxPl4PVjH=P;MxgfWhzCpGdyKi$Uim=8!jQe-bLb-WStr zl(CeDV|D;(=_mDh(|v~MmjPnYi;g$Poi}YlNHLBNgLaHm(!|nG_suE;FrHj z_1ZvNbuaJjXJm)(M4QLd<3S4e$d0b3HG(4D! zEzMOJS=}L5VU;%p1xN>ZXq=P%{X4`ZzF25*nwtnwdqFlhd7^!^N)zhhTv7BMkJ{jI zn&mlIML`dT{W3GB$0o#@dE_;O%`Wq!V8qb)R7eAdTStqU>fgnmgnK6I4Lx)muS+c= zKk?Hv{5QMH9x7!b6&Zb-X(Y+M>i9j1T~;p{rlm#0q6<`*dUMe5m{3eNlgM!$%)}fSB;$L8wm>3L~3IsXVDr)eJYZ5;@wF1VD=e5&`lEIln+gC~KbNt6L_R|abVltz7He zbX`WNpc;iqh#v&>5TZ4;MlARtH0Xt02j#ZwOy|1&ebpz{yrQZq%)8+ZV6FHV5cbDi zTsKp7ar}97N%Mrjv0=VAH*kAhk&FZ_5El|yLqmNf6?W}w;JNJymd2@MHG+hEMM_h* zW`tlj^kJYf{3XlKjkArV)h^>i#J2I66V`M1z`XhBzL9IstEE3rIP`MW?f2CocPwFd z@rhUo9auIAJ@`^*<##;mSmTP{&go5j5&HoAEce3Kp@TF&!sE$r&il2!r&=iB$7RE# zz{aDyzTX;40k!*5b*;XxKTDD5p3%o&yKc)J(YSCu;AvKjH_!1G+D$w z4T3-YDQ?JgW6@-oy}U!yf*s3as+EI7LTc-VG-D>sV}Zc0jiH3WC`{h3sS9q{aT5A| z$D$Bi^+`!_?Ej8MQVy9_zkguhSI9unq9$Rh!&0M!IyNz@S5O~O9@3`*c1ozQUUiTY zm4S~WOq%?IJ*I4ReO4z0RAmkrf@${h?Z_DqjqpR!jh;O%W-~c=P~{9HEb{CPhco-r zpb9$-eIsw)Cw$A+dWGJ7Q9ak^1}gA&9z8R|0W>4<#`1Dmb9zI$zZ`mw2e#3SOxR5C z@qV41#+xkn@LA}$67wW`I0ai2mNP(WrJ7?q&hu$;`5`|{93CDKGjQ#6kFvu$0ic}t z=F3O(@`TT!nbCA9!I>(@E2?$cZ>H(CId+SxG=dInmYr!HS??oM%IUn=zK>KZpl-q5 zrl8ih<{>dZb@F!al@M# z?Y|M@pj*Ty_XgeHT-jA3tOuRfE5zp}B+z}g>9r`PB{gD!nl(!QMf9GNlY@8WwJ@Hk7EB_t(FLriQJs-^PV46W{^ zF4?ovK+_YF7PF9#K0AmelMagtD-A29Je>g|hGZ`ub{X1RsE+ zwv_9Pcq>SuSqu6w#=P=*W$?Qjn2wC)D_>E2dFE@P^RNV!d4nB9TbuXXWW^5CQG4RS0d^9xD zX~MW$D~FNj)!hcp?GWmHrbX~A;NN92M0?es6o2rB&m&}91zr513He^`cq|37iVk9d zI@%qIwLWeBfr`JbEY1KQzUHS+Wlb9sy0W@jLFU-;a8bhNn6+l7_TS0LAa9+Pf_8I~ zG9)M|i$D3|zzZ9n<<#nJLwI%EpM`?-W5*x1zt4HvXvq5sc@#8$eN~=VpC-HV{*{*P zqNV*C5qB;m?Z^;V$w_rx4N3=P;j*gO{D531Y{|ycX$Lz7^Lps|`uu-1r=!*j6T3X7D#5}LqXzfY>|LrTa zeqHq1nx0x%Tz=;(Kfe`JY2<|IP>j>p1m0#>H_VBw52W?L&GnP~u!`uzePVU>WThN_ z8OzfX+E!Jm(IHzxqm$h9bR_|&2A;q^E_TzwAE|NuF$8Qw7oplxlRrm(4Jgp0NgRj~ z9GE7MOA1;n*ArXsMdg>fa9Y->k-GLnqM&Enr+oh|jyBx6V@lpXaD27Ro*5KEuiN}f zwI*kMK2$+h3N~t@#Cd~ruFe$Ieqi91F~wnUsz|UkEw!#NB>4jbPU@rp(HZuP7;sq! 
zMuqae_>0031kM@(HjyiIAJ`3Yu4%rgEEv}?woQsd`%}z@gv%bpq~9(LEWw)~0!Pcs ziW{eZHM21PvxtBVxC6l4`??Il88FkT^puo@-ocQFIm6PbwW571Wqq!?JJd)=h2Y`w zC;~jW&eUP#ii^iIrpZy(1H(b%qQ!zeo6JJ-4L>{q11OKTH@o|dxOy%|5qH+>sFquiBYfZx?yNWz;1HkNRr2youdO*w8Ee#r9R6fzPi3 zJI`HgfXhyMdHIB@y=V^#N-uE#w^poqEyLhdC@BtsjFg8Fmltl`5X+QS!{f=}X~i#t z?(JQCxO2BouW^LyNzf|YaaNm~Su8>h6cYhxZL?rdh{6J5oc>(G z1-Gh2k8pA}qh`mFRKtE5iN-h~&>oJLlph%>vq_6YNarTKUuoBIkxYvV4Ta>0+6hpL z@b6mjCWFDA6C+u&Bsb+3j-E$;4DBDg_cq;b&&x|RG3_`~r;y>ozEKAzzgtvp9c5vB zPCHlCVV4IU}*Iz2 z>#CuHuiP*(`@_ZYT$eAy^72ZVuO{|S zef{kDE@DeuO$`%Dl4O#}T%nS%h=6u=|PY@2KHNtO>8wuHdp5bKlyH+ z(SN(?Xzv~_N2VgkNgx`xwPfW)g@L1(K;N<(W0enq7GY03qn$u5gmQp@7(~tUzIj>K z&Yk^*KP0>)r&*EiXPn!+K#)ScUi6FKUr7PV%6|m+vX!Ay!}n+;-Rj-)Qbo(k%2dkF z-kA(CwhO?K`^^He;s^x8HU^rm4^C^8KWk*|8~xrGowv0fFGX`hULMSy$N)H=N?GNt zc|TLhguKv5bZgsJ$Do{-47)G4ML=ew6zrcX|MVM~k&cS(EDf#5`3N68cI+Z5(D~zK*;U?iL;Y^Z3Yd zCV5|a@2YSF!?DO4p(k88M~dq!f#@>Nlk_jAd|-x__`p$Li${*9Gfmedj1T&ho|-NC zVNf4p;B^6~<@J}8nAj0555Femq_fqif^fzS%Aehf+c`CV|V{{&xV3sNBv4spu)m#$#;q zL^pn?0LJv9JCp!7b(OnJ^-s-Vw?EKf2yBD6Tf>7KOnB1Q^_1 zgFC?nhu{gpEjYp58Qd+nyN2Mw-3cVP6Wrb1&dz)4e)m?LbN?|_6ztiLboW}Tdz}+k zbxa$!Bce9cAM%N(l6|08{tT(sQ;HRGXL~b*2!7;px;b2F7ya~fLwdFNyEDJ(NbX_H zAZI+4Sek(&U1Plc@`e5iz|`V>GI)Q&yILpEE9R0dxA3qg?O0`MJhD945=Ikq5#g~v zCn;aIl6y-wDv-pYfRSheMBP`>>W5>c&IvI@8i$q7k+|>J zB$4k9nuUIIYC?jH&zHcSa^8Q5%@bkFKDa_XMccIyoP{|a+jYQmSgO;gsqoQtvIfl!fc#Ir4ExF2vF*% z2BUhAK*Xy8-W!{}XXV`q39jP&c^#(fqm^h|h`JSVSnz{2735IjPt)L0$joAB-R|TM zw-+4`h`|bRSwNI% zbinG%@fCq&+_f*2e8ksunx4xkzz})DF%n1mit(;FD9GfROvg8|_q*|4HA?9h&Wxre zBK%Ki5WFiJ3=#Rdm8bGyZ!cfwD!C+HNggH zQ*?EvG+PvBl$Yc6%veWJL9AT&XzYRNB*X z9oUWK5)p8^>K)s~QpjDuJVeb|BQE`9FVkLuEa}(KW))}UDD<#pu-9)+CgX4@hWW)t zEp9L(!s2={ntOi-WhiL1>?tvn>halt+((J6{SM9hTLA-U_HWT2+HgzJ=Qvnc(k?^E zBH<5IglP*OxXs>0)-lL7dfk%ILh4usMOQs?idP)D+W3tfNU`*Ku6IyXDex0wG8(M` zG)qg^d0)1M-a}j*{M_V8y3BixcQDcZZe}`@a@6kxMcJ}az80Ho=60Ejw1I+DC8aPR z1S1C-_Q80u9Nq$If{8Fb{g{(a>tu$7y85>yOON>C!ZsANtP@`M@_I8I`JMj@Yw}m8 ziEf3=UVMW;C3ZVd6ji+l%!7f=*p6BlJr#SjIS3uPG&-TVd+-Q=%g);U{oy^j(5Uxc z-NLN*5~%Ic%TxH@+pqR{REiK2<}x5BX9+4_AdvE#JsOu=`|r}}#p3t6VU~R8XbNY!FFSGq&dk&R>Qz+;piz7>2ENsVU?)I;XVEg^pt;KkkFSqBky>^{1YQN2r z9GmDXd~F_TT$-*jUl&F?6bik4b|jkbWD_dPvb;&TT$_7hzF!wRC{XTHOCbD2AwD;o zowqGo`MDpxx$CO<&-rjlT3tEYYD7fLluXXp=LGU8!;EezB+*w-%65)RDV%!NA`eu& zslJQ>z*s@nL8q}>pE_GV>plSd^ZCg;vE*_D`bus}$%z|$9uX;@STTs~u3d)?xOSbt z6@HvH9;Qx2rF$zyX@i&ddNZKIeYgzGJu@?jtb809>nG~i?MZ&}>K$CtX|gozid=s@ zJ=tMOKp-M`hoUM9(q(SUDxvp&q0LYe4RA~>L&=0V0%g<)6V+xsKI~$Xcah6~Tb~Zu z9#PNJlP*H-`dg@mVpJ-;B`YWdCs)Af8<#v+T`eO$A4b1&Ptq<|S)5}V_nzkE?o1Yy-Mx`qU z>L&>aFODvMcdXe~FWKu=mkD2d$X&iCu#9&*(# zIXj`Mw33C_@4fZmQ&DozO-Bwiucq4F660O;2;WVDh&O+9@@w^*!7OpBL^ zfz-ILPJFHJVPB-pl3Q$-b+h(?{3EWrJ>1*7-C~gidxoc-KxC3R#>>kqfA!n&o4)RD ze_!4DWZpp=YMnk|$fUb9m#O~aBQ{jWs5qH)pPY3z*BU0B9z=g!q=$Sq_>xCi zafQaR`4hM}q8#EFg8VQsL1wh}56-l;`xobDLP|CV9&J`!VhYb8es`&7T9_Op3c2gcBZDdweeOQ=jNf- zHLgsksR&Y|7VY-uV9vFyneU08&hCUUUd`||G&az_%7it=#Jv3q3gj}dITKY@cB9pI zVmLa4A|89XyMOnwV$bUiXn>13zp~fp+#SmjEV=nIg2*a+_ZVGuJrx#f`~|C3Pf*zu zp?B_Bs&A-)Xy&ZU)x{-`=tv^$OE=#t<482)Q2Gut)#%2K^-F^dX_9jN=tw5li*mPM zdUUBUCPogb9SPr~lo!OrEl6|4P$_>U{#7>+#(=GaJ^Xjt{`!}^_;wd`rQ|EegLP`n zq_q;xSr$gta4BShh%Ppyw@i_AqN1V_JdDT4um#{XqYnME?cH!s0Az>2TWr=OB?H_d zCMcb<3k1jZ?Jdea4IW84ArHzl2|OAjqz)I>ya5pbk%H?d+wsC9*AdoioamdTg~f#_ zVK=4EX$0>llyWYgsbSOmf4|o}Q~Uf`0=c3W&Nxm%d>?KxB6b}VOw4CP3CNOxSDuVrsRB{45D^jdT{ydg@?%&N@VU0@UI%&KCA#)%RkkeTF--$pFjE;R_J_8Ou``7UK zqHj}4?7@C3|GovesJ;R*-=ONLzHpgWqvo(5!}*heei8laRrSTwbr8Oue4`aW)M4qw zpsBG+reuFpEVCrs3Syp4V|z{+wy?46bZooET(dSCb+s}+&E>42rtVd1sJq5wKTUH) 
zbas$K+1@5C(id*!GwaUaW7=AFe1CU->+gku8$D1{57|h?lgO`MCuH zX7TToMNdnHhGgecfA!^Z$QZy|7*8fO{I?cFlq$ire-VNQ)i{`aQXZ_f)Mynb$EiRe zc)5HwUtnACT@H!<)xcL*mw@P&&q?jP8xP^(usa&$Y|s8D)wh)iMngpff+AOTV4&*2 zkwlH%N-8GcSpBZ1lr{sii43&Mrb{zQLd4@pVEulvTV0|u>u}v{$VaKS zB}jBoEE5(T!()Lt2}Z&6?DgkQ=ZjDt|D2qvA2`+1dGSr!k`#=L2=6#SeP+c{INOGw zz^Q=RC7M|9O*t4&aiL=78pxJs(3>->f!!s(CW?}nocxi>7EcC^oC%}qHC#p?3lt5H z#saBYsz$*Uoxq6%IW~LTUkx9=7B=Vj$c{45m>D~vAoOPvKRXZyS`6?pL&omEif|r- z7~eC+&;D4IBzAjIXxJSCclV~WhuYZKoE_6%TwWLJlqh^8tF^zgk!Gtc|BT&L6dyv> zK$#Wr>sNDyf`00qkztPY&B6{Y1u*)$~`3tKB{nzm6bQF>hKBx>%h>Ju8`u}l^R16H1KQqv62Osee z6Ei**ruip86qUN-K-`a&(2M@L#6(9)ExfbcY5+YzFx29W>|w>sBX5BsUY{<>+ZmW8 z!3vv`wPguT+B7N^7s#M83Ug%uF7=Fr4T6LV_gjW`GOi5`I7>y8aOtQ6zv$pA0l*bv zx73h;`1VNVgn^79V;{v{Y{b*O*-1=pfjI-_@t9meHopr7>JDivh#{p}8m*O4$e#*@ zSZbwd|Jn|EBv=UY!$bD&bVfiY<3_U5eV~CHk_)V%j~?RNzew|KbOHA9{#N zq@;DS`1lg`mPYI~mN%<2Y3AWjOV^s{YgWnxDYIp6VB;if0dkOu^rrJj+#lI2ZHR{6CjUM@Qb&ls*Ys=nkw@Q!Y7jd5W#Xq4mJ zus19iC@9HR4VDTlz8dg*c+`oA|HZjQ`Y!qHk00MoRp$k6uYZ2*Wi&UG1XU>d@lr8a zE`C6+Mvwu@J?>nn0qu9KUzR$*ErBa&;@oIBFx0zBUpNuk)(=nbubb4B+~jVu82pJI zCp76lb__7c%TFE6N}BVq5)u_CnU(R2WW?B8jeUB4)?WRkHBJ}~Vx#1M>p?QgMRtX`$3thXKwd=sOM>>@!*Ln4rThk?@b z!*y}>xXP3>Lp!oR9vEOw*n)$Db0O9cRGs+V3@Qqc^C#ym;s27qB+SxKKb6l~!A9oC zSFEmPw540z6`6j>Bo14dr z6k$K3obC4iDr2d)SyF$M-jby>?1uj!FaHY{K{acOsBuj-ew>wFMO$0CvXb@V$c7_> z^qoGu1vMmz?!TCzO|7D;Dz+ftF!0m5-z$)={JRH_21vWkL&GKsjCbUDM*j-13-C&OHmMIP*|qe?8m#Nk;X zk20&&#|s)Q$^1oI&|g{PIo_5)IJ3oh-};y#%JYQ^LX2C*&HC>y09r=za-Aq3Zl^1s zBQU=j57B(!0+wV^VXa6flC~=+yr;+byM1gySPmUIJuM_hNN!LyjGc*7y!NZ22(4v`3@w(herzNe-3_+i1aN5hjMzl zNz18PKBXd8q}{67ysj|@;ogqNhIS^%8l^rnS?Bywst#PLm)f~B&Eh)D72-w00}*u@A$9i@K| zETugQ$h!MmT9RVB=M5(8gLd4O3ndJ|n~j)Kz*o|!AYtqwpuQ?j6)Oc4`3W`Y$qn9# z*1+h!!Pv{sr#c`ZrNtu|kXhLv0~)m};P-MVuy(c4J`q*Rf`B%zqH2{ilbWIKZ@HBp z3}1SrxSh`+JZZi&q+3#fn7}en5ENnkw+szHkyL*W6hs504*UyCBNuu$vBi>h3czC! 
zCsh8dkOIsn7{m}OV`Ceip4oe0YGHhiG_l^G?5Y5LSVId73uvmcDSs%do#x2GoG58P z&Q7JjI<%<(K?vYsQBlq$Sz6RYtA0ppzah!k+}!*v;gil`uweifP>;dK6`}t33`N-d zUJcnymU#VoZ%Bfi}W1&JIpa}Upws_QLl zeHZv7L3VOd8zBcL;e)ntaAJFcDOfn4gcu?(J{<@X=>eQSPRe2=L4iJ@e+aCMaLeFF zSy}o^cthIu2pAh$IF=H|e<#5r0-`$Muoy#|*QnHvfDlEJzW8|s8>FrXY=?3(UxE8_ zOn&G(sHmu@T!9b&+^^#eded9+5(=QynoHksweDHh@rv)oiwnEIEh6VELcr2uxY?`y_ZUM%7vYpHaNjUv;I|`18_$=q7WA;B>#GejuLNSpFHi7W@ z%dUVy5y0~~QX>1mL&+3{W`nU47~jADSi4*)X|oi>W^h_GLvu z{r$AVuUgDjSDW-QS|a{bcbQ)9?s2-C$)f7uFL3_CUblJt)(ij?BxWS}ZsMvZDX)i+rYd*VeyFNx*S`ya%|HatbQWmnS%Ig|h-|yEDuch{QjL;RAh%;PD$316%n zJJ*BIP)X@+O`|&Qo39HSP#WzBhpK$_#rFoh_^kRAf-*>NP|)+b1Ch#!AJsiu9JSSz z*0#z%0Tn?ETEmOGdwrcFk>i8-+9L4UI(Uga2H&Gh=fkYf*X@o2@=r#2jX(A%_K4h7 zMobLMg_SiNACV`~jK}}7Hum*XPq79B1Pb!rsw@~FTLg3g2*Ux<={Th<_W6f6@wH85 z(GrdVnUBCUCxj3zlqXRb7?{`BTct0d|BvbKQ5KRr&fH9hD9ZJUmz^wV!t0ducE#(j zysx0r5e=(D%D`t~-8 zhK7dCTL`tP&c2+Vh|dG}Y?W?czC^9&HYfzlucaR&5z0AHCxZdb6(sa*`FF8}07ny#Yld@N9b8&v*Ogv>I9_E(?(XUN z^MYC6M>}#g3?M2K2RW00vEPqK_}zf4`dk}qm;BGFRp8<8h->gIwUM1k{I}3EFo}ty zVW7mXO{QY9BY~J9=5a8~^6(Z;sc*x(qfmgzKafD3=YkBY z1S9YvO(JujZlSR_C%_5h%z-j~51b@2XolzCdMh?@@qo46-KFBoZ5m3$6Zk|JMZ)om zxcwx2o*0 zsHnXmcIVZ`QTV-dClI$%ffo=@T%gzKj(#+dQ+Y)RX;XCD=6%mkH_SdLunCw9j4V>x z+V2ssPTBBC=qOpUERukg)XowJU($rXv&&Bukq{CRDh%;zXLIRjLAYS(v~t(Hhf!0$w3g<#iX4?hL0aO9*1en&1Un)-}PSxj7V=z z&o9do>1yuvC6EeYAS>aE2f_!~&#V5SA>S;_H8fz_I_l}(^nb{vS!^JtU2|t}CgUOF;NTdI z%-W=KI|u9b&C@4j;;dxSa*oN(->AMKW2ZsU$ zj|~w+&`FsCVkg24e34M`Sc|Q%B_*XLb$yK|12EYoKy@9@+3KHfXv@DAvpY~s6Yq@a#egv z;C)GT4cP)oIq1qiQB6`xT1CAiqu+6F$X`QUeO=9z*4q*{!35&ArSP-z6v!JqH8&CQ z@poslC}dDzqp^nmj0Co}3hHKV6V1&}Ygso5x*mzUOA`3R09nxQNZ^J41FWw9WxvnA zi=qmpKu2Cl4ttmnS0}Uv`Z_c;g27z^K)4x6qJU4wHiY%utrXt-D}YBv!#`xE4a$V| z2@uLv-omH&7J&(fiPCss_Wu}y+BTw6Jdh9&iM%T_0o!?qUnJ&0pQNg^WgK2l5K=fuKBA!iPi;U7R3O?i35*h!;Xu$!$zl%8sluRvYoQgdRYuPI$D8Y->?o}< zBh1To{AO0rZ_;u*2vYjlvwiFRa{qIxgm|VbYcI2SQ_Etfg0VMwNffy0p4||Md2PjS zPvp+dZ`20~E<{IHmSqU1vy6?`j<+p`%52`dP9@tQqn*jPyA%9yQMvRpZ;q(VDKEE5 ztg@fVJU937f@bsm5$MElTvGHo7w*gZO>OV!sHS`WKH+!+{?6aOJQ6jNR`6;gZVOit zduU{&1kHl4d4NYaG8y+NzGx(gCmv^%`=l)r#zwbevPJ)2`JIxM=%5<}v&!Ju&{IFB ziy|oaE4u}mZh-~94^F!hwvJv!dE;`Lm`u#Wl+ij78AUBQtINmjZ&d4P2Qh$Rr;te4 z*syfmUA)_x0w26ScD{bfr;GeOCHg>sjvnoP_R=b+W-m?~o=rY_7gfvu%xJ~%rlw|s zhQf=-DkE|e7Z<;Gbu5L`={Tc(_DY^?0qbcq+U;!I$1PvyqtTRY!L!$^vGV5kaAOYN zzk8QPEfep##Cmn%FRuAk`(x`;x~RK|DPKg8QBnH*M1(mk3=M{NF6>emzRRwyG&}LB zS1kEO{brU72{zn|dm(sl0YgNrytzc~`|(o9bkaTWl=}Pk@7)bJ92^tJ;PPg^i|Ze! 
z?5XY4@_d|Z*Ve3!RVT?{`m>T?>$cXcek*ZDZq9u{r z-`GqeJZ!J<*^iIq?hj?5zdU=&# zLaTb~NXyFV-5K0C^y$(@=wVGbj2gBUu#Jq@auTX+d3ewNo<9>mu37OVw(%gH=$fc; z^`}FzD`~xzGKMBOaL^|C6*AdlMmx_4Mi1&gYAx5-yD(*>)OD7B|Gu`jRaiTiv1mMS z)O=)k3K6oI+1+U-fQaTq<|Vk$n}$v#ij7v)g?WV-ts6xvJ~k`*T6FoeylkNS9^^G; zyWHg(%ge}!jOb(xDTdzvy)7A|sroE~Mkpj39d7)1ad?VJNERVO<_pOhw7GzT6F_&`Q4qM*hr?x9(#010)k$~s`%9DZOM6?J`1L~Hm&cf8&01%|mz$@$-R3jz8B8|c z`3W_W<}wzQadezd*pE)%tae}NM@IHf`}`|&lF5fH%xS=7m=MdB`W2JJ01BCk$S@<(Try)F43FehFIbN9>_mJ>Oio`#Ouk6lgP)`!e4sikste zZ!r8eLR@tC$F?Ud(#OfO_1|OXlLk5cGUU(?;x_f*#^o~1b(7(MV-ozp6#ZxXncSse_RN9MkB-dProB{pjti$ZiTX_zds5FYpl1wZf);S zF)@LO8;|-v)8s4BaC89Ko?^h=U=FmxcQ>Go8M8L07W=x!{8WT?RNv{VpRzs0&m~vkQpFu&;DlSg%N&90`*Rh1F z2H&bZI)<8!KO!gI7qK|z4&DK zDAnaY%`TC5jn3j>ysncC+q>nL2}iOlxf(9Ea-Gc*voUy8+@q#^?;s*3R#U&K=gzLJ ze== zt^kKOHlQAnBB1Yet+q8O5#BGaksIF!#RumO#t;SrGHvS1Gprxlb!7Edag;PP$QqM8 z{TNg~8~&s<`tiHwg$k-_qv~zof-0!kq1{w86JAm#lJ4a(DS<%{8SJ~W0_AC8IdiQv zqvjBu7|UB43Oe|4U-drsoF(Lu+OV-tZrUHdO{UAU~OpV-Y| z=;Ncg>;L56RZ}4IJ!)b$%AZTEGxNIr;zRa_g`P`%RxxzbJwzl#3PC<{K7BV@-|W^f z^-;A_=KWZUW*1p|OwZc`e{$b+BvEp^zaEb)G}*RY!#uy-H@-bG95!Q}wuwsgb4HN4 z(%JnL-|v|v;H)Mm^LUWDI!*IyJ#9mH#3(eneo(yIEWlv+*21VidXOaufRULo}8~m+0(hKfC4={rKbM1ALYem4|~Ip>6Xz6FAoCe3}b7 z=ryI^ALL*>iaJiRQ~-VAIkRoQ!qE<7H%Cjc z<}Y_m!>*#QJ#3Ywayonit0$SH?1d`t+)T5QNJDO3rfVa%x=@MgPUp8;%y$kb931Gq$IdzrOI3Dhg~ zOC;9@$vymPRef}X?Qd}PTfA{2A|gH)pQRW;BLNGY^6lx&BthRJ**1=22?a9IfZB}E zwPg&EkH>;41MH|nnbD6&Q_4p8;q@cx6|}GqzDLU`0X$Z7n-ifYq2wOoPUm6`Bc;?6 zx4{~#J#e>wBVZGkM;3aWcG-GLzx9q6=uFILn0dvS?>yW(su1hYr>}m^gu%J z@sr|8g(1QWnyp{nQHclI_a~n3iEAyUZRKsb_ICH>YkV%2B2!k%vFTd~c9OwLC$HvuwQK@8UH+rx@97*f*k+hV9f{!`Ah zM7VLHVrTk}4?*~GQ4#$)x`!0~fE%5EGgm-h00Z$-?2awE9{JpqZx z`B6%I*vo9EXg@vIDg;4~3pM=K*kmr>G-nj3_+>LARW@IF z*I<+?1BqPM?GNs*4YoEHxMAHeGMdLKVIrk(dW?biZoBg}jhR*Hot?#vtgfQbUi;QD z30Q%!^Hww_@7Xg~=9b2^S@olKMCZqW1KSHUeWheVW(d9Ad;m8%?iC`1)-EJG>czX8 z5cYIIJasny?3O3E06sIH$n-!>BKQ-$nBG7Fb~g9Eptb%r=)n+P&-~_;QuQ&bQNI`H z`KxxgRR8NXXS4R><7mu^7-8@2iW9!sD|gDkIB|j8_znx}7xv=ze$dq`GrxP=)XF2W za4T*6nJ@_w7iUdOM^p?c4+snEi>$mn^*3*#mDk(y)J~HmPoL@Pm*J5Id2rV2#nt8( z={^v+)Kx@#Su%+fu*Tx?B2^Ul%L(lo72Ih=*)<*s_XiVnwm z4)-~f(&oL?RdqZ2Es1&hb)S1puKHVxLt^1?CL>UMWv>o< zetMFWN6C@HH<1MsYlSs8@gEvP%-+8N@NZ`$8fLD4)n}SwSkTl8{CvPBvX7jJolMP| zgJcJ{xDu4G~QR&!n^r2>tNMjnOrWRMuO0yj}^L~1!RlY|W=H@6y@OYW?w#bHaw zJA^Ymhr*#w9hX~I9*=N6@yjE!DVLM_=~h|e=E&HpItIt9e8AV8Z`AJBMV;|_{UX1q zPuE3-uaQk>ti#{KIjy%?n*2Ej$~PthUA$ZYxEAQD$Z&QZ zn^qQ0^#?RLR4bmx9tU&y^2rssN2cK1P2(h(=ZB0bdn8jry9EF% zXMdx%Q%SvGqu_N42ez@1?dpVkO}j@xMBkIgW26o<<;eEl@tEOd)60<;f4ZL2k0!&x ze^kq%fLgKc3aJp8XlC43wZjd0GostuabkdG3 zqGo+}{^}J2f53hb)q$URJlfOInG@@|2RK1M)u?OwFht>@g+1pQwXbRN`k!nIz?MlYDOB1eIWgeCEuOdP6Juz;t+lnl$^8QDHG@ zPX(V|!4LZRjsmZbR6aF_6+bu}FE0|e`)t7_5^_PrVj?3|PnUW^NkDM}NFug&P6U;SKW&;K`})d0s{1 zI7&N_$!RZxE{0d1)%Ha&+v7q^A>Vqv7^xCS$F6Tex56`nRC_}yvpCff7_n0#FQwvO zvf|StL|>25pLUn|&K96h6|K%^FPDBYrcq60!of__QNrJrIGeH;bV@WM(QQJ*N501f ziRV7_wRKLPzqJGXXY1p6z5G5mQ5Vv_VL?lJeg0g2kJtPD#tR-FN^`h*fnUkI^M<%!Z)!Q8HRMAoufZuDGoAGhzHIlE~X@!^}Bh-5zdGH_v!iCp4DLs{-k z?0PJijG7`P|5lG1rWc@H?01Bxy7hj&2z{A~eC;78??83?>$&Lx-7gkN9Sc#^IR{V? 
z*{)cKwKL8_o{8a1i((?~bHFh8H9hDB0?wsZB9IVSf=WqhRx>H3D!5XdklC8tIwTw< zpvt`^=S99p2v*CqXEWtmEq?HN37#uu-~9XmCRNy+@{br@Wh%F{a3R0cEb;BC6aCw` zXym21vrH>3zZLml7C1(o%nA!Y!|X{@9UjpmEh)*Z!;v@WFr0cN;m1;ccbP!GF$^^Q z0%&(^#m^t~zz>I(yr#@7m}_j+B}~c_wH9Nm-sjfq_6xM|%*p?V0>SHa;yVtemj;ie zx4HtRAfK}LDaU`u$7NSW-^o3_p?DX1()sAy8C@GQXEz^ZRh1qfJ8Wi*u`Q7%lemn4 zTJbGWN@s*@l&urds`d+kP_-yZnDDPxe)49Tt{6ffK)x;f7Hh|NJtu)XWc<3crMOh~ z=hxnBedKVVo;YEx1QG^>fMvm~(+JPR*f<{(m(U^G3r>rJt2~@RCnEJeDS^vp^NJw3Q#Jx|PtD({y+5`ANE#&-CVmIUVY9e&UjMJ3J z$&B5cpo;o2UhLT`5WxeFC1b;k-&(cCeg6%BBgItXUlDcQoFYi*nUL@{V+p%^`hcksHqEj|$cm*wJM59u@Jg~_u$a>&kDsw*nVZ;;A zM*Yk-`!p~Kg4Jho^Q)~6c}wWLf?XHfYm$Ol?GEz9&edu&LWR=@pNNswQiW+g(b}@Z zH24897y^gMyKdYqQ?4kOS9JABiM8}}qVHf7Q1x{{Je&}@ixBaBX5>Ms=R$dsaDc%4H4?q7tUraVgZ#+rv zZ==SXS}}09p8J1)iG${|NU)(RZ(IT)t zA3LCtzpu6|y{N*pzy;cd&BQ8@aFMR^jJ#{pvLOif+8Xf&+-#(_Og}6h7zzhx!G7$6HZ$&%Wx0-fvti@et!p%fjN~Z#g!p$X`kA5$qA22l5n1 zWaWAbQH;m%GmrdK3P;qzsn^FCYCr1mH;jz6Jwix;u^Cc^520KfU$^XOXq;!&*T5D1 z;Q3~&M(w1H(_Ql#GzAWOtXxI0X6nc4{83AR9P1Q02>jj%JVQx_Agq3fR|IEiTU#52 zy!{`a2AYl-np`Q^hpDlS0OAEJcMlJv-VjC2v^LU)EEo-QnfeWXJKO_eB8Rc8#$4so zY<@+lZ6JD7(00vXKy9lrnHn=Iie`NWIlNfuhRTVAgqOeAn6g}s`?@c+(`Hxo^S%0Eri%}CHU8~cS&MVieQ9;(DtbCH0jFC0 z{PVp7=^e`xdIF7~^mI@Knci$b(t^Enx{vF2sTzCbw~zLDzUq^@JzS*~(`7s#O7_bh zSfm-t8=zHGRIDBC1ueF`=T=#2b2*qs0S4u3>*(~xFzGlwvDlEf3H!)ahmyxG$9S(< z-|N25ewX2tQNkz~F?J$Zb;OiXHoQmJ+TnkOj7~%T>lTFyk}Q#860$X=6aWFCfm`CpCulfOD%}+ zvu_2*GtY$x+5pKXJEwq{4t&Jk$au#y61(*tX5bqFORGe11!j{sNE;_t^yLcm?(yDu zD9(TAw@|>c*Dcla8Wzs@st@UMV|6fS6-mhZHH!vvoBJ4*d@8}3ll?PS9^`D$_zHPL zMMX7@&xbXwAUNU;&33s;eOMxU?~J+3C0P3CF(pmeULOLP;;RXkPoDUADj5_SI(19s z(P@J&C?G8e|NfhLwc`xY)xiv$&z@)*dTv&8-Igr$!6UoHYynA|cN4e@AH*A+HS9xd z^Xo2AtJ5ONNLZLvjcUnFv9FFv0`E$ZxyKI;#rwa2|_{|6!$YdoB&Pkf$nuZDAw$<>H>}|&!4#W;b{1L1U z?a-~Ymw&@;y$?t6cMer}!w8cY?>oGm%Ou;Y75{Qr??Usu>~V6UMs7^s%BUEuY(%&? 
zH|_vK-uK1JS!+p-PHDwj_e05N*Ju=(*wLD^A`aUHHyG48JO9v(I&e@L5^8rg>u8h9 zg?s-VUd^u|uQrFHBe#qO2lgy~(4FfHlQThs3Jfj=W5fo;MZ?2K3(>v;$3UBfnl-#odAd%axy zyKMD7D2u}HOH@Q+8P76dT?x8sShagITPUiN6)kN*T=AN_N)UJ~E(4mC14?#L09kRnpYDFE9Y}|T&;W_kh5yans?xn2 z@?IE>Xd)VwO>f3ECICz#A{Q2sOWNCG{?*mv!K`qVz=3_}X{N4mKkb&dGu!`wlax#y zx59lahbH`iZzhYO_2%1QRpEz9O82dzxrt64@JWj!_^+Ia4LCPaDZ4L{92(Ba1iH9@ zbEBf8kwh6MF6$b5u4<2zsmcmEl*Sl5G^}Z}Is3 zUR72qzGevpJgId^kTtMJ}o^XY~Y50$Xea-pK~+DXuZe&hjQ(`>SZ>(Fcn>9f_KiCfcESIXD2Wd&NG z&wJV%yJvo3o%%jv3Ig~K%ep3=$bEtzE7Kyp=wn63)_V3k|3BLb z7%3u4i7_Sy_c)TGajaYt)fEQ+lNDkR7NVi%a9)djwI;lIhUI*ij{NJXDK75mFA=Ei zqs>wSsccy6Ctjrr0ht&}QBbBsc1A`C8I!|6o^SNLTKMp zj!T08s8c+J_1rPK@mo%&nI{OaE|h94WIemS=JDCZn=U{eS4>b`Xt%a=cq~87Z7ao8 zUhyVHyjeTadS{}7cV(33Vbuv`_=y7se z{s7`$RvW+1xGh?%v3%_&FOKT1)?c?_*6~L2E3!Gm6c`vGNONa#fD#e zE^df1alt`ciSOGDZ8**1&b*ci9o+A}hwt`j5U~y3pETA1Jx!?4mTPUAe(Cbrw+VZ# z)`v>E0@f4@VVaxa&=({=MFM}ug3w9((fr&(v{yQ;G@lyoeVXBZ!=t{Qlh$w?}hXE6Z~Xn?20|lQ(OB`T`jB?)A=6BWJ8i!kvr{_AmX^FtrfoPyhV87=ZG`Ncm{<|fD%u6_k zR-~pr8i4MQe=0zfLC#ai_f8o$O+PGx6Qa=iGz#Flq2Id z?UK{3AiyDDfc6#-+S$iir)^ZUNiU;`XeeOX7wLjCIsbd)(1h`zg=qvv zW)@?&8gBs*pjuE^!VBpc;LaX8+hZ8)+}iH9B+g2TR5QPLr@w5X>EB3IQmJ13(pZr+ z@O*Dn3iI5xRnzq4udG7GHFbylN<>EJRqv3<^7`R^IK}sdUmIMgE7l}F{Pal5om_A= zSb6R@F&S%$gq=?a=lhoAR4jtTw+Z7!TZ78x3?dg_kHFX!ZF!>U!WfJ)F&o`hRS!;*x zxFr|6sl9;$omjfm_NE6y>iH!IJQ#S+AHF%l7d8s?qO}T$>BGhmmR{ zHLu}$eDdu69BC&DlT+BS!5msy%e>v;}J}iZIXmgZsn$##Ysos5Fe0gOC*ASk9P1zO~Ae z?B5rY{rAishX`A^c$-x@FF?aHA|23Q>JJg{aS-->;s;}6CqlhZLx)&_3M$Nx48bBz zlhR%Q{$Oa*^hVi*5xKJ$q)HjgUT%3brfw~$O-VVD~YZ=r1cAuk>|PaDYvw0h|%AJv`q2ab#J zUxQi2eBG}n9+R@*i>Py!H1X+LqY_NW9XMcH!fM?N9-}yO%fFuqnL_G9IT?rNo6O0x zDLC#sq=nF`!LnQTn%A0w$7EYhWOi(3+bLoqk+T6wgR-nW2 zJXT`OasEIz=xXAxH#nuF0F?#yaq5__CvbF~<9oFJE;&~j(cY@aJ<&G(NSUJfHoA_I z6v6Xe*|U#^vRGB0`_oS&bp6*0!OVQ?!|z{N8Ds-|z98uPKD`wZPn*%@wjWSNtqzgg z+qG-e&k}qH@7!SHGboevX2sK#-k8fU@D2QkmrLFR@hfgff>GSnQWcyNzNHiKeNj~) z;tUJT<@~LTleCsZn8WxOC%WQFZN{Qg@#E9L^=Whp;|=W3Cv%vE)HaSo`o9kSlCp^0 zfeDI${!Rs;Vw^Ahk5{W=HSM`&s5?>o%>c5|HblY~3&eDu8~ zfSh4{V#7~KN+Jp~MBeOAU;^!%M{1z|#nxLmR26mW-muwphlF%@OV_45R6rV}6+sZB zyE~*yHX#^*G)QiwyA%YZk&^D_PJHkAzH{!qe}KAOYpyXzJkM{q(J}w-Wq}^6Se#^? zM8?GFw;p>_<;AK7KsoGFTZyN?_4~!Ttm{KJSXzs0Cm+}CceY)l%%_B1Pd>$`kBvU4 z#2OIBS3cCz$(P|Ek$vB*-FiuC14~9lkLugaDY%QVH&z>1n7_XCu^y`oXgSk&i;Mqm zNK|*%#x>b){gBI&%Nt3fRdbw8fzf^L+k z@(sgrR>?m34fCwUy_S?xF_2dLE@H>B$3VDgbeMHYhjOC5&J#@RM{n^Z*5FWs1wGD{ zGOj*FVL?PXFD$tKW%DJ1+SX9%QZ)G+2|F?4oM^U|{V_X? 
z56dFjzr7jf@1FBahx}Tq%r0+_jx*lJv@t$o(ca#nP*w?iA1cv}2lCFg(es;tPL#_t ziax?D7KCD7e}Dg?_80!nVEKus?Cq49po^%4#x>*+k7NfM$!#N$XBP|Prn455LiR^; zj($0MKDO1XRTKPJ_=qJ@{=`ADHwzI)90JNvai&Chp&H4)m%7_HZld4BsmI%jTVno} zw?dMVlEkU=y>Qf_8`$eX3eyxRve#V&e;RxaMPon61>~}Ycm=ExYGk(%jOZ*iBhNK0 z_lexbb#B<3s0$O64{I?6zr*pKj81Ew{uJ!L@YN(jf;GN~^5L zKzP>7SJES&X}>vloG*6v)Z@I>@uPtSL$=-3O*F1CmDO#T7YFQsN<{p9_j<%rm6*UIz^Nd;kA6d$*z z6r%~ZJ}hbPkf^&27gkYwDSK8`yYNN!<`T}@t}y2|+3hW@MI~bCO5hl@JVs&lwBIO+ zH?Y+yY@~szCz?;VdhwhYB})o!)pxjp+t0^aRPyvpgW@X}Oluy9CAF6v4EEEX&;7FeZ$&plS~vyq z?VfKe1}%3uUb6Ew^eD!p=W~{g;^3F;DS}u}O?3YJlL7MDmvj`knde^v-pR z6K9 z&#qKAN!Iv~nFTjRz&}W4&i8@MgS_tnwj-cPeu;~DEdN6B!J&$8tJk&jZ*iAWjw+F( z9>rg|xp6A}7VgZl;_0DzNcbYDk8=nflsCuWKc+`0-(5&AvpHOZ$^6c*O1eNhItXZu zsJ)udRr>lRIXSxZ{b@vl*h+XMBfrBYM-h!!_%oVsWCzEJ@Z$2cSF*OxH+NAyuP-@W zCVJ`n2ePz_5|b3W@Y+l&NSXxjI;LhROW3cjbk-i@Y`%y47E!WYT^*z^Fd=($I5CZ- z6Fk?xq;J01v@!kuG)~GOHDg6(v!E4jr=UTI^$qVMn`pemUrkn80%xoN~$s~%pMka7hV8r2;xI_z#5Uc$5&$|9NGWyZ%=9p4cVrE6#W3Kd- zqW(MO6Z6|6F*SRhTla;X2A}bbBL+!V`=2Zjw=aBsvNOyG)Y$m3m%*UP-S>Wc_mSEv z3mxccV$!LTWicR5lz=29q_xH8@rfs`W>U(c0JLLpb=!W?alAU0B zVr28W1xvh6QmZ(T#T<1GUxg1tK`;uN!#R$>(Kj@k)h~n7W6hO`1CV1}M)Z!ME;P8^M`vgvzK+x@=JBe(&kopQ3i%W5}l)V9Z~6F zl~nh*=IoN@I?m2dTrF#v_8YF*hEy(5Ektp0T)+O>E#)W8*N0yy=IU9;tKth2t0I-h zM_R2D+9_kE2l@iUqB+L!N^qQ<)P}*-Z+xAS(JM zy7*Z$dz4w2G>V^pk#H(ev8UMd%B7$I%6NIETOe^|IGeOF01Jcsvdm7)^F*~NZ*uSNr)~`ov0-J zBc!(@Q?PX!k9oxa&%K0JNzqZrCT5l)>(nD5CMZ1{`vGlfSGW|r^~e}zheQ-pKOcBG=U$}rYTo+@h+%0w zj}#Xlm3@>@IP=c0d7_zSobA7L-lV9dMN{=D(Ch1aDK?a>-6I=82gp776UR^}JUl$( zZ4OZmfjuW^`;#$7=|59eNeeJ0?Dt|}skno2FnqerGGL@2YsMJmK45;3`g+aam9*<= zY(Kfar64(PQ0Vc1D&9*%8I+{ znLG8^^;vjFg8{wqoBc|ztv8}n3F7IeN9T_IgugGEm59nIUj!tl3-w(7En{b~ZI7EC z6U`QmPnuoy_wt?8pZqK}|60BB=-Xf<>fyv-u0mT_{F{u58b<@0~!!OI$7X z5lCOtW}9s&8<|)>rUuVMnb0O3`tf{4D}@&&Aq51~dOxLQoY-UgCD4_uKND>e=fD1I zT-p7Y&2&ZgsxNn26nNi^==eP5<#5PHpZUeVq&F+;W`oq|+i5KebAukc=76!cpjgm~ zHFk~LwA#Ihan+7@`YB5u(x#5!y#{SiR~62}01O%w5dxdYwge`hkm_C)Gb2<5!9?hn z2Oa8DR_y|=V^>nY;y5{P!B7NXJjvbp`y~=AOeapP8qmWjAg^0b@K`jdl+D(ovZ4$j zwXYyAHa9Wwu)-;)G?Itx@9&_8MlTGW-T!+(-TE#3)!|9{?z(!eklR#sL&`pLOFIr*`I z^414?5J5MDcNNNek()wP>R!c^y)#gV{XjR{7>`mTWCvCmv z^(xa5t1}ZP_Pp7=TW$<`&BaKXLNByVhdX9C*J^2VlO6W|`~{%9^h^6T2@v`KOdt)Z zPnv!o8=Rg$PY57Gn9hXXAPZ9(jhfrX^N1*ul z__&JP`9=akDL)>aAQZqr#ODDx690$01oq+ug@6EFKCc(dW9!EzgL0n}D4T(|$>HAH zq+@Ix1GKsipR#D{2vZH2r!K^5f`tCk(;d${8NZ$+_>u6z!0UjXcvB3F{X(3L#DUiu zMu6U=Vlj7V-Px(YD9YB0Q<$6>ig=BAeN^V*+)?|CJ>YhsmBVM|-juPU_+(~G1U&c? 
zsWt&|60-C~nA$@${D(-!#_V!z>7fJ-_ z7kO-KMS2>jDjJayEiI-IT@2Lq@rdFfW7WV@WnF89zgmffp@H2PT&8tIU}$)(15h#c zueP{^e~*tp0knkg!&zM z05Ag*cr0K3Jr;BG1e<|p`Lt#5=ahKU8tQ?l8$g8a%pDOK>FkAJVRY@dwX>hZ<*DN$ z{Q5xSHL;9X-!*1YQ4s?l8-dxA(V=Trbp~Ci^vyMAETzQ5{N zD9Wl;Pt_-k=i2>&bt1A~$GiRzsLWSHH7kW!e>(dEOK}~_LR_Q9j1?-s`z4Th!lmj= zMx>%G6S=r((a_jv(qQcGWCsS)^@2Jtv`X%S*JfvbMHYrLa$zA8o!ibkHkwgQomF%C zQ3!Y|#(@k?Tv;%Ac@zVkzw>2ZtliuZsR)Q0@LXW^hBHb_$EV@Ppemzlgvgo}7ZrbD z2n|9;M$2VI$3`wAu*f^AsL(mTxOmBI9X0VCb6-CB_QsPry)7)6Z+BnuKRG<_{ZAOs z=ojUYgBKM+KfTx_7lq5bdId^Ah79X^V06U$h8BwW3Se!=l`GR6C^LOy2hYCtf9+-g zzr+6)b9F2LNc+Lp`U^3ef5UBD4}JvU($dao)+2i8U;?6F&tX)Ihctf~+0j3lk+4>U z4v6=msAwO(Nx5n0Bxb|zk7)4YhhtLU|NB;^D@_{F*eDL*ABdhQ^htgFw0QY$UMPudesX^)oT7jIr3o?>)V zys95fdNr$b+(e57A79xla@A=V(*Z>C4mEH->6n^A|GNhe``!Gxf96SmE+>9Jq6c7;+ z0GQ4oUoi&^Fj>8sAGF|&oQRYN6xc$~l@4^)o*(c2OqPU82>ED&wvXIm0606hwAf%q z#20jVi3S`QX)iI3Ln4}*VEp{Ze~>i?=CoJW3$Y&%)d8xy3v@g;A7uD5xH9JE2R%Y0 zIaFUMAi!BlNKG9EbjCxSTtP;{LEo&~+t2uKZ~fD7zxNgbKt*rh2d~_FT?s2Odq)F# z(C4j}B*1~4!=bN!`lE}j&z~N6bl>1&1ep^rK6uneDO{kvXZB9)*g=%#vZY{|ghNY@ z4zXa4g_#|B@yE?IC)&L+QKme-Wc|-0#&s;@YLJ$2x2~#=aB}|3dn!CP-yDCqA~*jW z!Sw&W;6HxEknq@{Z>uA$;rdY*#t0-D1P~2yKL3c{-)ujUky@Ng1ujo_xG6@5#xhn# z07J)nn_NIJ0l4sF6Pfo|+Wif4OAVXM3?~(o70vV|-c%9rJ%h;HNYb|Wcql7)Hhc?b zbGdJ^k#ZRY`}=vTqqzxk?A*nGFJ(m6=m+EddlCI2P>hYC`xMPB$Qou2Q}8GG@99nj z`XjP-t9;pL0{6kUIxe4yO0;qdaO`E=wwQ#Cs0Fdh_D($A;}8Fy9v|D^sJk%7I%sbJ96!7|7#F807nNZaI;7sOCc4E z6`)G5VkOx2`&0}?{Lv$F{;{$2*_0e!=-@lV?+ffvDR#`HBtlz?JSa2NUvu&2)LmHi z>t?*iK;tuIKXc*%G(HbX9u*tkYkb^a+E}b(q@<)V2ys)RT%8f$seEG#MKFUa#}8BV z@SP%&sOuV6KWD$5Qb}A~oT9)+uWU7vsjhCUT-A7VOk7ZIuBmc3qhCFs9yreG)p7K_X3Dt7bKvDy?G!lt4g93mi1B8v`>QQED&@IJ zEKE@mwJM%Qeu^gw1Q5$a>ox+q`v3xvGFim`Z2CffN818ujT$-9AVZtvclAdgsM9Ym zTY|7GeK@d7FC>I(Hg8sCxmcyDhO(w+F&l3h3ID$4@87!iu%MF}nnjo)5CX1*Px>j)-@9EMrc55I-3KzTQ1dSt#D-OyfqRGU@-ZQMpGES39EQm~V~Na;@v8v~ z@@;l+U!Pu>&-Ts^H?bUgd=~0PaIoC!pFDU(FD3)bW6`9h?k}~32m65-6H`NNNw13> z>?u@#LcTBbd>8`pS#OM z=HMa7eFXQJr%7`pDs=e$JBZTIbnk1qMLCEj(AX@#<2}r0gXkWi}gmOJLk_`HGL}m$VYXzGrRyk##LEQA-3JH(l zRB?E;(T|S4z9i|6GTyKJVzV32mzxq_YHMpHEBM7@}A!9h#2r^RX(Z+pIikQg05u%t<%RKl=b z&CMQ#2UDyj3Sn4*;X%qZNZmKwzJ5fyva`AOsDJQRw22m8SNsrP<{16(@K~@Z6~*`OQ!}%8pr+5fo)g^f9~fz$>ggdDEz-nh zMVzT_Zc=QiQ)0ubGSdRRN8R=b@QR8pmqT5g-QNf}B8_LaL;c?i=5dW!>Mq%ZXr~e1 z%OZRqtvrn-=O;O6<^U98I5NcNbo-g%)$Xp-!$|!m`Yo+MTiOZkn$;B&Zm|k@}1va_=@9$Uqm6r#kW^Ar%4-hIHp!qPPq zQ`%O`B86nJB7#SNBi3^*(~f|gM2#lGKgG-5N`J-DI5fKdWswa{4=wGt1c6$g7?D!L zp+x`S7xwm@@zfGyAJiGZ5{(l4(GvPgbKQ}Wb%hZdnGN9)wRpF2A3_Ku@0@zn=G(Xtbh`wRME>Y`O(BR)< zE?Vdwd2(7^8;9HM=STREv_PQ8^IOh8g9iW&C%(%HZx2E1eIq8NRT10A20gU@gJ`{B z&k*qtt2LkIITiAyO->Jh%$vRMCn_lmdMZdR@CpjVkdcT$B*MH|tS7@)rVw0QT!Xx3 zLcgnHhDuq12!ApI30lM+h1j=my~D#F#PQ~M|N0f?$|tY!$XQGjmmTkq(xB56dx^;1Z8vb#nOB=VHXHqni2D>*69ZK00Nay-cg1bB-Q*ZWh9gG4t0oxT}cK zP+9N6?trP<2*0YZb%Y5i=}&g-(LUXe-?aB=Kxjn_9{DJFd7)~(myELDcxmV0VD^Py z94Ttr!)sL~tsf^pgwJl#S^@fm`x~yxT*%q5AvH3aiTu*1m?CbzU`E?^bxDUlA$CK}W7UC6V z#m3u1>u*NxO!|Vh5}HIAs053#TR>}gh;{=1aC}P2XGu{oiMfvz2)xs5SHc+ax;~0P zDqwNdo|R?fe86;pBO(U^r58^I$)ZctK4}0Nq@``HsRkAnjo{99 zP?Q&3kv}8zN^}|=ko}+hR|A94IiFL@AH7S<556RcCG(xAf;}`eociTTw1*QXXGat3 zbT+z;3%_~LF2>o2mCq#S_r-Xi@OCSZtw>!S1Eg*=^z^ckaZ&uM zkHqu=FdqSYXQPEaM4js6<~>g88} zY+n*UXURkEkdSny3%fOICM0JQRu7*@7OVj#&0@O}oQqfA!9v0N%sPq-w+6PE4UXb{N5!}EClz@?HOT;Ycle9iKt za6jMXqS4Z&ja8gDf?ri{B?<}{7S}l;HA=;U%N>J*lBJz?fE6daRST;6#|LUF_jog) zQ~y|6s=&i7Who=aZc!8`mdtjd8rUUy`UZDNj!hS#EPPKL+S=Nxv_E+AM`rsE^UoDc zDf4wMQMJcUo<#TdVt_8F6td$+LKvP0zQ_6HdCk%*F-43FnV^&u-4mTB(!_%S-7iD0&@HDx z0lGOt@E{5bhz_-m2@a}JY}5ZGYB_9dFgag40H*SQk&J(!ht@eR$WB82BQfv=|La)3 zy@aNQ2NJ|4X 
zHMPxyWVE#}b#?kJZR%rMxEg7pzgR#U@@Id7**l||P;IR(B*@6!jm>?-*n*Su|MyV+ zJC|AXiRtMHpzY~0^z)etGUL!-7WA|7ee^oN?aAW@%e|p}0x&79%H8fCzdRp;W^iP5 zY$yP#?RMMkqAW+~1Me@mUg9xVdJui8Ylz?Luja42ti4F1Iy<4R_@#Y-?pHL783s05?F*u63P!Vx`01^7%vC(>ZsAk?I6c3`bCMO?88GR># zTrwiQ5aQ8_$z>MFI=i?`E+E+`G%_MG>jB(G|L4jF=i<#0@Pd;6{9)tfmiILH_{%=i z9}8!fpYt!9$(+g3%8ujyw>QD}tk#<25RCn^kdl&;|Dbr%c#QLRt*)<&oQwH+(f6R78o=n#T7ZtpBOGThLm3PM8vOzF|(TbNzMD-8OFZ>(O#k-;%@E zG&vR$#f=R|z5j|_K;C$~H-(OehxagYrb^zViO0}urCUMSP#A-v#R+rJydqMXxDkv3 zP3o;uf;MB2+IWwI2Bb&-FFGOoodA3~oKc)3+7}Z5dG!{M;vA^Ozy#*a9oJj#VT@( znIycH-8{?a*pdb00>6NHLu!Z9pRYfcH&kYN}pLPp=mEnqsvByNZAd;}gwm-yh@h z5`>`Y#KFSG)+^2i1*SwT#}K(3yt0z;IF)V1{IV+7H@)cM=ziX3 zDZC@yX!CeGucsSLAJ`KI{B{py*UVrJ()%*Sz{7)zcngDh!Dyv5PAO%bAK=?l4}QFj zKuo=w9J9ipUwFeFXm@#Gi`>}U3;|oYw){FKl_cSsGT7lKpb;8=@UE~{83x|3h+P_u z3+S;mCSVp{a#}Lm4m$iF7d*JsN*R^)4p7ZV|IL1=xqX6E0@LWm)I z%V3W3JG;96JTb6wzNCX!oeCz_Y8@@fTWr z@P`%+2Qn2YKHcB2aB7VGbOUzSFFd*rnDow>&>ss0LJv?zTWox_#s!$x8_KCN^xEt_9!iU4wH*~UQj{R9krt@};T z(%!S_Ea+Fkl95Qy{id2#BznN%)}P$?(RUf#rbu`ct74qxgbvYn8`7WB<-D^yA$SUn zaEHHCA=a-bKEt`4@0i{=L_!d_Q#*4FC3`27v=KAE>0MAq*9dldVFGcBn|1$gc&X*0 zgz5E$*2-jl{PGXc;$C~Io*n=->R1Z4i8Os{TF&}Xt^kOSfL4Wl9(?qc6=u#SeDV1H zC(=QURPSkbtlDYGtoijs1;$=kz`_#>lOSH?>Rw(Z0#wD`CztNFoI?~5FjG@%|4T0# z77cQ2UTy6aH-<8^cc3cf{-qXkqi@4J!TdP0D+b70H#RU>FSSAPuKW*-*U@2$HL>OL z=B;>fg3Jw3GHX0BzCGi}*Z*;l7CWPee^tD|F3%9i#3G-5NpBtX7-Qodfa%JN7y^V* z1RWnAEZr(a7KCZLIa?cQMQ7(kqMye?;zzW&c9OKx@4{kZHDamwb=MHrB{(r@T=H(# zP@BoOldu}10s_7qFmWCJp4g5;Kuqj--uEV)U;_!Vi7uFOzP-kA@+>y$e{N4e48$KE z9euKV%fyYlQ2RSnuBfs)=_Vy{m{d*b4P50 zO+COERv^bCN6yH}`tS34NBhe;$+yT%nB>O4IN5r;F@&fQk3tvS!b|0H8AhQ!5Ng@STKm@q==o$bz3&%!_dj$~vZ55lP&bW1Wlp8N^i+ zsrQ0&z%St@44wua_UJW|n21Q;yh!8s`T6q$8Dp^;fNmM&(qP(lOd#@bcXt=?MymOvk;MfWNCA|*_0`qSI5@1V2k1g6c~q~xA=jhU0BN|DAJvqdy@^t>Ph%L!^YtSPlzmZI zK`F@G_(?-I@9raegwSw5tIFu&A&6dqM8XVOz| zKW;JBN&v{UVUq!sf4_!|IZ=7~Xou-rhlv{JV9iEoW)HoaIe=&3<3k1`kbha)L8jMx0j>p7uXPbY2s z{HUq6sV!`5KBQJKYth3cAY3dzCnrCn%P#knH*Nm>6DaB+BXbv86-#&(5ua&!8oz!A zxQ~AXo2`lwa5INZSu0ithl9Vuky%uwV3u7|H6A~o&^f8CUTr&FX=}Hh$lFGilhdg- znDGpR;O-($$N!*{<={1?IGLFdV@=Pr0r>bog$)iWeBlWDE)=)amx&P@(6lk!2dx96i2B=X?WBlZr!wFhRdBL6#^h8>1$0vLvF z9V`pe2vTbg$qP6T>W-yQ+#6~q9cup-0)`d8LO=NZOQ_K>a>UAL3(Sg(lRKJe?e^F?9Pl3#O4dB=nCig+PfjMyL)L zw;-BD3%qebmSV^WAWh920+NWNIo!%3~It z)DiK0VBo1T<3zWPC`uN$4w!vu1PT3p2gny` zRo5sp&UHBqPz`O1Ns$Gs&d7Wq(h&IHo(|GS(*nBp&jZngp{two9jN(7!?2bk_qP8H zZY+F35Sb0q1yc}}6VH{EG@6+p7^B@z39W*)f@sbMjOP*rLZz7Fd5=M_PM~UhIhA<% z+e`Accv0`Oc|WE%(C;Ht^^A?_(qVyPnA`9OuBD~t@BX}Y8y}bTA^au3H$dn{yRx)9 zZ>Fgx!3{=8ji_u@>i3b};sq3Rqnzo4a6`~>1Iyy#KH!>nb3zBh??7cuGm z7FiuA>4%L7MB_txdKTvIiYpORof1%_mf~^ zl^06)KMHOGQB>?Y{&Rv9jz`UYb?5^E?5koO>7j`~gd;bw5OfYoE$fkmWA+^UU(w;*$ z`b>E0OyD_G$I}!-^lmNfETYH)(Rxh4h#sgrpPyH>GX;9TaE8v{Wzr(;a}!E8S> zJ^jK5MoMr4+LZgp$AMo+vT6gT>jvpC}=^m!+-^eqIM0#SDo`)_+tir=L%04 zrRyP3_OO_~F#h*Q2*D#!{H3HW*Aqv}SzN{mg`7lviE) z^>MDWAKJ9ov^6UM5<0q%AFdWkA=vB#hZ6uAE!ao^x^CHbMMK$b7;}wp^;$RAXv-AX zpg64iHwiL@<_q(0;wa;_hBxRS%JJ6iBMbOoWyJT7g8xU)puAl6`Ez25FL=b~eX@;7nXQ!-Wej-$@n!^+TnrwapJF`v7vHuVeFbQ42p1z* zKrb6q%7KN+g|}$-YmAo2jTIFtQVP4j#k4@t<=ZOSTxMMV*riSfii-M@&It1{2DuiU z*9wb*vQod0lc=~@VF)!k>dy!A5dsn+bmk})@eM<6o|n;YO8K7yvoSOz{ITcB<+TR7sS7* zHv2E)#S!%O4Rw9~`S%|7+&*sxT({OUOd8ZgAIr)*rtWS?iCJS)vn{BHztTi>8eXw~ z+Kh~f3I;^>?hNmEF_g>2c6k*|^^VX3t0g8^i!GpCZu`0+)hctb@}|2HWU&d*`8;V9 zfYei#lWV4l0M2;&AZm%TQ9q3$weIny{XtumMiRIe`{JjkscgKwJ=21AgJ^Zo0$GT@ z{%5&bL7RfBf8Z8Zf*67%o}cr)}R@n({`XTdZot1#Hgby zGl2E>D+CtCbY#!SM8%@HlU*>>zwBsQte4MOGm~O!YbTsK8O<~W2JQk(b(T?4aS?;f zf^^P@>`JZ4WgG8y-+=@)B2i5_grNgxgO6AK9@xqeXG?u 
zZGdR>;skQFg+V&lW2OYc{KX0yg?j-NH}sL5#YMDUWI_V)v*r%mr(ugWDaca_w1Njdr;BrV<3TgAcq;zL?SnK<@VRo*LP@FCNa9<7kNM8 z0XE)KZH~>6#gTn1qb?Ga1;17Ls!w)^{aeuq9wt$8AWaC*O5yWAq! zN*!d+tU!mF7?o2+mQ!UeI+8i-qWc>x z(Dy@RVK$CQaBu!S{-~&E{X8s`21`;tr!S{RkdbeN3Ccq6!#sQo z5_;rXeZKi{Sj)&D97gKVtUONImp%XIO-6iRIdtRj@J*@hWa7ewc#Xq*4oJ;rIpx*g z%;4YdPEpd%r&LOD&C|DQyn>TbLCLJz&wGYVH|D;egCgUtjCm%Uph6#_cN}Qeey@ju2l97@wA=*($TE>}u z)O!AMJ+1bK^lNEcgs`wOtICS}HRp*+1cZI>(^q0)+8&~!y?77RO-Md`urWe&et{=g+-zFXy=U1b43R_t12AA3HY#s&?G|k_xVWOh_UgiVd=72 z3h>XLv0_MhWkKo!AVz5r&EVGy~f_&@0pCngp z`-dkaGa+y$l~9e>6l`WZ!sTGZ3}PY35cXqfbl;4P#Q64N92y(&OBcE!qrk+9fecn4 z8S$Xx5{vp>;(#$&7~o9_`t+-=SEp`bBL8KEV>NP{S7L3xa3oZbc_tPbSW7(WltopH z{-}j`Zf>qj%4Dvq1iC@Yisih0?o#t|Iv7$Dv%B`O9+TU4s32vPNkLh8)A6MMEj^qt zV<6bc@kKk~1Lmm)6QVjr@}klfE8iXI^JY@Z zMIT6q(u0`rv5^G{?r$%kCNvC_2&EWZ{__%TRX~9l(_TaqjIMg{Ho^8UN6XjazjYvL zf%?KmcEcp&uZ4`ZM9Hy6A?viy(Q5i}9^`jG_H9(E2}C{lvW1W#UU35s!G?hYbeb#` zY*zd;buS7ZL^{5)8*jG(wVa>fBNp!hiYv$NyZs@C^MIX(A69tjqU+!L+8$jyUYR$3 z**hz^{kifIXGexi%Ky2^NzRsrl=Ilp*3trvk&LfSHp-pe(fQ68U)toH;*IHb!%J6} zS#`IgStstxYQUiC;tk1ukH{3Xt6@;e6?X@rZDJ*jFce{fsrzio;wC)KevIJ(D0tGP zwBLh|T@GdgqlNz>Af_GyIClrk0jK4U^^2lm$u&zVY8v2AQyN^Pe$i)?=82!qPx`M9 z{=TB0tWr_1whmp2`w&J@ffN)nJ&l2noh;YNVhfsUQOM@M{pK#jNz!u_QNM&;dHHnA z4L$hDC9JE^arDrC5oiA_>`S%a*4FTfLA&Y@bZ%-ueZu=o19Sbg3F@qVdkn>6r*a%ymUgkoSnbL%q6F*5ryg%`^RUY^OI`Z2xV~be;cRKBYQ(Y|7?JgZoi0Y;;ERZq-E>)c<5t&4tE-Ed z)e37rNbqh=JK|ls9^p58`|%WD&vp34pDg~wb9hjo7+7vjd-2r?^CM1OwcW_eOsuWF z5Gs%s8H5L(W|`{Z@bYT+S=S5_^?pPnx_4pMFC=utO(l#9oZ-IWDWMDs^uE!T$b@)K zs9~{DAb*T%V&mc%-+uXHoerFT8qAN>6f{}3^ivHqQ_m7yy$?PFEbcZrO+*L&b@87R zUa7oQ6fs zpG9OHawtHrR^B^YE~N$C*cZ`+mz+lG`Q~TTWHDY{K07UKpEJGb3(_wgp9M zrnWsKz=wvAU-f*8miPOC2qa}Z?3v?qIIn+J+;p~kd*4rrAd=C$N0xL-_%wkPi*WC- ztfRra(O;ybj@~l&T)&#}P5FYeH*>U+NFL{db|!!3?nu}! zhlaV$&GRd$0Pkq4PKOZ?cu7F`66XY>}Lm>Z&u*PW)z^TCcGbBtlPBj}wmbWAtK4PFVW7K*# zC!si%IiSB({i)h{VgHK@fQbF7@!_4n^@pfse%tTmo?6LjZK-&*gC==2R-&UCoxvl7mpLjj(J&J0ND%AR$%zJ#b)n4kI^XXxL<$n0R~a6WDSkjlB7uT} z==(&UTz2bpndo8%x0%tEXd+>krL8y)E0~x#i ze1G@ESH?$LaY;i`{E&ssC!UoZCgZka@mt_yo9&JZ)1Bz8kUNnUS?Xf-`du`;dYaYl zCAVkf_cLT^k-QaY|09g!z2AgRgVsEverw~9N41MYt)Ffz``x$-0&f70r2t#eNjZA6 zXL@ZyRpz;fV2ycU@0_?;(#nIC8_cId*zx)<{O2#V(4_sBW!=;rBE}D^$whpSCtuCQ z2S{D8eGfr-Jgo6m?91d*uBzZZBC>sOh6aPoD8^up9_+E@j)Gd7Ku~LLrW+k4%D1MJ z>uxB?%8mgvf^}FWIr6vx?P2)BB-Rxl!~v2A07*@VIR&q-FCs8a!HYB^U1lq5lgjzJ&y%) zm5=$|et;mHJ!?HjG^oCfT03v+W%Kp=6&uymtm6*>b%*8OsOSDpXl9Ljc*igE!pnTl zR&pzo$LjrzaW;O?oHZZ3#@R_Jwo#cn#61+nTK~chYdia4WhB#2<|9%KzuWukVoyyQ zvc5-y`uZ6smj!4{iUmBi=E5dm%TKr_wB`JWBYcohgMjpcOInL31fkhE z4)Wl|(P(}dU0n3O`5rEpuCtSZXgr}z%qoD|%SbR=yng{SAvD8eN{_q3ih>v7CTin18K&}h@FbjYrF+7xvwbNoY}?eHG%}I z(?w`RW7F9+&I<5rXFfl3UOToy-7sN>1&AQmlECk7NBuBXPNmBj=oP15V3Vx~Udpw( zEjW_2%`TU84IB&&dXX!PePfo1jLj6nkLMCNUt`3l6J z5vR@-F9w;=aCkwQ&Q2!c5|=XRT@Dw>c6cE`v4V3R&}c_`>~4#>aGr3szg*VA?|AXB z*=@wp+4XIEef8Q)OXu#j-1D;){3-(rV~rxa!z@RM{l&PNMf;!cGNbU=3eAsJFJdZv zC-f!Vj^2J;qr5AzZ+Gc|w>G3v{J3&GI=C%*v4P-P2N4Tphz%*=VnSokw4P!lu9fiM z*hWA2a4gwwzgylTpL_2(hl{JUeh~w^Q+Y%(+!im^M32TA&5b_>{nqaA5!XT^3h%_o zH`nYmqi%P8!TPr6kK0oRYy3}^t_Y~XwcoAXUNtmJKuLu0%#Ms?V)oM7g`I}d3+3|d zZs36t+A&`9*|8Ou33+6HKL=pp;JuE`jz<4V87&ck7Z?+=a%WL}jsgZLzL-o%6GT7_ z)cIm&>m#N9GVKea$?+zFb16ta%H^-hmE%A>c%pTfM6f@vr`IqK<|C78+28c*Tg#T| zI9yUM^m>-4WN6v2NxX3Dk6&INoQrY&+8wEVB4Gv!Rc|J^@30=rCzBO#6H~uo_L!3) zhHrw3z5|*~t>79++q*l}LKbgZBr&_$@_F^E8lT$W$DSD0C^8q1CvD92pY>IEQ5D*N zv==u0_5Qc0uDjdUgV6IP;6Im^8cmZPXu_xI_NZ9{fV0kpU79$ltLlSJ^t_cy%JTMJ zhjQQn_R@Ht9#mg4+%170+YP6g%T$#jKH6Nm#y2u?v!5f%JdXBRZqcrNwU)lyWLkaD zus4)Iu(}!D#!sV+HL`voqa5ANQD@5KCK|1Xbya6U_`P*5Nn@nS(eUbEsT7rb_|>{y 
zre^NI;9rkc>WG>(cJkjX*uOLTe2Z&H^6o^b<7<9;ZqCmhJnbgpny#P-Xz^p(k&&#( zHC!MjD+{EJnY_*&VeL^P{C?MBzY0{=-jEXQyfxA-zm1s>$*&fKq?{;ZF-+26LLQW5 zhE4I)F5IiTw;PoxNfyzWC3mjl^lW;BbHhQ+0W-J-18{gy%yB1 z+zWR*xsV!;t+NNHOT25m9c5DD%VT3A9od%Ke@U|J%H|?HZ4(Etr#LmFox%@YQwOt5Wz!hrP0XFOsxd9S;B&X9b-_4@amj;RMtgf`h&i8!A2!Xpjh za?_lgyYx&UNd{odtH2@(!Ng>{>n$AAw;z+#1M-Ys!$wj_fCB7d({^_{1a!^LBhim( zm)@RD^`G+05UbTk%Q0 zupkCEM7i%UNWoar)PtFVR%&u__s=3?wpRrpzV%@1^opb>9S7r;;8&M4HL)C$?rXLY z<#a$o2qdchesqFv!l2n3A-d92N+OS3^YW6`*+b@sRkCOqSJA5ey zerdC35An7}oc|YLZy8qQ^M(J)cGF0w2nf=hl9B?Ff=D+=OQ%RmH%NDfbb~aK(%p@8 zH~dFKnfW8kQOuPX~?r6&PVjI2(&2-u`_Vi8c`@ zC+VtaQA_p=PHnsuY(L}X?-;3HDhv^8(f}0-F%CfX%C)IB;6hZTGvXXc(a&mlQ=E86 zNl8`bWQTAd_^&b;Q}Tur{NV0y$!P;Gr2nuofW94f|8)4F5eXSQ7DuPru}|n#A1V&p z!;h_-Drq8W*cmD3w@ZRN4WKS|RE43i4LE5_cx&Hax4?G-VOp{;osoNckeA; z;lEG~mm8{n3FkQeX}RS-(CE~6fs^e~6trRR;AjGc!f1mdL%oI}oAsGt#V%Gnqo#LE z4)m?KxcYx20Y>jHX9StgEQLVZa|JXv^(XTFd>zKB?9}FcWzC$2Q45vQ63_E`y7 z-}vZuPNpR$?dqZb3)c~D>m{vuaeZP%nHW)FPpCQV=E3iQtONq4M6nx99qplacrj@l z&Tu!CDpJD2Ud&sjgVAG9Q!A38t)ZURxEV&bVd?w<>1l#!gP`;I~`%79ZM4`jaf zAmjQ4d85JefcF~eUS5@9iu~RiN0nx);9T_o$PKm8X>0(GY4(gpiz)tiUO7^HP^s&L z?s8I>~KHC#R9x*2vr<=Q`qNG&t!be5# zKuUVthYw*_%V0{>d92tZ(~`i27{6q-Zl|dJJgS9rKERrSMh;DqeF-k5qGrVF@5xMg zWnul};}x-K|9N?pFW=7f>d8kN7!VqSA{iR`g-YK1qS8A#flh#ok4afz_4{1OIJmf0) z@BGf=bjRb?%2LyA#n?y;Bt#H-(RUVtbREf;adMx-G#dK{6O@0bmII7(wBK zFMuCg!F>^9sIqrEpDQyzFW(sU&n29{0>i4}a3tGN#V7|qj)>7Bl!+bFI#{;2*Prg* z(O)iyKH6s;@3gT^h+G-hmb4t#yB6ogv6a8S$eo)!ku&9*PcuOa-yaXEFtatir{~}1 z1DUFVyw4r=lV^7=m;W#;LfhWo z@0fG4z?v9nb=ZA#v)UmU>AsEG;JinlmG?KkZ3V0K?MhZb>%pZEEbK-rFMiACY@`M}AobjCwg#vdA>4S^qk@UOu zu4+p#SGIh@8ZY&nFR{>emWW1WS5@hEeqHLN=SKB1xepvzMiapQdYTn@)A29DRU{0?PloZ-D=aF>u3^%b%-xdPA;Vx zwohA|`oA|Vaf6ff zU5-eRHl6(JWAW5#zwi&S5gRK^hacY@q`D8)r3{B9vSCctOq!;}1d?m>oBCnL7BpFH zSryN;phl6m*XMeY)qhunTTkQjWNXN;6Y2j$DXUxg0>muu{2~rNcJAk0U&*E+5oKVo{OTREguoU3181?2710P{-Qxj?iz|P&Za}S# zt(}B?yVSm5iVwK9t%R@oD86fN*yGstSj%1`V_>vCuX{!k_=BjQ&G>alTTYEA?y^_uVyyT{E#~`057e@@P6;q>Qnff8eNF_Qi7lnw>I#t^4SNE!`)5a+7{I#&b_# zz^)}Xdbi3gD{UEoLBgRUD%AR&j~Cc2jpaRJi_3xkn8$9f42UIHHOxFSveJVf z{qEq$Sj?UMa+fSLJ$5K&sSF$$52x7cm!EUTY7$Bx_W{aV?@^Yn9_T>%sJy_C-Y*uO50-L>piR7+t!0b?vveD*`x8+#p&16 zl03r$_LD<{lX!^NdACJ7_BNaQym!$H$|`k8BmP=5cBPn4hSmOW_4G&~RBK_1 zwtWL`2JPf6EJ*7XIq~`V@UNmW{n}!&6sD|`Ut8wQj7V25mZBvp4re`tVpGQyl-WEi zD%85zsd0PXBchvgaewqL02zN^q;xP}cF-FM1n{s+r&;^2E>o#3uWj0jR*nr6aIQeo zHa~?>QW);0nTD)AUT82UvIQ<(@Rs$%x`9+s z%k-UsfKfX(Q~ymk`F#{FNsx1*_nRC|kgaRP+9}?2Bjd;xN_&mi<%Z)%2gjFu`I5!+ z@0&4fLtEu|`D>8!boj)W?XUBUTjk)vYE!GE>>;JGlHiN)>~b3J^3mVhTbKLc?2&v6 z0)dAiNxdNJrYTTDQ!2Ej5+=X4 z?h;-auv57d1_v5)JuHZF^;OdF?uqu_=S0||3@l);j>Sgj>CL!-q*v+z(`|*J4Mf4P~a_ zC>2*OGhT9<3?j^FowvnTZLVM39@R&}xzlv5%=@E@>Pyk7SS^M#C7yd16*^lzc_REo z>aEB}<2?JzAEMW;U)G+@IaaT#^@X@{tuOuN|1L=zeAN5KdnEE@6YHrfrG&T^X2#=1 zXZp$S&L3*kRx~YN@6CMKa5Qo36Wf~%##NenWOw!7G-issq3Qh9$9`}fcPz-dI{T_* zyc$Ftpx?8qCFkV<(hi=3UdHqEu2AOXdicl8nw|sP!CS>o9%p+M1)x8B*%sAOMD&=1 zBO{+hHw+{F2DPfuWL2QK%z9`1Sw!nSuTJwjFTwY4HS?Eb^eBp%t@enOopQy7+m?Bhg}*ZuA`Pt#^Da zVdhJWl@%L?ew=z}_nBoB?Y$}@J#xr5i3HMD3Hu{^JAsI6xxLJ*nOLVM>1+qT#{g zVC)^SR>@wxTtTeMW^wgNim3a3otJT;94{K5sYB?nmE_IX+_79nwy}@xWAbD7qR-7L z!kqfT!)YVqJ3=1CPr%MiN_Fp*-t-BC>LIOq}TH%Nn?2!=b8WoqfUGe=T=cpP2{wOTbx)wTJM|Pif9|p2D^@d*`qR761V5 zQtTd7_e#V~P)c}H(rLR9aqza{dQ`Ja(geSZNCso?hm5NJLXl?&wW~#h$&{1D(AT@Q zNtZK^Sh&T%Ap35%$7VI;7MfW1wu)@0V;^I8vHK19pqSt94DKQU@`$J<^1+CpHVUEo z;_f@)%eTTt(N+u9;$NS>XuClLU%3ShXoB&Zwci9596Hr_NR^j1js&_si09;U^Nt5& ziQghzgf(v|td}bB6r-Y$l;QV4rjU=a{JT?H-tV1Vg2TbBPA zie!v+m7h^ui~jkONCkn=t;Crv>AU36$$2NX>^|aVI{aG>dl%;^kEZi3XVdtk&|CH1 
zV^D69&{BKyCWB0@yWv?)(>JAS|JT^_mn3gxZ7?}9l2>1% zBG#@U{zfb#3&vbh{JFm2**=~~@i`ga#yB%F+E1J(@+%tCTGmdcKqyvfPkFvh``3tt zFv2A+wJFlwaxhZ}~&oRrTMdr(M%3GP`OVodHrhe!M-Me5^xfX=qUP|R#vnIPkob3sJl~_xj`?-w0D4A{jMkILq zX`?e+>4ikc1v$jwDe_;{d_Vvh`XJmZW@nf63HV8c8EMpt2+tDj8^Ns-9aDBo}tBRqhMjL*8M-`oNvucRUjekaT($vA1$4O)e&o}+QBCDsXY=%}Xr_^UaQ}mX=$=0QoRZUX8hHZqpn7kQC0tX@SK*$%773I5 zjwdKFsZ8TTxp{J}29lj0RxD#k0>uR8K`L`isVIXqMrh<-~}%Q$}0!%|f6Hn5dNV_YyBo{6zZKtQ3*<@Mo` zPY15G%&>~Wi%W_5KhT3Oej<49&-Eov0tFZ8yr0t0+E{1ml?(|dtC zA~dYAhR~GGG~lQ@%H}JO`ahqG-p>gyEg&gYF9LaWVTM*>ljp9U74^7TNF=ky<#5}qBH z9pu-z9=KNLGR1y=T0QW*x4<7ufm<#n1D_b@6=+e+p2t~MVGNlEjqFi0ZjwWgI zrp0MWWID>B(xHydj+2$;lhGH$s^dKk2sQgXQMNy3QfW&I*(Osa?gVI=i?UbUHke8_ zb8hVKH{8(W*jglrmt$>zY<(1v&QP5q>ND<`{OK`t;hs88s%rc5#7$DelvALz(`=&z z{HI#m!t^(sI_Mf`u`hJHicwG-SU2q% zFR*C;$fHrcbTCZ^e1Z$6h_|04q0BDA`P4F-kt8LJOz)MOkF;70sQ8?=UlOI_tG&B; zxMjUhzv4j7aV|Twu?Nj`sEj@$Vphu)N>#;*>LdRWi6a6TKc5Ov2*^f^gFIqnTU^O} zLJirE1NqU_^I~IZ3Sn+1x&lY@fDdLMJAK!5^RRaIJnh!9aXD@L*uLX0!K2%cBW2HL zTd&U-ZFD+@pA8rFOkNQPd7jv-+O_a))wy#D*%im{r z--Gma_Xb_FI*E5Qqilmu%L(Y$_TsSK?j;9(_eMyxY}PxKcp$yq&xnRa!&rsFUPuo8 z_`$2v>-g#_dKyEljc5lHr3q{a{v`h5(7g_`cEqXuw`bRg1Ti5HmHA z7#|*B%|(FRu*f>iLty*U+gIPkeg2aHH=6x=PU`IqcY34m-Sq*~>?l&AqXRBU##SV5 z-|!YEwN;0W)l%yX;_U^{IJ_k;Yt-36n@8vAD5XYpE5)#|YmO3v^L5E6{ ze^LzK2}^VrExe3rtvcni>VBlN{^Qzw)wG$d)xWh%<~;JC+`*u0>ezDn-R6_xn~kgD z@Uy#gSFPrXrt6cNhe<GF*9Q*);rwYimwV$x}JrypTro*NpM`N3CoMhy2L z>Q?Ql-Y>AecPw<(`YWuvYm-stAeJoi#(Jy=ga$gDorP2AM+8kpTQ=YazD>+RG!mza z71XAKOw0Bj5BKci43GS`4hw`hx$cx{xxtY?sp@5IU3`3)cv*x^zZzk4bu}6}Bjm96 zY6q56fE?PLoSuHduMTiTgw31;_W!c+D^eowfr1+q+WkHulv$e(WG{jU{t-r0AohnR zkQcqb{Q{C1=5lk|Iu?}~NL>JU;1fsxNW3+V1`XA()L{m=E8Gyz1EUkni_9wJ50Gzn zchv<$6>9oZnH{u@bmAa`3w)POZB-gJ{|5Xlp<3o7SrW41Yj^2*?WSb)`RZ zl9~IbGAe8euktm2v2zc_4LN(C_I9&fjFD9>jPzj66dvkaffP)y<#Li4%$Qfs*MNhH zT*&*w;M;+k-tTD5VS2TXHjhy+0Fl(ji|o=k8h%Ti#di8CYm)%$g|A8wvPP5i@#{Sx zZ&J7I*8`=`+QY;7)^zclIT1^6xd&zO|JuCIXG?h81(}&)@p=Kx;;v_!)26mu0@5$Z*TZbhfEXz=j6< z8X>!SpV333=|z=Fskp110BOUHD=>_)5Is1R(>%Uk5ZA~%>);L(j;MAD4gTd|i`}2CmuMW_+=vgX}H+nH)<`?L^?u!lY}b;}*6e0*9LO?3V$r}!DY zQ&we3Z0}(Q|K0lqmuKQ%oZSQuOyss77;ep$3*n^`Do#35;Ca55LbNrwd*-9_)&+Vl z?FxsFOLMNZ)O7qbzdCo{bE5?`&f$Z1S6h2ZMy|&Q!Na3}sv(dO)}?dTv2V&z-aTY&rVt6zv@KISM!&w8C zjkJ+7Wu|59q>1)0NKUme$qb$*Z9+YuRP&Rn}NIuDb zW<}WLY@tVT*>|E|jVZ+Cu+Z_^Yt3%nS8YLHoBrDMt07sDo4k5Cp=ynF05RhEq=&~r zV-Vp89myiUW6qOeyGS@yW5yoVA(DYok9{AOm916(`W#BPztciyi4*B^DgHH$>=j&F zJX3C`2=WSn9vekuBMi+X2Ut-`=X% z?^mGr{IJVarVnDfI_RIjbM1qEju&oD;Kpcnbr3f-EjW;WOHoG*Q@Hi^^@&1+s=vHb z`kr*WQw^EK&E(gHVNHUwqWx-KrrT>rbx)zd3<9-;Pk;ImB$(j zC6W9E9(G`l#9}C>&dP1Xi}wEI(EKh5fvmfxs0t4pLDx%L=%@y`?4+|L*L$h|rU?m- z!X)xneQ;ozQaVoM5qj$Uj0~8caWOF#o*WY&{feI_ogAjmcGat6PC7p5W!&%#CkptB z+alz8F6dyYn9f$B6=!eOli6ubw&cG2J<*TbBO!Fp)v=@I-c{#VU!`RzZQprtio|8- z%sL92#Jdt$*j)79^QbC%*R`bkBBAu_4r=>f=r(sG#6Ee0@6+hmSu!x=Wc#D6v&1d? 
zP2%qwt6OF_@%CmTgBNkuv*F;n=`ZMJdxOz+R(WbC;C^&~l%YDoYbElaKc!{25SVY( zNVGUU_rb+@WOK;Ps$LLy3B4qL+>PXxFM+*BIp7lp3$u=wSskyn6b3R(?i z>kebTZ$Fn=xemfP_qxPSKkJ5o&zB}6xa0EEsid@B za(w8X&01i>jWTR?7)Od_;V%kN~oiyDLUc$#vW-UZt3nD1e7R&60YT4*6DYZhY4%e zr*pBQ7N6D1jlDCAISjUa?AKSkdD=pER9o*zrpZtep|8Z}{qk30h=71?Xycg=C!45C{{*s(PhNTl8mCYMW1<|p-*dpL!NPOV_jdYXJeuNv&aS+x>L^7A(aTL!-eUnz< zI4Ivdy!4)-8OV8AGdzW`+=?x}FcXEHdW9QVrW_xa%LuYypulO;lL?evrfLUEyX*W9o%I7ruE3Gci4cXac%aoMGF?!< z#WqI1vP$>As9voje-9%^w+>7@kntgKFh+JB)`Y3<356A z{lUZg>(z~XOd@>$Fn*^sVwCkkDqoPv-mBbNi)?{8mM(DwJ94=J!IaUszd5lH= z>W)%(#N%HkhR0)XvMWHyixC2s!2Y19RiyKauGHbyfD$F4DJbAp6G>w6Ll2aqGuYFW zYaJugSxR>oL>OYOpK)ZQ*Watm`K0u;CO9rqv*C*yZ6vAPQ{KWb$^^L%G|*BnVRF<2 zQXU)3o%0N5gD3@%v%oAzgNv1GFzZ*rcaWKK%V5Is3#Esarodn(b)}@@l*>55)_-H| zIW`li7TR;n3w{0kdoAkO+LH>(Vr5mN10yz$WOITfBCY(qjF5}7VvQ-7I6Hp40T-Ol z-fa>MNSzA&sLzlq{A?^=O8}}>lfyH}dH2AZ`n@@ea?=af;<=xaI2L63?X8moH%x2E zojnrE-|RF@&^%75&dsCV32nc$?AL1lq0~E*=J&fyFoe*U`tj@jrxLZy%EFni+entI z-Q}zqWGQs}OM?&Vzr;?KFrp(JISEpCs8oPz7#p8*K;U~w>jLMC=@C?`g$2Lr-rhDt zZY>}E^K2j+o2S4>PH8sa$}3e1A=ptmkeLserW20HB0C=h$#J)m?_qlwi1?&hKff@f z{kyFEd)DJ+04KMG)!R!_M6+1yjm(g;?3-yBvV(=u0187ZOIEen}$UPj?9^$S~{?!s(R%(9&H=p9Qb0h5_?n3-R&D9 zDBqrQdbl`p&eyX?VwO`ulo!@?-hC~W#=qj4RP1SN{d##Q{-SC<27bzqU{nHadv`a_ z$i86OVm4+O$!qLSFjB#CNN8y8_EK{Qz(ZrPt*7O%XS}y*yggZmAK;P2+aq0HN8vr~^LYK;K1X1_eurCC zQDpYz1kzlLlEj$lqUE&BQ_K-XYk9Ai9wj9}R9nD>`j%5mp~ZoP5E=< z+)8P-hSM6yHm{-?-ExA~bWu$I!vDeDHeCG!Ozu!U@ky)8;Om*`u;Ktlg^wDg4RfoId+iJ7XGDRt zj?R%gQs%b{CPMN7LLnct24%m8hH0+!AC>LQ;*k#M^%=CLb6Cuz#}Q3m9{7D5&u23{ zF&B7jvkXMW*xH&nn@bq~Oa6tXIWxWW_O&JdpP~C%wF*aK`E zl52_mOMTUj`g3j`;uXkxK`+KwB$ ze7Rlw-9t8p0^!ONGJ6H}h!DneK4;Rbg`{5JtiIoS%{w3?j64#cUPP6Q|K*LV0}e6n z*_!o)X>xrPXVO$?CdgU^r*l?%KxF_!< z0281~k2)J>Q2JCzkv=30boXeq9=f$OG$1H5KCDSbX{6>~oXDVU;QA=TiN)sT?z7O- z7iEwZWeZg^z#FMv?ueGD#;R+Hx2tv8|G;QGv?{gvv}|;qbD4Eqa;ML*O;RF=U$ph{ zgosO0h=nEmX?Q!Y&YIFs`oQh&m0liNyT-;*iB73Ku`bUzLk3N(F3~}yLEWIMU0$lD zMxN2Wj_<&YMe}=0s3ihud|j%uZl!)ndm@7l02v-s8F$KLU)R*po;%=KHLSYwoN+DU zqf@1nEtey6m<)EmabEV0V@jGAox#oy1Ib*OV3d%Rjwg$!+WN4PvIf#ULOH z^WwqzRnu2WXJa9`{=JY{h-%VWEd7~bK@~A+qnpgUiv-kkUX3QGaugbqZ^;%Cf?z-I z$q_3^7XWf^1V||YU%m_aGga{0b0y{HX}K)mxBOm5&o65s^_iIX7}{__t%;;=P|XIR zuh2V<5Yq2m@lKiy3G=f)TP;M}7F~M6Bj)L9Q_c{S%*GL88Sa)AcqP_`i|D>cuA z>6My)+sQi3%3$hn8YyWUvakdm1OfZ{veCBKXf#&5EgJ1JT|TiWcm^@k-#;&9z&~!a zzw#&kq}Qvqx|cvBv8!=&sfp%_H`C$uhMRU#oHmAW$8;-&oPJR5F(;bZFB zvE;sG$`JTYbc7hDPu`!gWwL*FP#WFQ1m#@5UJ!V}o8%K-J&2wSgOr$u0G>>hYvt%s2F~Vwa9G0_$k%F$eLF$1{H<5* z;YHMTlB}>%`42RD<-gP3;$k23L=HJ>1brn3@hH#&^%M}o5I$!-l74-fV7cb;LKqH>tI0|9hD~>7OR~nQc6u zUfZFcvsTk z?(QwT9^4U}wyWw+*FKLM5PYrmg_oUuYGqfX`J@xa)4Q(D?U)_jVrTpsCVUZ8+EEmO znp;@ZkZ|^qY2O75wwCTT|7zd=yua-H1aR%Ag69*3P>6sh?Reg>Xk)Eq{iLQq#y~@qF5@IAU5V>Zm8x8I^e%#oB_k{$o>D6D8b-|R^veDEpBo9PndUfc9#c05 zON;-=OHYCGw#B*Hm-9*OWut`{y|L$sOZ^zV)4I+$OP-G;T3-)rT$TCmUBA20K#OC5 z34=-lD|VbRW7&QxjCsORf&~}ALC6eZ~pkt#IAm?D)wr_Fr+EBF*f;WX0TTC|N4t1zOy zNmRC9R-}GeOk3fk#SX{wklw&R21|-9WVmFCa)8dlp9>XtaY>NtVrrjb0W{4z@}zVb zWhVIlx1xP;z~~J}-^Y{od5MS#;H#iu^WFO0{OU@3O+!-rpq3hTK|5@T5hik$zk(gW z4jP_;o;-h|5kV(T`@MuhkUetR=Q;g5ndc}2xat~`de~jyMx<4P5QKaZ7>Lxm&wJ{N zS#AVDC18^1k7KI7;i?tD2wL<&0!%g^h=VV{LY#aD5